neonctl 2.10.1 → 2.12.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/analytics.js CHANGED
@@ -25,13 +25,26 @@ export const analyticsMiddleware = async (args) => {
  log.debug('Failed to read credentials file', err);
  }
  try {
- if (!userId && args.apiKey) {
+ if (args.apiKey) {
  const apiClient = getApiClient({
  apiKey: args.apiKey,
  apiHost: args.apiHost,
  });
- const resp = await apiClient?.getCurrentUserInfo?.();
- userId = resp?.data?.id;
+ // Populating api key details for analytics
+ const authDetailsResponse = await apiClient.getAuthDetails();
+ const authDetails = authDetailsResponse.data;
+ args.accountId = authDetails.account_id;
+ args.authMethod = authDetails.auth_method;
+ args.authData = authDetails.auth_data;
+ // Get user id if not org api key
+ if (!userId && authDetails.auth_method !== 'api_key_org') {
+ const resp = await apiClient?.getCurrentUserInfo?.();
+ userId = resp?.data?.id;
+ }
+ }
+ else {
+ args.accountId = userId;
+ args.authMethod = 'oauth';
  }
  }
  catch (err) {
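
Commentary (not part of the published diff): analytics.js now asks the API for auth details so events can be attributed to an account and auth method rather than only a user id. A minimal sketch of how those fields end up on an analytics event, assuming the getAuthDetails() response shape visible above ({ account_id, auth_method, auth_data }); everything else here is illustrative:

    // Sketch only: the new analytics properties derived from an API key.
    const { data: authDetails } = await apiClient.getAuthDetails();
    trackEvent('cli_command_success', {
      accountId: authDetails.account_id,   // account behind the key
      authMethod: authDetails.auth_method, // e.g. 'api_key_org' for org keys; the CLI falls back to 'oauth' when no key is set
      authData: authDetails.auth_data,
    });
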
package/commands/auth.js CHANGED
@@ -1,4 +1,4 @@
- import { existsSync, readFileSync, writeFileSync } from 'node:fs';
+ import { existsSync, readFileSync, writeFileSync, rmSync } from 'node:fs';
  import { join } from 'node:path';
  import { createHash } from 'node:crypto';
  import { TokenSet } from 'openid-client';
@@ -41,6 +41,7 @@ export const authFlow = async ({ configDir, oauthHost, clientId, apiHost, forceA
  const preserveCredentials = async (path, credentials, apiClient) => {
  const { data: { id }, } = await apiClient.getCurrentUserInfo();
  const contents = JSON.stringify({
+ // Making the linter happy by explicitly confirming we don't care about @typescript-eslint/no-misused-spread
  ...credentials,
  user_id: id,
  });
@@ -152,4 +153,25 @@ export const ensureAuth = async (props) => {
  apiHost: props.apiHost,
  });
  };
+ /**
+ * Deletes the credentials file at the specified path
+ * @param configDir Directory where credentials file is stored
+ */
+ export const deleteCredentials = (configDir) => {
+ const credentialsPath = join(configDir, CREDENTIALS_FILE);
+ try {
+ if (existsSync(credentialsPath)) {
+ rmSync(credentialsPath);
+ log.info('Deleted credentials from %s', credentialsPath);
+ }
+ else {
+ log.debug('Credentials file %s does not exist', credentialsPath);
+ }
+ }
+ catch (err) {
+ const typedErr = err instanceof Error ? err : new Error('Unknown error');
+ log.error('Failed to delete credentials: %s', typedErr.message);
+ throw new Error('CREDENTIALS_DELETE_FAILED');
+ }
+ };
  const md5hash = (s) => createHash('md5').update(s).digest('hex');
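
For orientation (not part of the diff): deleteCredentials is consumed by index.js further down to recover from expired credentials. A hedged sketch of forcing a fresh login with the two exports shown in this file; all option values are placeholders:

    // Sketch only: wipe the stored credentials, then re-run the OAuth flow.
    // deleteCredentials throws 'CREDENTIALS_DELETE_FAILED' if removal fails.
    deleteCredentials(configDir);
    await authFlow({ configDir, oauthHost, clientId, apiHost, forceAuth: true });
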
@@ -6,7 +6,7 @@ import { afterAll, beforeAll, beforeEach, describe, expect, vi } from 'vitest';
  import * as authModule from '../auth';
  import { test } from '../test_utils/fixtures';
  import { startOauthServer } from '../test_utils/oauth_server';
- import { authFlow, ensureAuth } from './auth';
+ import { authFlow, ensureAuth, deleteCredentials } from './auth';
  vi.mock('open', () => ({ default: vi.fn((url) => axios.get(url)) }));
  vi.mock('../pkg.ts', () => ({ default: { version: '0.0.0' } }));
  describe('auth', () => {
@@ -163,3 +163,33 @@ describe('ensureAuth', () => {
  expect(props.apiKey).toBe('new-token');
  });
  });
+ describe('deleteCredentials', () => {
+ let configDir = '';
+ beforeAll(() => {
+ configDir = mkdtempSync('test-config-delete');
+ });
+ afterAll(() => {
+ rmSync(configDir, { recursive: true });
+ });
+ test('should successfully delete credentials file', () => {
+ const credentialsPath = join(configDir, 'credentials.json');
+ writeFileSync(credentialsPath, 'test-content', { mode: 0o700 });
+ expect(existsSync(credentialsPath)).toBe(true);
+ deleteCredentials(configDir);
+ expect(existsSync(credentialsPath)).toBe(false);
+ });
+ test('should handle non-existent file gracefully', () => {
+ const nonExistentDir = mkdtempSync('test-config-nonexistent');
+ // Ensure the file doesn't exist
+ const credentialsPath = join(nonExistentDir, 'credentials.json');
+ if (existsSync(credentialsPath)) {
+ rmSync(credentialsPath);
+ }
+ expect(existsSync(credentialsPath)).toBe(false);
+ // Should not throw an error
+ expect(() => {
+ deleteCredentials(nonExistentDir);
+ }).not.toThrow();
+ rmSync(nonExistentDir, { recursive: true });
+ });
+ });
@@ -142,6 +142,10 @@ export const builder = (argv) => argv
  describe: 'The number of Compute Units. Could be a fixed size (e.g. "2") or a range delimited by a dash (e.g. "0.5-3").',
  type: 'string',
  },
+ name: {
+ type: 'string',
+ describe: 'Optional name of the compute',
+ },
  }), (args) => addCompute(args))
  .command('delete <id|name>', 'Delete a branch', (yargs) => yargs, (args) => deleteBranch(args))
  .command('get <id|name>', 'Get a branch', (yargs) => yargs, (args) => get(args))
@@ -319,11 +323,13 @@ const get = async (props) => {
  };
  const addCompute = async (props) => {
  const branchId = await branchIdFromProps(props);
+ const computeName = props.name ? { name: props.name } : null;
  const { data } = await retryOnLock(() => props.apiClient.createProjectEndpoint(props.projectId, {
  endpoint: {
  branch_id: branchId,
  type: props.type,
  ...(props.cu ? getComputeUnits(props.cu) : undefined),
+ ...computeName,
  },
  }));
  writer(props).end(data.endpoint, {
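
Sketch (not part of the diff) of what the new `--name` flag changes in the request addCompute builds; the literal value is a placeholder:

    // Illustrative only: endpoint payload with the optional compute name spread in.
    const payload = {
      endpoint: {
        branch_id: branchId,
        type: props.type,
        ...(props.cu ? getComputeUnits(props.cu) : undefined),
        ...(props.name ? { name: 'My fancy new compute' } : null), // present only when --name is passed
      },
    };
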
@@ -274,6 +274,19 @@ describe('branches', () => {
  '0.5-2',
  ]);
  });
+ test('add compute with a name', async ({ testCliCommand }) => {
+ await testCliCommand([
+ 'branches',
+ 'add-compute',
+ 'test_branch_with_autoscaling',
+ '--project-id',
+ 'test',
+ '--cu',
+ '0.5-2',
+ '--name',
+ 'My fancy new compute',
+ ]);
+ });
  /* reset */
  test('reset branch to parent', async ({ testCliCommand }) => {
  await testCliCommand([
@@ -52,6 +52,10 @@ export const builder = (argv) => {
  .description,
  type: 'boolean',
  },
+ hipaa: {
+ describe: projectCreateRequest['project.settings.hipaa'].description,
+ type: 'boolean',
+ },
  name: {
  describe: projectCreateRequest['project.name'].description,
  type: 'string',
@@ -102,6 +106,10 @@ export const builder = (argv) => {
  ' Use --block-public-connections=false to set the value to false.',
  type: 'boolean',
  },
+ hipaa: {
+ describe: projectUpdateRequest['project.settings.hipaa'].description,
+ type: 'boolean',
+ },
  cu: {
  describe: 'The number of Compute Units. Could be a fixed size (e.g. "2") or a range delimited by a dash (e.g. "0.5-3").',
  type: 'string',
@@ -164,6 +172,12 @@ const list = async (props) => {
  };
  const create = async (props) => {
  const project = {};
+ if (props.hipaa !== undefined) {
+ if (!project.settings) {
+ project.settings = {};
+ }
+ project.settings.hipaa = props.hipaa;
+ }
  if (props.blockPublicConnections !== undefined) {
  if (!project.settings) {
  project.settings = {};
@@ -226,6 +240,12 @@ const deleteProject = async (props) => {
  };
  const update = async (props) => {
  const project = {};
+ if (props.hipaa !== undefined) {
+ if (!project.settings) {
+ project.settings = {};
+ }
+ project.settings.hipaa = props.hipaa;
+ }
  if (props.blockPublicConnections !== undefined) {
  if (!project.settings) {
  project.settings = {};
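
Sketch (not part of the diff): both create and update now map `--hipaa` onto project settings, so a command like `neonctl projects update test --hipaa` produces a body roughly like the following; with yargs' standard boolean negation, `--no-hipaa` would send false:

    // Illustrative only: settings fragment produced by the hipaa handling above.
    const project = {
      settings: {
        hipaa: true, // from props.hipaa
      },
    };
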
@@ -13,6 +13,15 @@ describe('projects', () => {
  test('create', async ({ testCliCommand }) => {
  await testCliCommand(['projects', 'create', '--name', 'test_project']);
  });
+ test('create with hipaa flag', async ({ testCliCommand }) => {
+ await testCliCommand([
+ 'projects',
+ 'create',
+ '--name',
+ 'test_project',
+ '--hipaa',
+ ]);
+ });
  test('create with org id', async ({ testCliCommand }) => {
  await testCliCommand([
  'projects',
@@ -102,6 +111,9 @@ describe('projects', () => {
  'test_project_new_name',
  ]);
  });
+ test('update hipaa flag', async ({ testCliCommand }) => {
+ await testCliCommand(['projects', 'update', 'test', '--hipaa']);
+ });
  test('update project with default fixed size CU', async ({ testCliCommand, }) => {
  await testCliCommand([
  'projects',
@@ -59,14 +59,13 @@ const createSchemaDiff = async (baseBranch, pointInTime, database, props) => {
  };
  const fetchSchema = async (pointInTime, database, props) => {
  try {
- return props.apiClient
- .getProjectBranchSchema({
+ const response = await props.apiClient.getProjectBranchSchema({
  projectId: props.projectId,
  branchId: pointInTime.branchId,
  db_name: database.name,
  ...pointInTimeParams(pointInTime),
- })
- .then((response) => response.data.sql ?? '');
+ });
+ return response.data.sql ?? '';
  }
  catch (error) {
  if (isAxiosError(error)) {
package/index.js CHANGED
@@ -13,7 +13,7 @@ axiosDebug({
  debug(error);
  },
  });
- import { ensureAuth } from './commands/auth.js';
+ import { ensureAuth, deleteCredentials } from './commands/auth.js';
  import { defaultDir, ensureConfigDir } from './config.js';
  import { log } from './log.js';
  import { defaultClientID } from './auth.js';
@@ -143,19 +143,33 @@ builder = builder
  .scriptName(basename(process.argv[1]) === 'neon' ? 'neon' : 'neonctl')
  .epilog('For more information, visit https://neon.tech/docs/reference/neon-cli')
  .wrap(null)
- .fail(async (msg, err) => {
+ .fail(false);
+ async function handleError(msg, err) {
  if (process.argv.some((arg) => arg === '--help' || arg === '-h')) {
  await showHelp(builder);
  process.exit(0);
  }
+ // Log stack trace if available
+ if (err instanceof Error && err.stack) {
+ log.debug('Stack: %s', err.stack);
+ }
  if (isAxiosError(err)) {
  if (err.code === 'ECONNABORTED') {
  log.error('Request timed out');
  sendError(err, 'REQUEST_TIMEOUT');
+ return false;
  }
  else if (err.response?.status === 401) {
  sendError(err, 'AUTH_FAILED');
- log.error('Authentication failed, please run `neonctl auth`');
+ log.info('Authentication failed, deleting credentials...');
+ try {
+ deleteCredentials(defaultDir);
+ return true; // Allow retry for auth failures
+ }
+ catch (deleteErr) {
+ log.debug('Failed to delete credentials: %s', deleteErr instanceof Error ? deleteErr.message : 'unknown error');
+ return false;
+ }
  }
  else {
  if (err.response?.data?.message) {
@@ -163,33 +177,47 @@ builder = builder
  }
  log.debug('status: %d %s | path: %s', err.response?.status, err.response?.statusText, err.request?.path);
  sendError(err, 'API_ERROR');
+ return false;
  }
  }
  else {
- sendError(err || new Error(msg), matchErrorCode(msg || err?.message));
- log.error(msg || err?.message);
- }
- await closeAnalytics();
- if (err?.stack) {
- log.debug('Stack: %s', err.stack);
+ const error = err instanceof Error ? err : new Error(msg || 'Unknown error');
+ sendError(error, matchErrorCode(error.message));
+ log.error(error.message);
+ return false;
  }
- process.exit(1);
- });
+ }
  void (async () => {
- try {
- const args = await builder.argv;
- trackEvent('cli_command_success', {
- ...getAnalyticsEventProperties(args),
- projectId: args.projectId,
- branchId: args.branchId,
- });
- if (args._.length === 0 || args.help) {
- await showHelp(builder);
+ // Main loop with max 2 attempts (initial + 1 retry):
+ let attempts = 0;
+ const MAX_ATTEMPTS = 2;
+ while (attempts < MAX_ATTEMPTS) {
+ try {
+ const args = await builder.argv;
+ // Send analytics for a successful attempt
+ trackEvent('cli_command_success', {
+ ...getAnalyticsEventProperties(args),
+ projectId: args.projectId,
+ branchId: args.branchId,
+ accountId: args.accountId,
+ authMethod: args.authMethod,
+ authData: args.authData,
+ });
+ if (args._.length === 0 || args.help) {
+ await showHelp(builder);
+ process.exit(0);
+ }
+ await closeAnalytics();
  process.exit(0);
  }
- await closeAnalytics();
- }
- catch {
- // noop
+ catch (err) {
+ attempts++;
+ const shouldRetry = await handleError('', err);
+ if (!shouldRetry || attempts >= MAX_ATTEMPTS) {
+ await closeAnalytics();
+ process.exit(1);
+ }
+ // If shouldRetry is true and we haven't hit max attempts, loop continues
+ }
  }
  })();
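
To summarise the new control flow (commentary, not part of the diff): handleError returns true only for a 401 whose credentials file was deleted successfully, which lets the main loop retry the command exactly once, presumably so the next attempt can re-authenticate. A compressed sketch of that contract, with the real bodies elided:

    // Sketch only: the retry contract implemented above.
    // handleError(msg, err) resolves to true when a single retry is worthwhile.
    let attempts = 0;
    while (attempts < 2) {
      try {
        await runCommand(); // stands in for `await builder.argv`
        process.exit(0);
      }
      catch (err) {
        attempts++;
        const shouldRetry = await handleError('', err); // true only for 401 + deleted credentials
        if (!shouldRetry || attempts >= 2) {
          process.exit(1);
        }
      }
    }
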
package/package.json CHANGED
@@ -5,7 +5,7 @@
  "url": "git+ssh://git@github.com/neondatabase/neonctl.git"
  },
  "type": "module",
- "version": "2.10.1",
+ "version": "2.12.0",
  "description": "CLI tool for NeonDB Cloud management",
  "main": "index.js",
  "author": "NeonDB",
@@ -22,7 +22,7 @@
  "@apidevtools/swagger-parser": "^10.1.0",
  "@commitlint/cli": "^17.6.5",
  "@commitlint/config-conventional": "^17.6.5",
- "@eslint/js": "^9.6.0",
+ "@eslint/js": "^9.23.0",
  "@rollup/plugin-commonjs": "^25.0.2",
  "@rollup/plugin-json": "^6.0.0",
  "@rollup/plugin-node-resolve": "^15.1.0",
@@ -39,7 +39,7 @@
  "@types/which": "^3.0.0",
  "@types/yargs": "^17.0.24",
  "emocks": "^3.0.1",
- "eslint": "^9.6.0",
+ "eslint": "^9.23.0",
  "express": "^4.18.2",
  "husky": "^8.0.3",
  "lint-staged": "^13.0.3",
@@ -50,11 +50,11 @@
  "semantic-release": "^23.0.8",
  "strip-ansi": "^7.1.0",
  "typescript": "^4.7.4",
- "typescript-eslint": "v8.0.0-alpha.41",
- "vitest": "^1.6.1"
+ "typescript-eslint": "8.28.0",
+ "vitest": "^1.6.0"
  },
  "dependencies": {
- "@neondatabase/api-client": "1.12.0",
+ "@neondatabase/api-client": "2.1.0",
  "@segment/analytics-node": "^1.0.0-beta.26",
  "axios": "^1.4.0",
  "axios-debug-log": "^1.0.0",
package/parameters.gen.js CHANGED
@@ -22,7 +22,7 @@ export const projectCreateRequest = {
  },
  'project.settings.quota.logical_size_bytes': {
  type: "number",
- description: "Limit on the logical size of every project's branch.\n",
+ description: "Limit on the logical size of every project's branch.\n\nIf a branch exceeds its `logical_size_bytes` quota, computes can still be started,\nbut write operations will fail—allowing data to be deleted to free up space.\nComputes on other branches are not affected.\n\nSetting `logical_size_bytes` overrides any lower value set by the `neon.max_cluster_size` Postgres setting.\n",
  demandOption: false,
  },
  'project.settings.allowed_ips.ips': {
@@ -65,9 +65,30 @@ export const projectCreateRequest = {
  description: "When set, connections using VPC endpoints are disallowed.\nThis parameter is under active development and its semantics may change in the future.\n",
  demandOption: false,
  },
+ 'project.settings.audit_log_level': {
+ type: "string",
+ description: undefined,
+ demandOption: false,
+ choices: ["base", "extended", "full"],
+ },
+ 'project.settings.hipaa': {
+ type: "boolean",
+ description: undefined,
+ demandOption: false,
+ },
+ 'project.settings.preload_libraries.use_defaults': {
+ type: "boolean",
+ description: undefined,
+ demandOption: false,
+ },
+ 'project.settings.preload_libraries.enabled_libraries': {
+ type: "array",
+ description: undefined,
+ demandOption: false,
+ },
  'project.name': {
  type: "string",
- description: "The project name",
+ description: "The project name. If not specified, the name will be identical to the generated project ID",
  demandOption: false,
  },
  'project.branch.name': {
@@ -144,7 +165,7 @@ export const projectUpdateRequest = {
  },
  'project.settings.quota.logical_size_bytes': {
  type: "number",
- description: "Limit on the logical size of every project's branch.\n",
+ description: "Limit on the logical size of every project's branch.\n\nIf a branch exceeds its `logical_size_bytes` quota, computes can still be started,\nbut write operations will fail—allowing data to be deleted to free up space.\nComputes on other branches are not affected.\n\nSetting `logical_size_bytes` overrides any lower value set by the `neon.max_cluster_size` Postgres setting.\n",
  demandOption: false,
  },
  'project.settings.allowed_ips.ips': {
@@ -187,6 +208,27 @@ export const projectUpdateRequest = {
  description: "When set, connections using VPC endpoints are disallowed.\nThis parameter is under active development and its semantics may change in the future.\n",
  demandOption: false,
  },
+ 'project.settings.audit_log_level': {
+ type: "string",
+ description: undefined,
+ demandOption: false,
+ choices: ["base", "extended", "full"],
+ },
+ 'project.settings.hipaa': {
+ type: "boolean",
+ description: undefined,
+ demandOption: false,
+ },
+ 'project.settings.preload_libraries.use_defaults': {
+ type: "boolean",
+ description: undefined,
+ demandOption: false,
+ },
+ 'project.settings.preload_libraries.enabled_libraries': {
+ type: "array",
+ description: undefined,
+ demandOption: false,
+ },
  'project.name': {
  type: "string",
  description: "The project name",
@@ -241,7 +283,7 @@ export const branchCreateRequest = {
  },
  'branch.init_source': {
  type: "string",
- description: "The initialization source type for the branch. Valid values are `schema-only` and `parent-data`.\nThis parameter is under active development and may change its semantics in the future.\n",
+ description: "The source of initialization for the branch. Valid values are `schema-only` and `parent-data` (default).\n * `schema-only` - creates a new root branch containing only the schema. Use `parent_id` to specify the source branch. Optionally, you can provide `parent_lsn` or `parent_timestamp` to branch from a specific point in time or LSN. These fields define which branch to copy the schema from and at what point—they do not establish a parent-child relationship between the `parent_id` branch and the new schema-only branch.\n * `parent-data` - creates the branch with both schema and data from the parent.\n",
  demandOption: false,
  },
  };