ak-claude 0.0.1 → 0.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -295,6 +295,110 @@ For `CodeAgent`, `stop()` also kills any currently running child process via SIG
295
295
 
296
296
  All classes extend `BaseClaude` and share these features (except `AgentQuery`, which wraps the Claude Agent SDK separately).
297
297
 
298
+ ### Raw SDK Client Access
299
+
300
+ All classes expose the underlying SDK clients via the `clients` namespace for advanced use cases:
301
+
302
+ ```javascript
303
+ import { Chat } from 'ak-claude';
304
+
305
+ const chat = new Chat({ apiKey: process.env.ANTHROPIC_API_KEY });
306
+ await chat.init();
307
+
308
+ // Access raw SDK clients
309
+ console.log(chat.clients.anthropic); // @anthropic-ai/sdk client (or null if using Vertex)
310
+ console.log(chat.clients.vertex); // @anthropic-ai/vertex-sdk client (or null if using direct API)
311
+ console.log(chat.clients.raw); // Convenience pointer to whichever is active
312
+
313
+ // Use raw client for SDK features not yet wrapped
314
+ for await (const model of chat.clients.raw.beta.models.list()) {
315
+ console.log(model.id, model.display_name);
316
+ }
317
+
318
+ // Access message batches API
319
+ const batch = await chat.clients.anthropic.messages.batches.create({
320
+ requests: [
321
+ { custom_id: 'req1', params: { model: 'claude-haiku-4-5-20251001', max_tokens: 1024, messages: [...] } },
322
+ { custom_id: 'req2', params: { model: 'claude-haiku-4-5-20251001', max_tokens: 1024, messages: [...] } }
323
+ ]
324
+ });
325
+
326
+ // Count tokens directly
327
+ const tokenCount = await chat.clients.raw.messages.countTokens({
328
+ model: 'claude-sonnet-4-6',
329
+ messages: chat.history
330
+ });
331
+ ```
332
+
333
+ **When to use:**
334
+ - Access new SDK features before they're wrapped
335
+ - Beta APIs and experimental features
336
+ - Low-level operations (message batches, etc.)
337
+ - SDK-specific functionality
338
+
339
+ **Common patterns:**
340
+
341
+ ```javascript
342
+ // Check which client is active
343
+ console.log('Using Anthropic:', chat.clients.anthropic !== null);
344
+ console.log('Using Vertex:', chat.clients.vertex !== null);
345
+
346
+ // Advanced streaming
347
+ const stream = chat.clients.raw.messages.stream({
348
+ model: 'claude-sonnet-4-6',
349
+ max_tokens: 1024,
350
+ messages: [{ role: 'user', content: 'Hello' }]
351
+ });
352
+ stream.on('text', (delta) => process.stdout.write(delta));
353
+ await stream.finalMessage();
354
+ ```
355
+
356
+ The original `client` property remains for backward compatibility (`client === clients.raw`).
357
+
358
+ ### Model Discovery
359
+
360
+ List and inspect available Claude models (direct API only, not Vertex AI):
361
+
362
+ ```javascript
363
+ import { Chat } from 'ak-claude';
364
+
365
+ const chat = new Chat({ apiKey: process.env.ANTHROPIC_API_KEY });
366
+
367
+ // List all available models
368
+ for await (const model of chat.listModels()) {
369
+ console.log(model.id); // "claude-sonnet-4-6"
370
+ console.log(model.display_name); // "Claude Sonnet 4.6"
371
+ console.log(model.created_at); // RFC 3339 datetime
372
+ }
373
+
374
+ // Get info about a specific model
375
+ const modelInfo = await chat.getModel('claude-sonnet-4-6');
376
+ console.log(modelInfo);
377
+
378
+ // Find newest model
379
+ let newestModel = null;
380
+ let newestDate = new Date(0);
381
+ for await (const model of chat.listModels()) {
382
+ const createdAt = new Date(model.created_at);
383
+ if (createdAt > newestDate) {
384
+ newestDate = createdAt;
385
+ newestModel = model;
386
+ }
387
+ }
388
+
389
+ // Check if a model exists
390
+ async function modelExists(modelId) {
391
+ try {
392
+ await chat.getModel(modelId);
393
+ return true;
394
+ } catch (err) {
395
+ return err.status !== 404;
396
+ }
397
+ }
398
+ ```
399
+
400
+ **Note:** These helpers only work with direct Anthropic API authentication, not Vertex AI. You can also access the models API via the direct SDK client: `chat.clients.anthropic.beta.models.list()` (avoid `clients.raw` here — under Vertex AI it points to the Vertex client, which does not support the models API).
401
+
298
402
  ### Authentication
299
403
 
300
404
  ```javascript
@@ -308,6 +412,8 @@ new Chat({ vertexai: true, vertexProjectId: 'my-project', vertexRegion: 'us-cent
308
412
  new Chat({ apiKey: 'your-key' }); // or ANTHROPIC_API_KEY / CLAUDE_API_KEY env var
309
413
  ```
310
414
 
415
+ **Note:** Vertex AI doesn't allow both `temperature` and `topP` to be specified together. When using Vertex AI, the module automatically uses only `temperature` if both are set, and `topP` is not sent to the API. The default for Vertex AI is `temperature: 0.7` (no `topP`).
416
+
311
417
  ### Token Estimation
312
418
 
313
419
  Uses Claude's `countTokens` API for exact input token counts before sending.
package/base.js CHANGED
@@ -82,7 +82,8 @@ class BaseClaude {
82
82
  // ── Generation Config ──
83
83
  this.maxTokens = options.maxTokens ?? DEFAULT_MAX_TOKENS;
84
84
  this.temperature = options.temperature ?? 0.7;
85
- this.topP = options.topP ?? 0.95;
85
+ // Vertex AI doesn't allow both temperature and topP - only set topP default for direct API
86
+ this.topP = options.topP ?? (this.vertexai ? undefined : 0.95);
86
87
  this.topK = options.topK ?? undefined;
87
88
 
88
89
  // ── Extended Thinking ──
@@ -109,11 +110,22 @@ class BaseClaude {
109
110
  // _ensureClient() is called at the start of every API method.
110
111
  this.client = null;
111
112
  this._clientReady = false;
113
+
114
+ // ── Clients Namespace (for raw SDK access) ──
115
+ // Exposes the underlying SDK clients for advanced use cases
116
+ this.clients = {
117
+ anthropic: null, // @anthropic-ai/sdk client (direct API)
118
+ vertex: null, // @anthropic-ai/vertex-sdk client
119
+ raw: null // Convenience pointer to whichever is active
120
+ };
121
+
112
122
  if (!this.vertexai) {
113
123
  this.client = new Anthropic({
114
124
  apiKey: this.apiKey,
115
125
  maxRetries: this.maxRetries
116
126
  });
127
+ this.clients.anthropic = this.client;
128
+ this.clients.raw = this.client;
117
129
  this._clientReady = true;
118
130
  }
119
131
 
@@ -174,6 +186,8 @@ class BaseClaude {
174
186
  }
175
187
  return superBuildRequest.call(this, options, extra);
176
188
  };
189
+ this.clients.vertex = this.client;
190
+ this.clients.raw = this.client;
177
191
  this._clientReady = true;
178
192
  log.debug(`${this.constructor.name}: Vertex AI client created (project=${this.vertexProjectId}, region=${this.vertexRegion})`);
179
193
  }
@@ -282,8 +296,15 @@ class BaseClaude {
282
296
  params.thinking = this.thinking;
283
297
  // When thinking is enabled, temperature must be 1 and top_p/top_k are not supported
284
298
  } else {
285
- if (this.temperature !== undefined) params.temperature = this.temperature;
286
- if (this.topP !== undefined) params.top_p = this.topP;
299
+ // Vertex AI doesn't allow both temperature and topP
300
+ if (this.vertexai && this.temperature !== undefined && this.topP !== undefined) {
301
+ // Prefer temperature, skip topP for Vertex AI
302
+ params.temperature = this.temperature;
303
+ log.debug('Vertex AI: Using temperature only (topP ignored)');
304
+ } else {
305
+ if (this.temperature !== undefined) params.temperature = this.temperature;
306
+ if (this.topP !== undefined) params.top_p = this.topP;
307
+ }
287
308
  }
288
309
 
289
310
  const response = await this.client.messages.create(params);
@@ -328,8 +349,15 @@ class BaseClaude {
328
349
  if (this.thinking) {
329
350
  params.thinking = this.thinking;
330
351
  } else {
331
- if (this.temperature !== undefined) params.temperature = this.temperature;
332
- if (this.topP !== undefined) params.top_p = this.topP;
352
+ // Vertex AI doesn't allow both temperature and topP
353
+ if (this.vertexai && this.temperature !== undefined && this.topP !== undefined) {
354
+ // Prefer temperature, skip topP for Vertex AI
355
+ params.temperature = this.temperature;
356
+ log.debug('Vertex AI: Using temperature only (topP ignored)');
357
+ } else {
358
+ if (this.temperature !== undefined) params.temperature = this.temperature;
359
+ if (this.topP !== undefined) params.top_p = this.topP;
360
+ }
333
361
  }
334
362
 
335
363
  const stream = this.client.messages.stream(params);
@@ -563,6 +591,53 @@ class BaseClaude {
563
591
  };
564
592
  }
565
593
 
594
+ // ── Model Management ─────────────────────────────────────────────────────
595
+
596
+ /**
597
+ * Lists all available models from the Anthropic API.
598
+ * Provides model IDs, display names, and creation dates.
599
+ * Returns an async iterable that automatically fetches more pages as needed.
600
+ *
601
+ * NOTE: Only available with direct Anthropic API access (not Vertex AI).
602
+ * @returns {AsyncIterable<Object>} AsyncIterable of model objects
603
+ * @throws {Error} If using Vertex AI authentication
604
+ * @example
605
+ * const chat = new Chat({ apiKey: 'your-key' });
606
+ * for await (const model of chat.listModels()) {
607
+ * console.log(model.id, model.display_name);
608
+ * }
609
+ */
610
+ async *listModels() {
611
+ if (this.vertexai) {
612
+ throw new Error('listModels() is not available with Vertex AI. Use direct Anthropic API authentication instead.');
613
+ }
614
+ await this._ensureClient();
615
+ const pageIterator = this.client.beta.models.list();
616
+ for await (const model of pageIterator) {
617
+ yield model;
618
+ }
619
+ }
620
+
621
+ /**
622
+ * Retrieves detailed information about a specific model.
623
+ *
624
+ * NOTE: Only available with direct Anthropic API access (not Vertex AI).
625
+ * @param {string} modelId - The model ID (e.g., 'claude-sonnet-4-6')
626
+ * @returns {Promise<Object>} The model details
627
+ * @throws {Error} If using Vertex AI authentication
628
+ * @example
629
+ * const chat = new Chat({ apiKey: 'your-key' });
630
+ * const modelInfo = await chat.getModel('claude-sonnet-4-6');
631
+ * console.log(modelInfo);
632
+ */
633
+ async getModel(modelId) {
634
+ if (this.vertexai) {
635
+ throw new Error('getModel() is not available with Vertex AI. Use direct Anthropic API authentication instead.');
636
+ }
637
+ await this._ensureClient();
638
+ return await this.client.beta.models.retrieve(modelId);
639
+ }
640
+
566
641
  // ── Application-Level Retry ──────────────────────────────────────────────
567
642
 
568
643
  /**
package/index.cjs CHANGED
@@ -342,7 +342,7 @@ var BaseClaude = class {
342
342
  }
343
343
  this.maxTokens = options.maxTokens ?? DEFAULT_MAX_TOKENS;
344
344
  this.temperature = options.temperature ?? 0.7;
345
- this.topP = options.topP ?? 0.95;
345
+ this.topP = options.topP ?? (this.vertexai ? void 0 : 0.95);
346
346
  this.topK = options.topK ?? void 0;
347
347
  this.thinking = options.thinking ?? null;
348
348
  this.cacheSystemPrompt = options.cacheSystemPrompt ?? false;
@@ -353,11 +353,21 @@ var BaseClaude = class {
353
353
  this._configureLogLevel(options.logLevel);
354
354
  this.client = null;
355
355
  this._clientReady = false;
356
+ this.clients = {
357
+ anthropic: null,
358
+ // @anthropic-ai/sdk client (direct API)
359
+ vertex: null,
360
+ // @anthropic-ai/vertex-sdk client
361
+ raw: null
362
+ // Convenience pointer to whichever is active
363
+ };
356
364
  if (!this.vertexai) {
357
365
  this.client = new import_sdk.default({
358
366
  apiKey: this.apiKey,
359
367
  maxRetries: this.maxRetries
360
368
  });
369
+ this.clients.anthropic = this.client;
370
+ this.clients.raw = this.client;
361
371
  this._clientReady = true;
362
372
  }
363
373
  this.history = [];
@@ -408,6 +418,8 @@ var BaseClaude = class {
408
418
  }
409
419
  return superBuildRequest.call(this, options, extra);
410
420
  };
421
+ this.clients.vertex = this.client;
422
+ this.clients.raw = this.client;
411
423
  this._clientReady = true;
412
424
  logger_default.debug(`${this.constructor.name}: Vertex AI client created (project=${this.vertexProjectId}, region=${this.vertexRegion})`);
413
425
  }
@@ -495,8 +507,13 @@ var BaseClaude = class {
495
507
  if (this.thinking) {
496
508
  params.thinking = this.thinking;
497
509
  } else {
498
- if (this.temperature !== void 0) params.temperature = this.temperature;
499
- if (this.topP !== void 0) params.top_p = this.topP;
510
+ if (this.vertexai && this.temperature !== void 0 && this.topP !== void 0) {
511
+ params.temperature = this.temperature;
512
+ logger_default.debug("Vertex AI: Using temperature only (topP ignored)");
513
+ } else {
514
+ if (this.temperature !== void 0) params.temperature = this.temperature;
515
+ if (this.topP !== void 0) params.top_p = this.topP;
516
+ }
500
517
  }
501
518
  const response = await this.client.messages.create(params);
502
519
  this.history.push({ role: "assistant", content: response.content });
@@ -528,8 +545,13 @@ var BaseClaude = class {
528
545
  if (this.thinking) {
529
546
  params.thinking = this.thinking;
530
547
  } else {
531
- if (this.temperature !== void 0) params.temperature = this.temperature;
532
- if (this.topP !== void 0) params.top_p = this.topP;
548
+ if (this.vertexai && this.temperature !== void 0 && this.topP !== void 0) {
549
+ params.temperature = this.temperature;
550
+ logger_default.debug("Vertex AI: Using temperature only (topP ignored)");
551
+ } else {
552
+ if (this.temperature !== void 0) params.temperature = this.temperature;
553
+ if (this.topP !== void 0) params.top_p = this.topP;
554
+ }
533
555
  }
534
556
  const stream = this.client.messages.stream(params);
535
557
  return stream;
@@ -724,6 +746,50 @@ ${contextText}
724
746
  note: "Cost is for input tokens only; output cost depends on response length"
725
747
  };
726
748
  }
749
+ // ── Model Management ─────────────────────────────────────────────────────
750
+ /**
751
+ * Lists all available models from the Anthropic API.
752
+ * Provides model IDs, display names, and creation dates.
753
+ * Returns an async iterable that automatically fetches more pages as needed.
754
+ *
755
+ * NOTE: Only available with direct Anthropic API access (not Vertex AI).
756
+ * @returns {AsyncIterable<Object>} AsyncIterable of model objects
757
+ * @throws {Error} If using Vertex AI authentication
758
+ * @example
759
+ * const chat = new Chat({ apiKey: 'your-key' });
760
+ * for await (const model of chat.listModels()) {
761
+ * console.log(model.id, model.display_name);
762
+ * }
763
+ */
764
+ async *listModels() {
765
+ if (this.vertexai) {
766
+ throw new Error("listModels() is not available with Vertex AI. Use direct Anthropic API authentication instead.");
767
+ }
768
+ await this._ensureClient();
769
+ const pageIterator = this.client.beta.models.list();
770
+ for await (const model of pageIterator) {
771
+ yield model;
772
+ }
773
+ }
774
+ /**
775
+ * Retrieves detailed information about a specific model.
776
+ *
777
+ * NOTE: Only available with direct Anthropic API access (not Vertex AI).
778
+ * @param {string} modelId - The model ID (e.g., 'claude-sonnet-4-6')
779
+ * @returns {Promise<Object>} The model details
780
+ * @throws {Error} If using Vertex AI authentication
781
+ * @example
782
+ * const chat = new Chat({ apiKey: 'your-key' });
783
+ * const modelInfo = await chat.getModel('claude-sonnet-4-6');
784
+ * console.log(modelInfo);
785
+ */
786
+ async getModel(modelId) {
787
+ if (this.vertexai) {
788
+ throw new Error("getModel() is not available with Vertex AI. Use direct Anthropic API authentication instead.");
789
+ }
790
+ await this._ensureClient();
791
+ return await this.client.beta.models.retrieve(modelId);
792
+ }
727
793
  // ── Application-Level Retry ──────────────────────────────────────────────
728
794
  /**
729
795
  * Wraps an async function with retry logic.
package/package.json CHANGED
@@ -2,7 +2,7 @@
2
2
  "name": "ak-claude",
3
3
  "author": "ak@mixpanel.com",
4
4
  "description": "AK's Claude AI Helper for doing... everything",
5
- "version": "0.0.1",
5
+ "version": "0.0.2",
6
6
  "main": "index.js",
7
7
  "files": [
8
8
  "index.js",
package/types.d.ts CHANGED
@@ -402,6 +402,15 @@ export declare class BaseClaude {
402
402
  modelName: string;
403
403
  systemPrompt: string | null | false;
404
404
  client: any;
405
+ /** Raw SDK clients namespace for advanced use cases */
406
+ clients: {
407
+ /** @anthropic-ai/sdk client (null when using Vertex AI) */
408
+ anthropic: any | null;
409
+ /** @anthropic-ai/vertex-sdk client (null when using direct API) */
410
+ vertex: any | null;
411
+ /** Convenience pointer to whichever client is active */
412
+ raw: any;
413
+ };
405
414
  history: any[];
406
415
  lastResponseMetadata: ResponseMetadata | null;
407
416
  exampleCount: number;
@@ -427,6 +436,8 @@ export declare class BaseClaude {
427
436
  estimatedInputCost: number;
428
437
  note: string;
429
438
  }>;
439
+ listModels(): AsyncGenerator<any, void, unknown>;
440
+ getModel(modelId: string): Promise<any>;
430
441
  }
431
442
 
432
443
  export declare class Transformer extends BaseClaude {