hoomanjs 1.3.0 → 1.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -26,7 +26,7 @@ It gives you:
  
  ## Features
  
- - Multiple LLM providers: `ollama`, `openai`, `anthropic`, `google`, `bedrock`
+ - Multiple LLM providers: `ollama`, `openai`, `anthropic`, `google`, `bedrock`, `groq`, `xai`
  - Local configuration under `~/.hooman`
  - MCP server support via `stdio`, `streamable-http`, and `sse`
  - MCP server `instructions` support: server-provided instructions are appended to the agent system prompt
@@ -262,6 +262,8 @@ Supported `llm.provider` values:
  - `anthropic`
  - `google`
  - `bedrock`
+ - `groq`
+ - `xai`
  
  ## Provider Notes
  
@@ -328,6 +330,36 @@ Uses Strands `GoogleModel` on top of `@google/genai`. Top-level options like `ap
  
  Supports `region`, `clientConfig`, and optional `apiKey`, with all other values forwarded as Bedrock model options.
  
+ ### Groq
+ 
+ Uses the Vercel AI SDK Groq provider (`@ai-sdk/groq`) on top of Strands `VercelModel`. Provider-specific settings `apiKey`, `baseURL`, and `headers` are picked up; other values are forwarded into the model config (`temperature`, `maxTokens`, etc.). Defaults to `GROQ_API_KEY` from the environment when no `apiKey` is supplied.
+ 
+ ```json
+ {
+   "provider": "groq",
+   "model": "gemma2-9b-it",
+   "params": {
+     "apiKey": "...",
+     "temperature": 0.7
+   }
+ }
+ ```
+ 
+ ### xAI
+ 
+ Uses the Vercel AI SDK xAI provider (`@ai-sdk/xai`) on top of Strands `VercelModel`. Provider-specific settings `apiKey`, `baseURL`, and `headers` are picked up; other values are forwarded into the model config (`temperature`, `maxTokens`, etc.). Defaults to `XAI_API_KEY` from the environment when no `apiKey` is supplied.
+ 
+ ```json
+ {
+   "provider": "xai",
+   "model": "grok-4.20-non-reasoning",
+   "params": {
+     "apiKey": "...",
+     "temperature": 0.7
+   }
+ }
+ ```
+ 
  ## MCP Configuration
  
  `mcp.json` is stored as:
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "hoomanjs",
-   "version": "1.3.0",
+   "version": "1.5.0",
    "description": "Bun-powered local AI agent CLI with chat, exec, ACP, MCP, and skills support.",
    "author": {
      "name": "Vaibhav Pandey",
@@ -51,6 +51,8 @@
    "dependencies": {
      "@agentclientprotocol/sdk": "^0.18.2",
      "@ai-sdk/anthropic": "^3.0.69",
+     "@ai-sdk/groq": "^3.0.35",
+     "@ai-sdk/xai": "^3.0.83",
      "@aws-sdk/client-bedrock-runtime": "^3.1028.0",
      "@google/genai": "^1.40.0",
      "@huggingface/transformers": "^4.0.1",
@@ -5,9 +5,11 @@ import { z } from "zod";
  export enum LlmProvider {
    Anthropic = "anthropic",
    Google = "google",
+   Groq = "groq",
    OpenAI = "openai",
    Ollama = "ollama",
    Bedrock = "bedrock",
+   Xai = "xai",
  }
  
  const LlmSchema = z.object({
@@ -0,0 +1,48 @@
+ import { createGroq, groq } from "@ai-sdk/groq";
+ import { VercelModel } from "@strands-agents/sdk/models/vercel";
+ import type { GroqProviderSettings } from "@ai-sdk/groq";
+ import type { VercelModelConfig } from "@strands-agents/sdk/models/vercel";
+ import { omit, pick } from "lodash";
+ 
+ const PROVIDER_SETTINGS_KEYS = ["apiKey", "baseURL", "headers"] as const;
+ 
+ function pickProviderSettings(
+   params: Record<string, unknown>,
+ ): GroqProviderSettings {
+   const picked = pick(params, [...PROVIDER_SETTINGS_KEYS]) as Record<
+     string,
+     unknown
+   >;
+   const unset = Object.keys(picked).filter((k) => picked[k] === undefined);
+   return omit(picked, unset) as GroqProviderSettings;
+ }
+ 
+ function pickVercelModelConfig(
+   params: Record<string, unknown>,
+ ): Partial<VercelModelConfig> {
+   return omit(params, [
+     ...PROVIDER_SETTINGS_KEYS,
+   ]) as Partial<VercelModelConfig>;
+ }
+ 
+ /**
+  * Groq via AI SDK + Strands {@link VercelModel}.
+  *
+  * - **`config.llm.model`**: model id passed to `groq(...)` (e.g. `gemma2-9b-it`).
+  * - **`params`**: {@link GroqProviderSettings} (`apiKey`, `baseURL`, `headers`).
+  *   If none are set, the default provider is used (`GROQ_API_KEY` from env).
+  * - Any other `params` keys are forwarded as {@link VercelModelConfig} (e.g. `temperature`, `maxTokens`).
+  */
+ export function create(
+   model: string,
+   params: Record<string, unknown> = {},
+ ): VercelModel {
+   const settings = pickProviderSettings(params);
+   const provider =
+     Object.keys(settings).length > 0 ? createGroq(settings) : groq;
+   const config = pickVercelModelConfig(params);
+   return new VercelModel({
+     provider: provider(model),
+     ...config,
+   });
+ }
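
For orientation, a minimal sketch of how this new module's `create` export could be called directly. It only uses what the diff shows; the import path and surrounding usage are illustrative, not taken from the package:

```ts
// Illustrative usage of the new Groq module; the import path is assumed,
// since the file's location inside the package is not shown in this diff.
import { create } from "./groq.ts";

// "apiKey" is routed to createGroq() as a provider setting, while
// "temperature" and "maxTokens" fall through into the VercelModel config.
const tunedModel = create("gemma2-9b-it", {
  apiKey: process.env.GROQ_API_KEY,
  temperature: 0.7,
  maxTokens: 1024,
});

// With no provider settings in params, the default `groq` provider is used,
// which itself reads GROQ_API_KEY from the environment.
const defaultModel = create("gemma2-9b-it", { temperature: 0.2 });
```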
@@ -11,6 +11,8 @@ export const modelProviders: Record<string, () => Promise<ModelProvider>> = {
    anthropic: () => import("./anthropic.ts"),
    bedrock: () => import("./bedrock.ts"),
    google: () => import("./google.ts"),
+   groq: () => import("./groq.ts"),
    ollama: () => import("./ollama/index.ts"),
    openai: () => import("./openai.ts"),
+   xai: () => import("./xai.ts"),
  };
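
Each registry entry is a lazy dynamic import. A rough sketch of how a configured provider name might be resolved through it; the `ModelProvider` shape (a module exposing `create`) is inferred from the groq/xai modules in this diff, and the import path is illustrative:

```ts
// Assumed resolution flow, based on the registry above: look up the provider,
// await its dynamic import, then build the model from the llm config.
import { modelProviders } from "./index.ts"; // illustrative path

async function loadModel(
  provider: string,
  model: string,
  params: Record<string, unknown>,
) {
  const load = modelProviders[provider];
  if (!load) {
    throw new Error(`Unsupported llm.provider: ${provider}`);
  }
  const mod = await load(); // e.g. the ./groq.ts or ./xai.ts module
  return mod.create(model, params); // assumes ModelProvider exposes create()
}

// loadModel("groq", "gemma2-9b-it", { temperature: 0.7 });
```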
@@ -0,0 +1,47 @@
+ import { createXai, xai } from "@ai-sdk/xai";
+ import { VercelModel } from "@strands-agents/sdk/models/vercel";
+ import type { XaiProviderSettings } from "@ai-sdk/xai";
+ import type { VercelModelConfig } from "@strands-agents/sdk/models/vercel";
+ import { omit, pick } from "lodash";
+ 
+ const PROVIDER_SETTINGS_KEYS = ["apiKey", "baseURL", "headers"] as const;
+ 
+ function pickProviderSettings(
+   params: Record<string, unknown>,
+ ): XaiProviderSettings {
+   const picked = pick(params, [...PROVIDER_SETTINGS_KEYS]) as Record<
+     string,
+     unknown
+   >;
+   const unset = Object.keys(picked).filter((k) => picked[k] === undefined);
+   return omit(picked, unset) as XaiProviderSettings;
+ }
+ 
+ function pickVercelModelConfig(
+   params: Record<string, unknown>,
+ ): Partial<VercelModelConfig> {
+   return omit(params, [
+     ...PROVIDER_SETTINGS_KEYS,
+   ]) as Partial<VercelModelConfig>;
+ }
+ 
+ /**
+  * xAI (Grok) via AI SDK + Strands {@link VercelModel}.
+  *
+  * - **`config.llm.model`**: model id passed to `xai(...)` (e.g. `grok-4.20-non-reasoning`).
+  * - **`params`**: {@link XaiProviderSettings} (`apiKey`, `baseURL`, `headers`).
+  *   If none are set, the default provider is used (`XAI_API_KEY` from env).
+  * - Any other `params` keys are forwarded as {@link VercelModelConfig} (e.g. `temperature`, `maxTokens`).
+  */
+ export function create(
+   model: string,
+   params: Record<string, unknown> = {},
+ ): VercelModel {
+   const settings = pickProviderSettings(params);
+   const provider = Object.keys(settings).length > 0 ? createXai(settings) : xai;
+   const config = pickVercelModelConfig(params);
+   return new VercelModel({
+     provider: provider(model),
+     ...config,
+   });
+ }
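
The xAI module mirrors the Groq one except for the SDK factory and the environment variable. As a sketch (import path assumed), leaving out all provider settings exercises the `XAI_API_KEY` default:

```ts
// Illustrative: with no apiKey/baseURL/headers in params, pickProviderSettings
// returns an empty object, so the default `xai` provider is used and it reads
// XAI_API_KEY from the environment.
import { create } from "./xai.ts"; // assumed path

const model = create("grok-4.20-non-reasoning", { temperature: 0.7 });
```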