@botpress/cognitive 0.1.37 → 0.1.38

This diff compares the published contents of the two package versions as they appear in the public registry and is provided for informational purposes only.
@@ -1,19 +1,19 @@
  
- > @botpress/cognitive@0.1.37 build /home/runner/work/botpress/botpress/packages/cognitive
+ > @botpress/cognitive@0.1.38 build /home/runner/work/botpress/botpress/packages/cognitive
  > pnpm build:type && pnpm build:neutral && size-limit
  
  
- > @botpress/cognitive@0.1.37 build:type /home/runner/work/botpress/botpress/packages/cognitive
+ > @botpress/cognitive@0.1.38 build:type /home/runner/work/botpress/botpress/packages/cognitive
  > tsup --tsconfig tsconfig.build.json ./src/index.ts --dts-resolve --dts-only --clean
  
  CLI Building entry: ./src/index.ts
  CLI Using tsconfig: tsconfig.build.json
  CLI tsup v8.0.2
  DTS Build start
- DTS ⚡️ Build success in 14071ms
- DTS dist/index.d.ts 623.18 KB
+ DTS ⚡️ Build success in 18531ms
+ DTS dist/index.d.ts 623.39 KB
  
- > @botpress/cognitive@0.1.37 build:neutral /home/runner/work/botpress/botpress/packages/cognitive
+ > @botpress/cognitive@0.1.38 build:neutral /home/runner/work/botpress/botpress/packages/cognitive
  > ts-node -T ./build.ts --neutral
  
  Done
package/dist/index.d.ts CHANGED
@@ -18956,6 +18956,7 @@ declare class Cognitive {
      private _generateContent;
  }
  
+ type Models = 'auto' | 'auto-best' | 'auto-fast' | 'auto-reasoning' | 'auto-cheapest' | 'auto-balance' | 'anthropic:claude-3-5-haiku-20241022' | 'anthropic:claude-3-5-sonnet-20240620' | 'anthropic:claude-3-5-sonnet-20241022' | 'anthropic:claude-3-7-sonnet-20250219' | 'anthropic:claude-3-haiku-20240307' | 'anthropic:claude-sonnet-4-20250514' | 'cerebras:gpt-oss-120b' | 'cerebras:llama-4-scout-17b-16e-instruct' | 'cerebras:llama3.1-8b' | 'cerebras:llama3.3-70b' | 'cerebras:qwen-3-32b' | 'google-ai:gemini-2.5-flash' | 'google-ai:gemini-2.5-pro' | 'google-ai:models/gemini-2.0-flash' | 'groq:openai/gpt-oss-120b' | 'groq:openai/gpt-oss-20b' | 'openai:gpt-4.1-2025-04-14' | 'openai:gpt-4.1-mini-2025-04-14' | 'openai:gpt-4.1-nano-2025-04-14' | 'openai:gpt-4o-2024-11-20' | 'openai:gpt-4o-mini-2024-07-18' | 'openai:gpt-5-2025-08-07' | 'openai:gpt-5-mini-2025-08-07' | 'openai:gpt-5-nano-2025-08-07' | 'openai:o1-2024-12-17' | 'openai:o1-mini-2024-09-12' | 'openai:o3-2025-04-16' | 'openai:o3-mini-2025-01-31' | 'openai:o4-mini-2025-04-16' | 'openrouter:gpt-oss-120b' | ({} & string);
  type CognitiveRequest = {
      /**
       * @minItems 1
@@ -18972,9 +18973,9 @@ type CognitiveRequest = {
          type?: string;
      }[];
      /**
-      * Model ID or routing goal for automatic selection.
+      * Model to query. Additional models are used as fallback if the main model is unavailable
       */
-     model?: 'auto' | 'auto-best' | 'auto-fast' | 'auto-reasoning' | 'auto-cheapest' | 'auto-balance' | 'anthropic/claude-3-5-haiku-20241022' | 'anthropic/claude-3-5-sonnet-20240620' | 'anthropic/claude-3-5-sonnet-20241022' | 'anthropic/claude-3-7-sonnet-20250219' | 'anthropic/claude-3-haiku-20240307' | 'anthropic/claude-sonnet-4-20250514' | 'cerebras/gpt-oss-120b' | 'cerebras/llama-4-scout-17b-16e-instruct' | 'cerebras/llama3.1-8b' | 'cerebras/llama3.3-70b' | 'cerebras/qwen-3-32b' | 'google-ai/gemini-2.5-flash' | 'google-ai/gemini-2.5-pro' | 'google-ai/models/gemini-2.0-flash' | 'groq/openai/gpt-oss-120b' | 'groq/openai/gpt-oss-20b' | 'openai/gpt-4.1-2025-04-14' | 'openai/gpt-4.1-mini-2025-04-14' | 'openai/gpt-4.1-nano-2025-04-14' | 'openai/gpt-4o-2024-11-20' | 'openai/gpt-4o-mini-2024-07-18' | 'openai/gpt-5-2025-08-07' | 'openai/gpt-5-mini-2025-08-07' | 'openai/gpt-5-nano-2025-08-07' | 'openai/o1-2024-12-17' | 'openai/o1-mini-2024-09-12' | 'openai/o3-2025-04-16' | 'openai/o3-mini-2025-01-31' | 'openai/o4-mini-2025-04-16' | 'openrouter/gpt-oss-120b';
+     model?: Models | Models[];
      systemPrompt?: string;
      temperature?: number;
      maxTokens?: number;
@@ -19008,6 +19009,9 @@ type CognitiveStreamChunk = {
           * List of models that were tried and failed
           */
          fallbackPath?: string[];
+         debug?: {
+             [k: string]: string;
+         };
      };
  };
  type CognitiveResponse = {
@@ -19033,6 +19037,9 @@ type CognitiveResponse = {
           * List of models that were tried and failed
           */
          fallbackPath?: string[];
+         debug?: {
+             [k: string]: string;
+         };
      };
      error?: string;
  };
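
Taken together, the index.d.ts changes do three things: they extract the model union into a standalone Models type (note the provider separator changes from `/` to `:` in the new IDs), they let CognitiveRequest.model accept either a single model or an array whose extra entries act as fallbacks when the main model is unavailable, and they add an optional debug string map next to fallbackPath in the response metadata of both CognitiveStreamChunk and CognitiveResponse. The sketch below shows how a caller might use the new typings; the generateContent method name, the message fields beyond the optional type field, and the name of the metadata field holding fallbackPath/debug are assumptions, as the diff does not show them.

import { Cognitive } from '@botpress/cognitive'

// Sketch only: client wiring is out of scope, and `generateContent` is inferred
// from the private `_generateContent` member visible in the diff, not confirmed by it.
declare const cognitive: Cognitive

const response = await cognitive.generateContent({
  // New in 0.1.38: `model` may be a single Models value or an array.
  // Entries after the first are used as fallbacks if the main model is unavailable.
  model: ['openai:gpt-4.1-mini-2025-04-14', 'anthropic:claude-3-5-haiku-20241022', 'auto-fast'],
  systemPrompt: 'You are a concise assistant.',
  // Message shape abbreviated: only the optional `type` field appears in this diff;
  // `role` and `content` are assumptions.
  messages: [{ role: 'user', type: 'text', content: 'Summarize the release notes.' }],
  temperature: 0.2,
  maxTokens: 256,
})

// Also new in 0.1.38: the response metadata exposes an optional `debug` string map
// alongside `fallbackPath` (the models that were tried and failed). The `meta` field
// name is assumed; the diff only shows the nested object's members.
console.log(response.meta?.fallbackPath)
console.log(response.meta?.debug)

Because the Models union ends with ({} & string), arbitrary model IDs still typecheck, so the named literals act as editor autocompletion hints rather than a hard restriction.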
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@botpress/cognitive",
-   "version": "0.1.37",
+   "version": "0.1.38",
    "description": "Wrapper around the Botpress Client to call LLMs",
    "main": "./dist/index.cjs",
    "module": "./dist/index.mjs",