@botpress/cognitive 0.1.43 → 0.1.45

package/dist/index.d.ts CHANGED
@@ -214,6 +214,7 @@ type CognitiveProps = {
     maxRetries?: number;
     /** Whether to use the beta client. Restricted to authorized users. */
     __experimental_beta?: boolean;
+    __debug?: boolean;
 };
 type Events = {
     aborted: (req: Request, reason?: string) => void;
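For context, a minimal sketch of how the new flag might be passed when constructing the wrapper. The `client` prop is inferred from the rest of this diff (`getExtendedClient(props.client)` in index.mjs), and treating `__debug` as a public option is an assumption since it is undocumented here:

    import { Cognitive, type BotpressClientLike } from '@botpress/cognitive'

    declare const botpressClient: BotpressClientLike // assumed to be created elsewhere

    const cognitive = new Cognitive({
      client: botpressClient,
      maxRetries: 3,
      __experimental_beta: true, // restricted to authorized users (see doc comment above)
      __debug: true,             // new in this release
    })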
@@ -1026,6 +1027,7 @@ type ClientConfig = {
     headers: Headers;
     withCredentials: boolean;
     timeout: number;
+    debug: boolean;
 };

 type CommonClientProps = {
@@ -1033,6 +1035,7 @@ type CommonClientProps = {
     timeout?: number;
     headers?: Headers;
     retry?: RetryConfig;
+    debug?: boolean;
 };
 type SimplifyTuple<T> = T extends [...infer A] ? {
     [K in keyof A]: Simplify<A[K]>;
@@ -18069,12 +18072,12 @@ declare class Client extends Client$1 implements IClient {
         pictureUrl?: string;
     }>;
     events: (props: {
+        status?: "pending" | "ignored" | "processed" | "failed" | "scheduled" | undefined;
         type?: string | undefined;
         userId?: string | undefined;
         conversationId?: string | undefined;
         workflowId?: string | undefined;
         messageId?: string | undefined;
-        status?: "pending" | "ignored" | "processed" | "failed" | "scheduled" | undefined;
     }) => AsyncCollection<{
         id: string;
         createdAt: string;
@@ -18125,13 +18128,13 @@ declare class Client extends Client$1 implements IClient {
         pictureUrl?: string;
     }>;
     tasks: (props: {
+        status?: ("timeout" | "pending" | "failed" | "in_progress" | "completed" | "blocked" | "paused" | "cancelled")[] | undefined;
         tags?: {
             [x: string]: string;
         } | undefined;
         type?: string | undefined;
         userId?: string | undefined;
         conversationId?: string | undefined;
-        status?: ("pending" | "failed" | "in_progress" | "completed" | "blocked" | "paused" | "timeout" | "cancelled")[] | undefined;
         parentTaskId?: string | undefined;
     }) => AsyncCollection<{
         id: string;
@@ -18964,6 +18967,7 @@ declare class Cognitive {
     protected _provider: ModelProvider;
     protected _downtimes: ModelPreferences['downtimes'];
     protected _useBeta: boolean;
+    protected _debug: boolean;
    private _events;
    constructor(props: CognitiveProps);
    get client(): ExtendedClient;
@@ -18979,7 +18983,7 @@ declare class Cognitive {
     private _generateContent;
 }

-type Models = 'auto' | 'best' | 'fast' | 'reasoning' | 'cheapest' | 'balance' | 'recommended' | 'reasoning' | 'general-purpose' | 'low-cost' | 'vision' | 'coding' | 'function-calling' | 'agents' | 'storytelling' | 'preview' | 'roleplay' | 'anthropic:claude-3-5-haiku-20241022' | 'anthropic:claude-3-5-sonnet-20240620' | 'anthropic:claude-3-5-sonnet-20241022' | 'anthropic:claude-3-7-sonnet-20250219' | 'anthropic:claude-3-haiku-20240307' | 'anthropic:claude-sonnet-4-20250514' | 'cerebras:gpt-oss-120b' | 'cerebras:llama-4-scout-17b-16e-instruct' | 'cerebras:llama3.1-8b' | 'cerebras:llama3.3-70b' | 'cerebras:qwen-3-32b' | 'fireworks-ai:deepseek-r1' | 'fireworks-ai:deepseek-r1-0528' | 'fireworks-ai:deepseek-r1-basic' | 'fireworks-ai:deepseek-v3-0324' | 'fireworks-ai:gpt-oss-120b' | 'fireworks-ai:gpt-oss-20b' | 'fireworks-ai:llama-v3p1-8b-instruct' | 'fireworks-ai:llama-v3p3-70b-instruct' | 'fireworks-ai:llama4-maverick-instruct-basic' | 'fireworks-ai:llama4-scout-instruct-basic' | 'fireworks-ai:mixtral-8x22b-instruct' | 'fireworks-ai:mixtral-8x7b-instruct' | 'fireworks-ai:mythomax-l2-13b' | 'google-ai:gemini-2.5-flash' | 'google-ai:gemini-2.5-pro' | 'google-ai:models/gemini-2.0-flash' | 'groq:deepseek-r1-distill-llama-70b' | 'groq:gemma2-9b-it' | 'groq:llama-3.1-8b-instant' | 'groq:llama-3.3-70b-versatile' | 'groq:openai/gpt-oss-120b' | 'groq:openai/gpt-oss-20b' | 'openai:gpt-4.1-2025-04-14' | 'openai:gpt-4.1-mini-2025-04-14' | 'openai:gpt-4.1-nano-2025-04-14' | 'openai:gpt-4o-2024-11-20' | 'openai:gpt-4o-mini-2024-07-18' | 'openai:gpt-5-2025-08-07' | 'openai:gpt-5-mini-2025-08-07' | 'openai:gpt-5-nano-2025-08-07' | 'openai:o1-2024-12-17' | 'openai:o1-mini-2024-09-12' | 'openai:o3-2025-04-16' | 'openai:o3-mini-2025-01-31' | 'openai:o4-mini-2025-04-16' | 'openrouter:gpt-oss-120b' | 'xai:grok-3' | 'xai:grok-3-mini' | 'xai:grok-4-0709' | 'xai:grok-4-fast-non-reasoning' | 'xai:grok-4-fast-reasoning' | 'xai:grok-code-fast-1' | ({} & string);
+type Models = 'auto' | 'best' | 'fast' | 'anthropic:claude-3-5-haiku-20241022' | 'anthropic:claude-3-5-sonnet-20240620' | 'anthropic:claude-3-5-sonnet-20241022' | 'anthropic:claude-3-7-sonnet-20250219' | 'anthropic:claude-3-haiku-20240307' | 'anthropic:claude-sonnet-4-20250514' | 'cerebras:gpt-oss-120b' | 'cerebras:llama-4-scout-17b-16e-instruct' | 'cerebras:llama3.1-8b' | 'cerebras:llama3.3-70b' | 'cerebras:qwen-3-32b' | 'fireworks-ai:deepseek-r1' | 'fireworks-ai:deepseek-r1-0528' | 'fireworks-ai:deepseek-r1-basic' | 'fireworks-ai:deepseek-v3-0324' | 'fireworks-ai:gpt-oss-120b' | 'fireworks-ai:gpt-oss-20b' | 'fireworks-ai:llama-v3p1-8b-instruct' | 'fireworks-ai:llama-v3p3-70b-instruct' | 'fireworks-ai:llama4-maverick-instruct-basic' | 'fireworks-ai:llama4-scout-instruct-basic' | 'fireworks-ai:mixtral-8x22b-instruct' | 'fireworks-ai:mixtral-8x7b-instruct' | 'fireworks-ai:mythomax-l2-13b' | 'google-ai:gemini-2.5-flash' | 'google-ai:gemini-2.5-pro' | 'google-ai:models/gemini-2.0-flash' | 'groq:deepseek-r1-distill-llama-70b' | 'groq:gemma2-9b-it' | 'groq:llama-3.1-8b-instant' | 'groq:llama-3.3-70b-versatile' | 'groq:openai/gpt-oss-120b' | 'groq:openai/gpt-oss-20b' | 'openai:gpt-4.1-2025-04-14' | 'openai:gpt-4.1-mini-2025-04-14' | 'openai:gpt-4.1-nano-2025-04-14' | 'openai:gpt-4o-2024-11-20' | 'openai:gpt-4o-mini-2024-07-18' | 'openai:gpt-5-2025-08-07' | 'openai:gpt-5-mini-2025-08-07' | 'openai:gpt-5-nano-2025-08-07' | 'openai:o1-2024-12-17' | 'openai:o1-mini-2024-09-12' | 'openai:o3-2025-04-16' | 'openai:o3-mini-2025-01-31' | 'openai:o4-mini-2025-04-16' | 'openrouter:gpt-oss-120b' | 'xai:grok-3' | 'xai:grok-3-mini' | 'xai:grok-4-0709' | 'xai:grok-4-fast-non-reasoning' | 'xai:grok-4-fast-reasoning' | 'xai:grok-code-fast-1' | ({} & string);
 type CognitiveRequest = {
     /**
      * @minItems 1
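The `Models` union above was trimmed to the `'auto' | 'best' | 'fast'` aliases plus provider-qualified ids, and it is now exported (see the export change at the end of this file). A small illustration using identifiers that appear in the new union; the `({} & string)` branch still admits arbitrary strings:

    import type { Models } from '@botpress/cognitive'

    const alias: Models = 'fast'                              // alias kept in the new union
    const pinned: Models = 'openai:gpt-4o-mini-2024-07-18'    // provider-qualified id from the union
    const custom: Models = 'my-provider:my-model'             // still allowed via ({} & string)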
@@ -19055,13 +19059,17 @@ type CognitiveResponse = {
     model?: string;
     usage: {
         inputTokens: number;
+        inputCost: number;
         outputTokens: number;
-        reasoningTokens?: number;
+        outputCost: number;
     };
     cost?: number;
     cached?: boolean;
+    /**
+     * Time it took for the provider to respond to the LLM query
+     */
     latency?: number;
-    stopReason?: string;
+    stopReason?: 'stop' | 'length' | 'content_filter' | 'error';
     reasoningEffort?: string;
     warnings?: {
         type: 'parameter_ignored' | 'provider_limitation' | 'deprecated_model' | 'fallback_used';
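A hedged sketch of consuming the reshaped response metadata. The field names come from the hunk above; the latency unit is an assumption (the doc comment only says it is the provider response time), and note that `usage.reasoningTokens` was removed:

    import type { CognitiveResponse } from '@botpress/cognitive'

    function summarizeUsage(res: CognitiveResponse): string {
      const { inputTokens, inputCost, outputTokens, outputCost } = res.usage
      const parts = [
        `tokens in/out: ${inputTokens}/${outputTokens}`,
        `cost in/out: ${inputCost}/${outputCost}`,
      ]
      if (res.latency !== undefined) parts.push(`provider latency: ${res.latency} ms`) // unit assumed
      if (res.stopReason) parts.push(`stop reason: ${res.stopReason}`) // now 'stop' | 'length' | 'content_filter' | 'error'
      return parts.join(', ')
    }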
@@ -19112,19 +19120,23 @@ type ClientProps = {
     botId?: string;
     token?: string;
     withCredentials?: boolean;
+    debug?: boolean;
     headers?: Record<string, string>;
 };
 type RequestOptions = {
     signal?: AbortSignal;
     timeout?: number;
 };
+
 declare class CognitiveBeta {
     private _axiosClient;
     private readonly _apiUrl;
     private readonly _timeout;
     private readonly _withCredentials;
     private readonly _headers;
+    private readonly _debug;
     constructor(props: ClientProps);
+    clone(): CognitiveBeta;
     generateText(input: CognitiveRequest, options?: RequestOptions): Promise<CognitiveResponse>;
     listModels(): Promise<Model[]>;
     generateTextStream(request: CognitiveRequest, options?: RequestOptions): AsyncGenerator<CognitiveStreamChunk, void, unknown>;
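Putting the new `ClientProps.debug` flag and the new `clone()` method together, a minimal sketch; the token value is a placeholder and the overall flow is illustrative, not a definitive recipe:

    import { CognitiveBeta } from '@botpress/cognitive'

    const beta = new CognitiveBeta({
      apiUrl: 'https://api.botpress.cloud',
      token: '<bot token>',
      debug: true, // new: also sets an X-Debug header on outgoing requests (see index.mjs below)
    })

    // clone() returns a new instance configured like the original, including the debug flag.
    const copy = beta.clone()
    const models = await copy.listModels()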
@@ -19134,4 +19146,4 @@ declare class CognitiveBeta {
 }
 declare const getCognitiveV2Model: (model: string) => Model | undefined;

-export { type BotpressClientLike, Cognitive, CognitiveBeta, type CognitiveRequest, type CognitiveResponse, type CognitiveStreamChunk, type Events, type GenerateContentInput, type GenerateContentOutput, type Model$1 as Model, type ModelPreferences, ModelProvider, RemoteModelProvider, getCognitiveV2Model };
+export { type BotpressClientLike, Cognitive, CognitiveBeta, type CognitiveRequest, type CognitiveResponse, type CognitiveStreamChunk, type Events, type GenerateContentInput, type GenerateContentOutput, type Model$1 as Model, type ModelPreferences, ModelProvider, type Models, RemoteModelProvider, getCognitiveV2Model };
package/dist/index.mjs CHANGED
@@ -1726,12 +1726,13 @@ var defaultModel = {

 // src/cognitive-v2/index.ts
 var isBrowser = () => typeof window !== "undefined" && typeof window.fetch === "function";
-var CognitiveBeta = class {
+var CognitiveBeta = class _CognitiveBeta {
   _axiosClient;
   _apiUrl;
   _timeout;
   _withCredentials;
   _headers;
+  _debug = false;
   constructor(props) {
     this._apiUrl = props.apiUrl || "https://api.botpress.cloud";
     this._timeout = props.timeout || 60001;
@@ -1743,12 +1744,25 @@ var CognitiveBeta = class {
     if (props.token) {
       this._headers["Authorization"] = `Bearer ${props.token}`;
     }
+    if (props.debug) {
+      this._debug = true;
+      this._headers["X-Debug"] = "1";
+    }
     this._axiosClient = axios.create({
       headers: this._headers,
       withCredentials: this._withCredentials,
       baseURL: this._apiUrl
     });
   }
+  clone() {
+    return new _CognitiveBeta({
+      apiUrl: this._apiUrl,
+      timeout: this._timeout,
+      withCredentials: this._withCredentials,
+      headers: this._headers,
+      debug: this._debug
+    });
+  }
   async generateText(input, options = {}) {
     const signal = options.signal ?? AbortSignal.timeout(this._timeout);
     const { data } = await this._withServerRetry(
@@ -1807,7 +1821,7 @@ var CognitiveBeta = class {
     }
     const res = await this._withServerRetry(
       () => this._axiosClient.post(
-        "/v1/generate-text-stream",
+        "/v2/cognitive/generate-text-stream",
         { ...request, stream: true },
         {
           responseType: "stream",
@@ -2140,6 +2154,7 @@ var Cognitive = class _Cognitive {
   _provider;
   _downtimes = [];
   _useBeta = false;
+  _debug = false;
   _events = createNanoEvents();
   constructor(props) {
     this._client = getExtendedClient(props.client);
@@ -2156,7 +2171,9 @@ var Cognitive = class _Cognitive {
       client: this._client.clone(),
       provider: this._provider,
       timeout: this._timeoutMs,
-      maxRetries: this._maxRetries
+      maxRetries: this._maxRetries,
+      __debug: this._debug,
+      __experimental_beta: this._useBeta
     });
     copy._models = [...this._models];
     copy._preferences = this._preferences ? { ...this._preferences } : null;
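The last hunk shows that when the high-level `Cognitive` wrapper copies itself, the copy now receives `__debug` and `__experimental_beta` instead of dropping them. The enclosing method's name is not visible in this hunk; assuming it is exposed as a `clone()`-style helper, the intended behavior would look like this:

    import { Cognitive, type BotpressClientLike } from '@botpress/cognitive'

    declare const botpressClient: BotpressClientLike // assumed to exist

    // Assumption: the enclosing method in the hunk above is exposed as Cognitive#clone().
    const original = new Cognitive({ client: botpressClient, __experimental_beta: true, __debug: true })
    const copy = original.clone()
    // In 0.1.45 the copy keeps __experimental_beta and __debug; in 0.1.43 only
    // client, provider, timeout and maxRetries were forwarded to the new instance.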