llmz 0.0.34 → 0.0.35

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -1112,11 +1112,11 @@ var utils = {
1112
1112
  truncateWrappedContent: _chunkGZPN7RGHcjs.truncateWrappedContent
1113
1113
  };
1114
1114
  var execute = async (props) => {
1115
- const { executeContext } = await Promise.resolve().then(() => _interopRequireWildcard(require("./llmz-YU4TWA2R.cjs")));
1115
+ const { executeContext } = await Promise.resolve().then(() => _interopRequireWildcard(require("./llmz-XRJIDXIZ.cjs")));
1116
1116
  return executeContext(props);
1117
1117
  };
1118
1118
  var init = async () => {
1119
- await Promise.resolve().then(() => _interopRequireWildcard(require("./llmz-YU4TWA2R.cjs")));
1119
+ await Promise.resolve().then(() => _interopRequireWildcard(require("./llmz-XRJIDXIZ.cjs")));
1120
1120
  await Promise.resolve().then(() => _interopRequireWildcard(require("./component-AAEMNCHB.cjs")));
1121
1121
  await Promise.resolve().then(() => _interopRequireWildcard(require("./tool-JYXLFJGQ.cjs")));
1122
1122
  await Promise.resolve().then(() => _interopRequireWildcard(require("./exit-XAYKJ6TR.cjs")));
package/dist/index.js CHANGED
@@ -1112,11 +1112,11 @@ var utils = {
1112
1112
  truncateWrappedContent
1113
1113
  };
1114
1114
  var execute = async (props) => {
1115
- const { executeContext } = await import("./llmz-HAHKYXAJ.js");
1115
+ const { executeContext } = await import("./llmz-NX2764TL.js");
1116
1116
  return executeContext(props);
1117
1117
  };
1118
1118
  var init = async () => {
1119
- await import("./llmz-HAHKYXAJ.js");
1119
+ await import("./llmz-NX2764TL.js");
1120
1120
  await import("./component-EOMTLA64.js");
1121
1121
  await import("./tool-ADXBCB4R.js");
1122
1122
  await import("./exit-YLO7BY7Z.js");
@@ -44,6 +44,7 @@ import {
44
44
  } from "./chunk-7WRN4E42.js";
45
45
 
46
46
  // src/llmz.ts
47
+ import { Client } from "@botpress/client";
47
48
  import { Cognitive } from "@botpress/cognitive";
48
49
  import { z } from "@bpinternal/zui";
49
50
  import ms from "ms";
@@ -105,7 +106,8 @@ var _executeContext = async (props) => {
105
106
  var _a, _b;
106
107
  const controller = createJoinedAbortController([props.signal]);
107
108
  const { onIterationStart, onIterationEnd, onTrace, onExit, onBeforeExecution, onAfterTool, onBeforeTool } = props;
108
- const cognitive = Cognitive.isCognitiveClient(props.client) ? props.client : new Cognitive({ client: props.client });
109
+ const client = props.client ?? new Client();
110
+ const cognitive = Cognitive.isCognitiveClient(client) ? client : new Cognitive({ client });
109
111
  const cleanups = [];
110
112
  const ctx = new Context({
111
113
  chat: props.chat,
@@ -219,7 +221,7 @@ var executeIteration = async ({
219
221
  onBeforeTool,
220
222
  onAfterTool
221
223
  }) => {
222
- var _a, _b, _c, _d, _e;
224
+ var _a, _b, _c, _d;
223
225
  let startedAt = Date.now();
224
226
  const traces = iteration.traces;
225
227
  const model = await cognitive.getModelDetails(Array.isArray(iteration.model) ? iteration.model[0] : iteration.model);
@@ -252,7 +254,7 @@ var executeIteration = async ({
252
254
  messages: messages.filter((x) => x.role !== "system"),
253
255
  stopSequences: ctx.version.getStopTokens()
254
256
  });
255
- const out = ((_c = (_b = output.output.choices) == null ? void 0 : _b[0]) == null ? void 0 : _c.type) === "text" && typeof ((_d = output.output.choices) == null ? void 0 : _d[0].content) === "string" ? output.output.choices[0].content : null;
257
+ const out = typeof ((_c = (_b = output.output.choices) == null ? void 0 : _b[0]) == null ? void 0 : _c.content) === "string" ? output.output.choices[0].content : null;
256
258
  if (!out) {
257
259
  throw new CognitiveError("LLM did not return any text output");
258
260
  }
@@ -428,7 +430,7 @@ var executeIteration = async ({
428
430
  return iteration.end({
429
431
  type: "execution_error",
430
432
  execution_error: {
431
- message: ((_e = result == null ? void 0 : result.error) == null ? void 0 : _e.message) ?? "Unknown error occurred",
433
+ message: ((_d = result == null ? void 0 : result.error) == null ? void 0 : _d.message) ?? "Unknown error occurred",
432
434
  stack: cleanStackTrace(result.error.stack ?? "No stack trace available")
433
435
  }
434
436
  });
@@ -44,6 +44,7 @@ var _chunkWHNOR4ZUcjs = require('./chunk-WHNOR4ZU.cjs');
44
44
  var _chunkUQOBUJIQcjs = require('./chunk-UQOBUJIQ.cjs');
45
45
 
46
46
  // src/llmz.ts
47
+ var _client = require('@botpress/client');
47
48
  var _cognitive = require('@botpress/cognitive');
48
49
  var _zui = require('@bpinternal/zui');
49
50
  var _ms = require('ms'); var _ms2 = _interopRequireDefault(_ms);
@@ -105,7 +106,8 @@ var _executeContext = async (props) => {
105
106
  var _a, _b;
106
107
  const controller = createJoinedAbortController([props.signal]);
107
108
  const { onIterationStart, onIterationEnd, onTrace, onExit, onBeforeExecution, onAfterTool, onBeforeTool } = props;
108
- const cognitive = _cognitive.Cognitive.isCognitiveClient(props.client) ? props.client : new (0, _cognitive.Cognitive)({ client: props.client });
109
+ const client = _nullishCoalesce(props.client, () => ( new (0, _client.Client)()));
110
+ const cognitive = _cognitive.Cognitive.isCognitiveClient(client) ? client : new (0, _cognitive.Cognitive)({ client });
109
111
  const cleanups = [];
110
112
  const ctx = new (0, _chunkB742DUFYcjs.Context)({
111
113
  chat: props.chat,
@@ -219,7 +221,7 @@ var executeIteration = async ({
219
221
  onBeforeTool,
220
222
  onAfterTool
221
223
  }) => {
222
- var _a, _b, _c, _d, _e;
224
+ var _a, _b, _c, _d;
223
225
  let startedAt = Date.now();
224
226
  const traces = iteration.traces;
225
227
  const model = await cognitive.getModelDetails(Array.isArray(iteration.model) ? iteration.model[0] : iteration.model);
@@ -252,7 +254,7 @@ var executeIteration = async ({
252
254
  messages: messages.filter((x) => x.role !== "system"),
253
255
  stopSequences: ctx.version.getStopTokens()
254
256
  });
255
- const out = ((_c = (_b = output.output.choices) == null ? void 0 : _b[0]) == null ? void 0 : _c.type) === "text" && typeof ((_d = output.output.choices) == null ? void 0 : _d[0].content) === "string" ? output.output.choices[0].content : null;
257
+ const out = typeof ((_c = (_b = output.output.choices) == null ? void 0 : _b[0]) == null ? void 0 : _c.content) === "string" ? output.output.choices[0].content : null;
256
258
  if (!out) {
257
259
  throw new (0, _chunkPK72FAKDcjs.CognitiveError)("LLM did not return any text output");
258
260
  }
@@ -428,7 +430,7 @@ var executeIteration = async ({
428
430
  return iteration.end({
429
431
  type: "execution_error",
430
432
  execution_error: {
431
- message: _nullishCoalesce(((_e = result == null ? void 0 : result.error) == null ? void 0 : _e.message), () => ( "Unknown error occurred")),
433
+ message: _nullishCoalesce(((_d = result == null ? void 0 : result.error) == null ? void 0 : _d.message), () => ( "Unknown error occurred")),
432
434
  stack: _chunkKIN7Y247cjs.cleanStackTrace.call(void 0, _nullishCoalesce(result.error.stack, () => ( "No stack trace available")))
433
435
  }
434
436
  });
package/dist/llmz.d.ts CHANGED
@@ -167,8 +167,9 @@ export type ExecutionProps = {
167
167
  /**
168
168
  * An instance of a Botpress Client, or an instance of Cognitive Client (@botpress/cognitive).
169
169
  * This is used to generate content using the LLM and to access the Botpress API.
170
+ * If not provided, a default client will be created using environment variables.
170
171
  */
171
- client: Cognitive | BotpressClientLike;
172
+ client?: Cognitive | BotpressClientLike;
172
173
  /**
173
174
  * When provided, the execution will immediately stop when the signal is aborted.
174
175
  * This will stop the LLM generation, as well as kill the VM sandbox execution.
package/package.json CHANGED
@@ -2,7 +2,7 @@
2
2
  "name": "llmz",
3
3
  "type": "module",
4
4
  "description": "LLMz – An LLM-native Typescript VM built on top of Zui",
5
- "version": "0.0.34",
5
+ "version": "0.0.35",
6
6
  "types": "./dist/index.d.ts",
7
7
  "main": "./dist/index.cjs",
8
8
  "module": "./dist/index.js",
@@ -71,10 +71,10 @@
71
71
  "tsx": "^4.19.2"
72
72
  },
73
73
  "peerDependencies": {
74
- "@botpress/client": "1.27.2",
75
- "@botpress/cognitive": "0.3.1",
74
+ "@botpress/client": "1.28.0",
75
+ "@botpress/cognitive": "0.3.3",
76
76
  "@bpinternal/thicktoken": "^1.0.5",
77
- "@bpinternal/zui": "1.2.3"
77
+ "@bpinternal/zui": "1.3.1"
78
78
  },
79
79
  "dependenciesMeta": {
80
80
  "@bpinternal/zui": {