@digipair/skill-llm 0.72.6 → 0.72.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/index.cjs2.js CHANGED
@@ -58394,7 +58394,8 @@ let LLMService = class LLMService {
  const { model, prompt, schema, image } = params;
  let chain;
  if (!schema) {
- const modelInstance = await engine.executePinsList(model != null ? model : context.privates.MODEL_VISION, context, `${context.__PATH__}.model`);
+ var _ref;
+ const modelInstance = await engine.executePinsList((_ref = model != null ? model : context.privates.MODEL_VISION) != null ? _ref : context.privates.MODEL_LLM, context, `${context.__PATH__}.model`);
  chain = RunnableSequence.from([
  PromptTemplate.fromTemplate(prompt != null ? prompt : '{prompt}'),
  (text)=>[
@@ -58416,8 +58417,8 @@ let LLMService = class LLMService {
  modelInstance
  ]);
  } else {
- var _ref;
- const modelInstance = await engine.executePinsList((_ref = model != null ? model : context.privates.MODEL_VISION_JSON) != null ? _ref : context.privates.MODEL_VISION, context, `${context.__PATH__}.model`);
+ var _ref1, _ref2, _ref3;
+ const modelInstance = await engine.executePinsList((_ref3 = (_ref2 = (_ref1 = model != null ? model : context.privates.MODEL_VISION_JSON) != null ? _ref1 : context.privates.MODEL_VISION) != null ? _ref2 : context.privates.MODEL_LLM_JSON) != null ? _ref3 : context.privates.MODEL_LLM, context, `${context.__PATH__}.model`);
  const parser = new StructuredOutputParser(this.jsonSchemaToZod(schema));
  chain = RunnableSequence.from([
  PromptTemplate.fromTemplate(`${prompt != null ? prompt : '{prompt}'}
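
The transpiled `_ref` temporaries above obscure what is a straightforward widening of the model fallback chain: 0.72.6 fell back from the `model` parameter to `MODEL_VISION` (or `MODEL_VISION_JSON` then `MODEL_VISION` when a schema is supplied), while 0.72.7 additionally falls back to the plain LLM models. The sketch below folds the temporaries back into nullish coalescing; the property names are copied from the compiled bundle, but the helper function itself is hypothetical and the pre-build source is not part of this diff. The same change is mirrored in index.esm2.js below.

// Sketch only: de-transpiled view of the new fallback order in 0.72.7.
// The helper is hypothetical; names come from the compiled bundle above.
async function resolveModelInstance({ model, schema }, context, engine) {
  let selected;
  if (!schema) {
    // text branch: explicit model, then vision model, then plain LLM
    selected = model ?? context.privates.MODEL_VISION ?? context.privates.MODEL_LLM;
  } else {
    // schema branch: JSON-capable vision model, then vision model,
    // then JSON-capable LLM, then plain LLM
    selected =
      model ??
      context.privates.MODEL_VISION_JSON ??
      context.privates.MODEL_VISION ??
      context.privates.MODEL_LLM_JSON ??
      context.privates.MODEL_LLM;
  }
  return engine.executePinsList(selected, context, `${context.__PATH__}.model`);
}
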
package/index.esm2.js CHANGED
@@ -23899,14 +23899,14 @@ function indent(str, spaces) {
  var match = parseIdentifier(input, i1, namePart) || namePart && parseAdditionalSymbol(input, i1) || maybeSpace && parseSpaces(input, i1);
  // match is required
  if (!match) {
- return i = i1, tokens = tokens1, nextMatch = nextMatch1, {
+ return tokens = tokens1, nextMatch = nextMatch1, i = i1, {
  v: nextMatch1
  };
  }
  var token = match.token, offset = match.offset;
  i1 += offset;
  if (token === " ") {
- return i = i1, tokens = tokens1, nextMatch = nextMatch1, "continue";
+ return tokens = tokens1, nextMatch = nextMatch1, i = i1, "continue";
  }
  tokens1 = _to_consumable_array$D(tokens1).concat([
  token
@@ -23925,7 +23925,7 @@ function indent(str, spaces) {
  if (contextKeys.some(function(el) {
  return el.startsWith(name);
  })) {
- return i = i1, tokens = tokens1, nextMatch = nextMatch1, "continue";
+ return tokens = tokens1, nextMatch = nextMatch1, i = i1, "continue";
  }
  if (dateTimeIdentifiers.some(function(el) {
  return el === name;
@@ -23944,9 +23944,9 @@ function indent(str, spaces) {
  if (dateTimeIdentifiers.some(function(el) {
  return el.startsWith(name);
  })) {
- return i = i1, tokens = tokens1, nextMatch = nextMatch1, "continue";
+ return tokens = tokens1, nextMatch = nextMatch1, i = i1, "continue";
  }
- return i = i1, tokens = tokens1, nextMatch = nextMatch1, {
+ return tokens = tokens1, nextMatch = nextMatch1, i = i1, {
  v: nextMatch1
  };
  };
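
The hunks above in the bundled identifier/date-time parser only reorder the assignments inside comma expressions (`i`, `tokens`, `nextMatch` are written in a different order), most likely because a different transpiler emission produced this release of the bundle. Since each assignment copies an independent loop-local variable into an independent outer one, the order cannot affect the result. A minimal check, with placeholder values standing in for the real parser state:

// Sketch only: shows that the reordered comma expressions leave the
// same final state. Variable names mirror the bundle; values are made up.
let i, tokens, nextMatch;
const i1 = 42, tokens1 = ["token"], nextMatch1 = "match";

// order used in 0.72.6
i = i1, tokens = tokens1, nextMatch = nextMatch1;
const stateOld = JSON.stringify({ i, tokens, nextMatch });

// reset, then the order used in 0.72.7
i = tokens = nextMatch = undefined;
tokens = tokens1, nextMatch = nextMatch1, i = i1;
const stateNew = JSON.stringify({ i, tokens, nextMatch });

console.assert(stateOld === stateNew); // identical either way
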
@@ -86361,7 +86361,8 @@ let LLMService = class LLMService {
  const { model, prompt, schema, image } = params;
  let chain;
  if (!schema) {
- const modelInstance = await executePinsList(model != null ? model : context.privates.MODEL_VISION, context, `${context.__PATH__}.model`);
+ var _ref;
+ const modelInstance = await executePinsList((_ref = model != null ? model : context.privates.MODEL_VISION) != null ? _ref : context.privates.MODEL_LLM, context, `${context.__PATH__}.model`);
  chain = RunnableSequence.from([
  PromptTemplate.fromTemplate(prompt != null ? prompt : '{prompt}'),
  (text)=>[
@@ -86383,8 +86384,8 @@ let LLMService = class LLMService {
  modelInstance
  ]);
  } else {
- var _ref;
- const modelInstance = await executePinsList((_ref = model != null ? model : context.privates.MODEL_VISION_JSON) != null ? _ref : context.privates.MODEL_VISION, context, `${context.__PATH__}.model`);
+ var _ref1, _ref2, _ref3;
+ const modelInstance = await executePinsList((_ref3 = (_ref2 = (_ref1 = model != null ? model : context.privates.MODEL_VISION_JSON) != null ? _ref1 : context.privates.MODEL_VISION) != null ? _ref2 : context.privates.MODEL_LLM_JSON) != null ? _ref3 : context.privates.MODEL_LLM, context, `${context.__PATH__}.model`);
  const parser = new StructuredOutputParser(this.jsonSchemaToZod(schema));
  chain = RunnableSequence.from([
  PromptTemplate.fromTemplate(`${prompt != null ? prompt : '{prompt}'}
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@digipair/skill-llm",
- "version": "0.72.6",
+ "version": "0.72.7",
  "dependencies": {},
  "main": "./index.cjs.js",
  "module": "./index.esm.js"