@digipair/skill-dsp 0.21.1 → 0.21.6

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their public registries.
package/index.cjs.js CHANGED
@@ -20358,6 +20358,7 @@ about the parse state.
20358
20358
  var _a;
20359
20359
  var depth = action >> 19 /* Action.ReduceDepthShift */ , type = action & 65535 /* Action.ValueMask */ ;
20360
20360
  var parser = this.p.parser;
20361
+ if (this.reducePos < this.pos - 25 /* Lookahead.Margin */ ) this.setLookAhead(this.pos);
20361
20362
  var dPrec = parser.dynamicPrecedence(type);
20362
20363
  if (dPrec) this.score += dPrec;
20363
20364
  if (depth == 0) {
@@ -21310,7 +21311,7 @@ function cutAt(tree, pos, side) {
21310
21311
  cursor.moveTo(pos);
21311
21312
  for(;;){
21312
21313
  if (!(side < 0 ? cursor.childBefore(pos) : cursor.childAfter(pos))) for(;;){
21313
- if ((side < 0 ? cursor.to < pos : cursor.from > pos) && !cursor.type.isError) return side < 0 ? Math.max(0, Math.min(cursor.to - 1, pos - 25 /* Safety.Margin */ )) : Math.min(tree.length, Math.max(cursor.from + 1, pos + 25 /* Safety.Margin */ ));
21314
+ if ((side < 0 ? cursor.to < pos : cursor.from > pos) && !cursor.type.isError) return side < 0 ? Math.max(0, Math.min(cursor.to - 1, pos - 25 /* Lookahead.Margin */ )) : Math.min(tree.length, Math.max(cursor.from + 1, pos + 25 /* Lookahead.Margin */ ));
21314
21315
  if (side < 0 ? cursor.prevSibling() : cursor.nextSibling()) break;
21315
21316
  if (!cursor.parent()) return side < 0 ? 0 : tree.length;
21316
21317
  }
@@ -21433,7 +21434,7 @@ var TokenCache = /*#__PURE__*/ function() {
21433
21434
  token.mask = mask;
21434
21435
  token.context = context;
21435
21436
  }
21436
- if (token.lookAhead > token.end + 25 /* Safety.Margin */ ) lookAhead = Math.max(token.lookAhead, lookAhead);
21437
+ if (token.lookAhead > token.end + 25 /* Lookahead.Margin */ ) lookAhead = Math.max(token.lookAhead, lookAhead);
21437
21438
  if (token.value != 0 /* Term.Err */ ) {
21438
21439
  var startIndex = actionIndex;
21439
21440
  if (token.extended > -1) actionIndex = this.addActions(stack, token.extended, token.end, actionIndex);
@@ -24374,6 +24375,7 @@ var parser = LRParser.deserialize({
24374
24375
  },
24375
24376
  dynamicPrecedences: {
24376
24377
  "31": -1,
24378
+ "67": 1,
24377
24379
  "71": -1,
24378
24380
  "73": -1
24379
24381
  },
@@ -27375,7 +27377,9 @@ const applyTemplate = (value, context)=>{
27375
27377
  result = template(context);
27376
27378
  if (result.startsWith('EVALUATE:')) {
27377
27379
  const path = result.replace(/^EVALUATE:/, '');
27378
- result = evaluate(path, context);
27380
+ result = evaluate(path, _extends({}, context, {
27381
+ getTime: (time)=>new Date(time).getTime()
27382
+ }));
27379
27383
  }
27380
27384
  } else if (typeof value === 'object' && Array.isArray(value)) {
27381
27385
  result = value.map((item)=>isPinsSettings(item) ? item : applyTemplate(item, context));
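Note: in applyTemplate, a rendered value that starts with 'EVALUATE:' is evaluated as an expression against the context; the hunk above additionally injects a getTime helper into that context. A hedged illustration (the template value and field names below are made up):

    // hypothetical rendered value: "EVALUATE:getTime(order.date) > getTime('2024-07-01')"
    // the expression after 'EVALUATE:' is evaluated with getTime available, where
    //   getTime('2024-07-01') === new Date('2024-07-01').getTime()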
@@ -28629,7 +28633,7 @@ var axBaseAIDefaultConfig = function() {
28629
28633
  };
28630
28634
  var AxBaseAI = /*#__PURE__*/ function() {
28631
28635
  function AxBaseAI(param) {
28632
- var name = param.name, apiURL = param.apiURL, headers = param.headers, modelInfo = param.modelInfo, models = param.models, _param_options = param.options, options = _param_options === void 0 ? {} : _param_options, supportFor = param.supportFor;
28636
+ var name = param.name, apiURL = param.apiURL, headers = param.headers, modelInfo = param.modelInfo, models = param.models, _param_options = param.options, options = _param_options === void 0 ? {} : _param_options, supportFor = param.supportFor, modelMap = param.modelMap;
28633
28637
  _class_call_check$p(this, AxBaseAI);
28634
28638
  _define_property$p(this, "generateChatReq", void 0);
28635
28639
  _define_property$p(this, "generateEmbedReq", void 0);
@@ -28641,48 +28645,29 @@ var AxBaseAI = /*#__PURE__*/ function() {
28641
28645
  _define_property$p(this, "fetch", void 0);
28642
28646
  _define_property$p(this, "tracer", void 0);
28643
28647
  _define_property$p(this, "modelMap", void 0);
28644
- _define_property$p(this, "embedModelMap", void 0);
28648
+ _define_property$p(this, "modelInfo", void 0);
28645
28649
  _define_property$p(this, "modelUsage", void 0);
28646
28650
  _define_property$p(this, "embedModelUsage", void 0);
28651
+ _define_property$p(this, "models", void 0);
28647
28652
  _define_property$p(this, "apiURL", void 0);
28648
28653
  _define_property$p(this, "name", void 0);
28649
28654
  _define_property$p(this, "headers", void 0);
28650
- _define_property$p(this, "modelInfo", void 0);
28651
- _define_property$p(this, "embedModelInfo", void 0);
28652
28655
  _define_property$p(this, "supportFor", void 0);
28653
28656
  this.name = name;
28654
28657
  this.apiURL = apiURL;
28655
28658
  this.headers = headers;
28656
28659
  this.supportFor = supportFor;
28657
28660
  this.tracer = options.tracer;
28658
- var model = this.getModel(models.model);
28659
- var embedModel = this.getEmbedModel(models.embedModel);
28660
- if (typeof model === "string") {
28661
- var modelName = model.replace(/-0\d+$|-\d{2,}$/, "");
28662
- var _modelInfo_filter_at;
28663
- this.modelInfo = (_modelInfo_filter_at = modelInfo.filter(function(v) {
28664
- return v.name === modelName;
28665
- }).at(0)) !== null && _modelInfo_filter_at !== void 0 ? _modelInfo_filter_at : {
28666
- name: model,
28667
- currency: "usd",
28668
- promptTokenCostPer1M: 0,
28669
- completionTokenCostPer1M: 0
28670
- };
28671
- } else {
28661
+ this.modelInfo = modelInfo;
28662
+ this.modelMap = modelMap;
28663
+ var _modelMap_models_model, _models_embedModel, _modelMap_;
28664
+ this.models = {
28665
+ model: (_modelMap_models_model = modelMap === null || modelMap === void 0 ? void 0 : modelMap[models.model]) !== null && _modelMap_models_model !== void 0 ? _modelMap_models_model : models.model,
28666
+ embedModel: (_modelMap_ = modelMap === null || modelMap === void 0 ? void 0 : modelMap[(_models_embedModel = models.embedModel) !== null && _models_embedModel !== void 0 ? _models_embedModel : ""]) !== null && _modelMap_ !== void 0 ? _modelMap_ : models.embedModel
28667
+ };
28668
+ if (!models.model || typeof models.model !== "string" || models.model === "") {
28672
28669
  throw new Error("No model defined");
28673
28670
  }
28674
- if (typeof embedModel === "string") {
28675
- var embedModelName = embedModel === null || embedModel === void 0 ? void 0 : embedModel.replace(/-0\d+$|-\d{2,}$/, "");
28676
- var _modelInfo_filter_at1;
28677
- this.embedModelInfo = (_modelInfo_filter_at1 = modelInfo.filter(function(v) {
28678
- return v.name === embedModelName;
28679
- }).at(0)) !== null && _modelInfo_filter_at1 !== void 0 ? _modelInfo_filter_at1 : {
28680
- name: embedModel !== null && embedModel !== void 0 ? embedModel : "",
28681
- currency: "usd",
28682
- promptTokenCostPer1M: 0,
28683
- completionTokenCostPer1M: 0
28684
- };
28685
- }
28686
28671
  this.setOptions(options);
28687
28672
  }
28688
28673
  _create_class$c(AxBaseAI, [
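Note: the rewritten AxBaseAI constructor takes an optional modelMap, stores modelInfo and modelMap, and resolves the configured model names through the map up front; per-model cost info is no longer computed here but looked up lazily (see _getModelInfo in the next hunk). A condensed sketch of the new resolution in modern syntax, with an illustrative map:

    // assuming a map like modelMap = { smart: 'gpt-4o', embed: 'text-embedding-3-small' }
    this.models = {
      model: modelMap?.[models.model] ?? models.model,
      embedModel: modelMap?.[models.embedModel ?? ''] ?? models.embedModel
    };
    if (!models.model || typeof models.model !== 'string') throw new Error('No model defined');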
@@ -28722,21 +28707,27 @@ var AxBaseAI = /*#__PURE__*/ function() {
28722
28707
  }
28723
28708
  },
28724
28709
  {
28725
- key: "setModelMap",
28726
- value: function setModelMap(modelMap) {
28727
- this.modelMap = modelMap;
28728
- }
28729
- },
28730
- {
28731
- key: "setEmbedModelMap",
28732
- value: function setEmbedModelMap(embedModelMap) {
28733
- this.embedModelMap = embedModelMap;
28710
+ key: "_getModelInfo",
28711
+ value: function _getModelInfo(model) {
28712
+ var _this_modelMap;
28713
+ var _this_modelMap_model;
28714
+ var _model = (_this_modelMap_model = (_this_modelMap = this.modelMap) === null || _this_modelMap === void 0 ? void 0 : _this_modelMap[model]) !== null && _this_modelMap_model !== void 0 ? _this_modelMap_model : model;
28715
+ var modelName = _model.replace(/-0\d+$|-\d{2,}$/, "");
28716
+ var _this_modelInfo_filter_at;
28717
+ return (_this_modelInfo_filter_at = this.modelInfo.filter(function(v) {
28718
+ return v.name === modelName;
28719
+ }).at(0)) !== null && _this_modelInfo_filter_at !== void 0 ? _this_modelInfo_filter_at : {
28720
+ name: model,
28721
+ currency: "usd",
28722
+ promptTokenCostPer1M: 0,
28723
+ completionTokenCostPer1M: 0
28724
+ };
28734
28725
  }
28735
28726
  },
28736
28727
  {
28737
28728
  key: "getModelInfo",
28738
28729
  value: function getModelInfo() {
28739
- return _object_spread_props$8(_object_spread$h({}, this.modelInfo), {
28730
+ return _object_spread_props$8(_object_spread$h({}, this._getModelInfo(this.models.model)), {
28740
28731
  provider: this.name
28741
28732
  });
28742
28733
  }
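Note: the new private _getModelInfo helper maps a (possibly aliased) model name through modelMap, strips trailing version suffixes, and falls back to a zero-cost entry when the model is missing from modelInfo. Roughly, as a modern-syntax paraphrase of the transpiled code above:

    _getModelInfo(model) {
      const resolved = this.modelMap?.[model] ?? model;
      const name = resolved.replace(/-0\d+$|-\d{2,}$/, '');   // e.g. 'gpt-4o-2024-05-13' -> 'gpt-4o'
      return this.modelInfo.find((v) => v.name === name)
        ?? { name: model, currency: 'usd', promptTokenCostPer1M: 0, completionTokenCostPer1M: 0 };
    }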
@@ -28744,9 +28735,17 @@ var AxBaseAI = /*#__PURE__*/ function() {
28744
28735
  {
28745
28736
  key: "getEmbedModelInfo",
28746
28737
  value: function getEmbedModelInfo() {
28747
- return this.embedModelInfo ? _object_spread_props$8(_object_spread$h({}, this.embedModelInfo), {
28748
- provider: this.name
28749
- }) : undefined;
28738
+ if (this.models.embedModel) {
28739
+ return _object_spread_props$8(_object_spread$h({}, this._getModelInfo(this.models.embedModel)), {
28740
+ provider: this.name
28741
+ });
28742
+ }
28743
+ }
28744
+ },
28745
+ {
28746
+ key: "getModelMap",
28747
+ value: function getModelMap() {
28748
+ return this.modelMap;
28750
28749
  }
28751
28750
  },
28752
28751
  {
@@ -28769,13 +28768,14 @@ var AxBaseAI = /*#__PURE__*/ function() {
28769
28768
  },
28770
28769
  {
28771
28770
  key: "chat",
28772
- value: function chat(_req, options) {
28771
+ value: function chat(req, options) {
28773
28772
  var _this = this;
28774
28773
  return _async_to_generator$7(function() {
28775
- var _mc_stopSequences, _this_tracer, mc, _obj;
28774
+ var _this_modelMap, _this_modelMap_req_model, model, _mc_stopSequences, _this_tracer, mc, _obj;
28776
28775
  return _ts_generator$8(this, function(_state) {
28777
28776
  switch(_state.label){
28778
28777
  case 0:
28778
+ model = req.model ? (_this_modelMap_req_model = (_this_modelMap = _this.modelMap) === null || _this_modelMap === void 0 ? void 0 : _this_modelMap[req.model]) !== null && _this_modelMap_req_model !== void 0 ? _this_modelMap_req_model : req.model : _this.models.model;
28779
28779
  if (!_this.tracer) return [
28780
28780
  3,
28781
28781
  2
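Note: chat() now supports a per-request model override: when req.model is set it is resolved through modelMap (falling back to the raw value), otherwise the default this.models.model is used, and the resolved name is passed to _chat and recorded on the tracing span. A hedged usage sketch (the 'fast' alias and chat prompt shape are assumptions):

    // assuming the instance was constructed with modelMap: { fast: 'gpt-4o-mini' }
    const res = await ai.chat({
      model: 'fast',                                    // resolved to 'gpt-4o-mini' before the request is built
      chatPrompt: [{ role: 'user', content: 'Hello' }]  // prompt shape assumed from this bundle
    });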
@@ -28785,7 +28785,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
28785
28785
  4,
28786
28786
  (_this_tracer = _this.tracer) === null || _this_tracer === void 0 ? void 0 : _this_tracer.startActiveSpan("Chat Request", {
28787
28787
  kind: AxSpanKind.SERVER,
28788
- attributes: (_obj = {}, _define_property$p(_obj, axSpanAttributes.LLM_SYSTEM, _this.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MODEL, _this.modelInfo.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MAX_TOKENS, mc.maxTokens), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TEMPERATURE, mc.temperature), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TOP_P, mc.topP), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TOP_K, mc.topK), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_FREQUENCY_PENALTY, mc.frequencyPenalty), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_PRESENCE_PENALTY, mc.presencePenalty), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_STOP_SEQUENCES, (_mc_stopSequences = mc.stopSequences) === null || _mc_stopSequences === void 0 ? void 0 : _mc_stopSequences.join(", ")), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_LLM_IS_STREAMING, mc.stream), _obj)
28788
+ attributes: (_obj = {}, _define_property$p(_obj, axSpanAttributes.LLM_SYSTEM, _this.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MODEL, model), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MAX_TOKENS, mc.maxTokens), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TEMPERATURE, mc.temperature), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TOP_P, mc.topP), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TOP_K, mc.topK), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_FREQUENCY_PENALTY, mc.frequencyPenalty), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_PRESENCE_PENALTY, mc.presencePenalty), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_STOP_SEQUENCES, (_mc_stopSequences = mc.stopSequences) === null || _mc_stopSequences === void 0 ? void 0 : _mc_stopSequences.join(", ")), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_LLM_IS_STREAMING, mc.stream), _obj)
28789
28789
  }, function() {
28790
28790
  var _ref = _async_to_generator$7(function(span) {
28791
28791
  var res;
@@ -28794,7 +28794,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
28794
28794
  case 0:
28795
28795
  return [
28796
28796
  4,
28797
- _this._chat(_req, options, span)
28797
+ _this._chat(model, req, options, span)
28798
28798
  ];
28799
28799
  case 1:
28800
28800
  res = _state.sent();
@@ -28819,7 +28819,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
28819
28819
  case 2:
28820
28820
  return [
28821
28821
  4,
28822
- _this._chat(_req, options)
28822
+ _this._chat(model, req, options)
28823
28823
  ];
28824
28824
  case 3:
28825
28825
  return [
@@ -28833,10 +28833,10 @@ var AxBaseAI = /*#__PURE__*/ function() {
28833
28833
  },
28834
28834
  {
28835
28835
  key: "_chat",
28836
- value: function _chat(_req, options, span) {
28836
+ value: function _chat(model, chatReq, options, span) {
28837
28837
  var _this = this;
28838
28838
  return _async_to_generator$7(function() {
28839
- var _req_modelConfig, reqFn, _options_stream, stream, functions, req, fn, rv, _tmp, respFn, wrappedRespFn, doneCb, st, res;
28839
+ var _chatReq_modelConfig, reqFn, _options_stream, stream, functions, req, fn, rv, _tmp, respFn, wrappedRespFn, doneCb, st, res;
28840
28840
  return _ts_generator$8(this, function(_state) {
28841
28841
  switch(_state.label){
28842
28842
  case 0:
@@ -28844,11 +28844,14 @@ var AxBaseAI = /*#__PURE__*/ function() {
28844
28844
  throw new Error("generateChatReq not implemented");
28845
28845
  }
28846
28846
  reqFn = _this.generateChatReq;
28847
- stream = (_options_stream = options === null || options === void 0 ? void 0 : options.stream) !== null && _options_stream !== void 0 ? _options_stream : (_req_modelConfig = _req.modelConfig) === null || _req_modelConfig === void 0 ? void 0 : _req_modelConfig.stream;
28848
- functions = _req.functions && _req.functions.length > 0 ? _req.functions : undefined;
28849
- req = _object_spread_props$8(_object_spread$h({}, _req), {
28847
+ stream = (_options_stream = options === null || options === void 0 ? void 0 : options.stream) !== null && _options_stream !== void 0 ? _options_stream : (_chatReq_modelConfig = chatReq.modelConfig) === null || _chatReq_modelConfig === void 0 ? void 0 : _chatReq_modelConfig.stream;
28848
+ if (chatReq.functions && chatReq.functions.length > 0) {
28849
+ functions = chatReq.functions;
28850
+ }
28851
+ req = _object_spread_props$8(_object_spread$h({}, chatReq), {
28852
+ model: model,
28850
28853
  functions: functions,
28851
- modelConfig: _object_spread_props$8(_object_spread$h({}, _req.modelConfig), {
28854
+ modelConfig: _object_spread_props$8(_object_spread$h({}, chatReq.modelConfig), {
28852
28855
  stream: stream
28853
28856
  })
28854
28857
  });
@@ -28985,10 +28988,14 @@ var AxBaseAI = /*#__PURE__*/ function() {
28985
28988
  value: function embed(req, options) {
28986
28989
  var _this = this;
28987
28990
  return _async_to_generator$7(function() {
28988
- var _this_tracer, _obj;
28991
+ var _this_modelMap, _this_modelMap_req_embedModel, embedModel, _this_tracer, _req_embedModel, _obj;
28989
28992
  return _ts_generator$8(this, function(_state) {
28990
28993
  switch(_state.label){
28991
28994
  case 0:
28995
+ embedModel = req.embedModel ? (_this_modelMap_req_embedModel = (_this_modelMap = _this.modelMap) === null || _this_modelMap === void 0 ? void 0 : _this_modelMap[req.embedModel]) !== null && _this_modelMap_req_embedModel !== void 0 ? _this_modelMap_req_embedModel : req.embedModel : _this.models.embedModel;
28996
+ if (!embedModel) {
28997
+ throw new Error("No embed model defined");
28998
+ }
28992
28999
  if (!_this.tracer) return [
28993
29000
  3,
28994
29001
  2
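Note: embed() gains the same per-request override: req.embedModel is resolved through modelMap, falling back to this.models.embedModel, and a missing embed model now throws "No embed model defined" up front. For example (the alias is illustrative):

    // 'embed-small' is looked up in modelMap if mapped, otherwise used as-is
    const out = await ai.embed({ texts: ['hello world'], embedModel: 'embed-small' });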
@@ -28997,7 +29004,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
28997
29004
  4,
28998
29005
  (_this_tracer = _this.tracer) === null || _this_tracer === void 0 ? void 0 : _this_tracer.startActiveSpan("Embed Request", {
28999
29006
  kind: AxSpanKind.SERVER,
29000
- attributes: (_obj = {}, _define_property$p(_obj, axSpanAttributes.LLM_SYSTEM, _this.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MODEL, _this.modelInfo.name), _obj)
29007
+ attributes: (_obj = {}, _define_property$p(_obj, axSpanAttributes.LLM_SYSTEM, _this.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MODEL, (_req_embedModel = req.embedModel) !== null && _req_embedModel !== void 0 ? _req_embedModel : _this.models.embedModel), _obj)
29001
29008
  }, function() {
29002
29009
  var _ref = _async_to_generator$7(function(span) {
29003
29010
  var res;
@@ -29006,7 +29013,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
29006
29013
  case 0:
29007
29014
  return [
29008
29015
  4,
29009
- _this._embed(req, options, span)
29016
+ _this._embed(embedModel, req, options, span)
29010
29017
  ];
29011
29018
  case 1:
29012
29019
  res = _state.sent();
@@ -29031,7 +29038,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
29031
29038
  case 2:
29032
29039
  return [
29033
29040
  2,
29034
- _this._embed(req, options)
29041
+ _this._embed(embedModel, req, options)
29035
29042
  ];
29036
29043
  }
29037
29044
  });
@@ -29040,10 +29047,10 @@ var AxBaseAI = /*#__PURE__*/ function() {
29040
29047
  },
29041
29048
  {
29042
29049
  key: "_embed",
29043
- value: function _embed(req, options, span) {
29050
+ value: function _embed(embedModel, embedReq, options, span) {
29044
29051
  var _this = this;
29045
29052
  return _async_to_generator$7(function() {
29046
- var fn, resValue, _tmp, res, _res_modelUsage_completionTokens, _obj;
29053
+ var req, fn, resValue, _tmp, res, _res_modelUsage_completionTokens, _obj;
29047
29054
  return _ts_generator$8(this, function(_state) {
29048
29055
  switch(_state.label){
29049
29056
  case 0:
@@ -29053,6 +29060,9 @@ var AxBaseAI = /*#__PURE__*/ function() {
29053
29060
  if (!_this.generateEmbedResp) {
29054
29061
  throw new Error("generateEmbedResp not implemented");
29055
29062
  }
29063
+ req = _object_spread_props$8(_object_spread$h({}, embedReq), {
29064
+ embedModel: embedModel
29065
+ });
29056
29066
  fn = function() {
29057
29067
  var _ref = _async_to_generator$7(function() {
29058
29068
  var _this_generateEmbedReq, apiConfig, reqValue, res;
@@ -29134,22 +29144,6 @@ var AxBaseAI = /*#__PURE__*/ function() {
29134
29144
  var headers = arguments.length > 0 && arguments[0] !== void 0 ? arguments[0] : {};
29135
29145
  return _object_spread$h({}, headers, this.headers);
29136
29146
  }
29137
- },
29138
- {
29139
- key: "getEmbedModel",
29140
- value: function getEmbedModel(name) {
29141
- var _this_embedModelMap;
29142
- var _this_embedModelMap_name;
29143
- return name ? (_this_embedModelMap_name = (_this_embedModelMap = this.embedModelMap) === null || _this_embedModelMap === void 0 ? void 0 : _this_embedModelMap[name]) !== null && _this_embedModelMap_name !== void 0 ? _this_embedModelMap_name : name : undefined;
29144
- }
29145
- },
29146
- {
29147
- key: "getModel",
29148
- value: function getModel(name) {
29149
- var _this_modelMap;
29150
- var _this_modelMap_name;
29151
- return (_this_modelMap_name = (_this_modelMap = this.modelMap) === null || _this_modelMap === void 0 ? void 0 : _this_modelMap[name]) !== null && _this_modelMap_name !== void 0 ? _this_modelMap_name : name;
29152
- }
29153
29147
  }
29154
29148
  ]);
29155
29149
  return AxBaseAI;
@@ -29313,6 +29307,7 @@ var AxAIOpenAIModel;
29313
29307
  (function(AxAIOpenAIModel) {
29314
29308
  AxAIOpenAIModel["GPT4"] = "gpt-4";
29315
29309
  AxAIOpenAIModel["GPT4O"] = "gpt-4o";
29310
+ AxAIOpenAIModel["GPT4OMini"] = "gpt-4o-mini";
29316
29311
  AxAIOpenAIModel["GPT4Turbo"] = "gpt-4-turbo";
29317
29312
  AxAIOpenAIModel["GPT35Turbo"] = "gpt-3.5-turbo";
29318
29313
  AxAIOpenAIModel["GPT35TurboInstruct"] = "gpt-3.5-turbo-instruct";
@@ -29343,6 +29338,12 @@ var AxAIOpenAIEmbedModel;
29343
29338
  promptTokenCostPer1M: 5,
29344
29339
  completionTokenCostPer1M: 15
29345
29340
  },
29341
+ {
29342
+ name: AxAIOpenAIModel.GPT4OMini,
29343
+ currency: "usd",
29344
+ promptTokenCostPer1M: 0.15,
29345
+ completionTokenCostPer1M: 0.6
29346
+ },
29346
29347
  {
29347
29348
  name: AxAIOpenAIModel.GPT4Turbo,
29348
29349
  currency: "usd",
@@ -29498,7 +29499,7 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
29498
29499
  _inherits$f(AxAIOpenAI, AxBaseAI);
29499
29500
  var _super = _create_super$f(AxAIOpenAI);
29500
29501
  function AxAIOpenAI(param) {
29501
- var apiKey = param.apiKey, config = param.config, options = param.options, apiURL = param.apiURL, _param_modelInfo = param.modelInfo, modelInfo = _param_modelInfo === void 0 ? axModelInfoOpenAI : _param_modelInfo;
29502
+ var apiKey = param.apiKey, config = param.config, options = param.options, apiURL = param.apiURL, _param_modelInfo = param.modelInfo, modelInfo = _param_modelInfo === void 0 ? axModelInfoOpenAI : _param_modelInfo, modelMap = param.modelMap;
29502
29503
  _class_call_check$o(this, AxAIOpenAI);
29503
29504
  var _this;
29504
29505
  if (!apiKey || apiKey === "") {
@@ -29520,14 +29521,16 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
29520
29521
  supportFor: {
29521
29522
  functions: true,
29522
29523
  streaming: true
29523
- }
29524
+ },
29525
+ modelMap: modelMap
29524
29526
  });
29525
29527
  _define_property$o(_assert_this_initialized$f(_this), "config", void 0);
29526
29528
  _define_property$o(_assert_this_initialized$f(_this), "streamingUsage", void 0);
29529
+ _define_property$o(_assert_this_initialized$f(_this), "dimensions", void 0);
29527
29530
  _define_property$o(_assert_this_initialized$f(_this), "generateChatReq", function(req, // eslint-disable-next-line @typescript-eslint/no-unused-vars
29528
29531
  _config) {
29529
29532
  var _req_functions, _req_modelConfig, _req_modelConfig1, _this_config, _this_config1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4, _req_modelConfig5, _req_modelConfig6, _req_modelConfig7;
29530
- var model = _this.config.model;
29533
+ var model = req.model;
29531
29534
  if (!req.chatPrompt || req.chatPrompt.length === 0) {
29532
29535
  throw new Error("Chat prompt is empty");
29533
29536
  }
@@ -29580,7 +29583,7 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
29580
29583
  ];
29581
29584
  });
29582
29585
  _define_property$o(_assert_this_initialized$f(_this), "generateEmbedReq", function(req) {
29583
- var model = _this.config.embedModel;
29586
+ var model = req.embedModel;
29584
29587
  if (!model) {
29585
29588
  throw new Error("Embed model not set");
29586
29589
  }
@@ -29592,7 +29595,8 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
29592
29595
  };
29593
29596
  var reqValue = {
29594
29597
  model: model,
29595
- input: req.texts
29598
+ input: req.texts,
29599
+ dimensions: _this.dimensions
29596
29600
  };
29597
29601
  return [
29598
29602
  apiConfig,
@@ -29701,6 +29705,7 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
29701
29705
  _this.config = _config;
29702
29706
  var _options_streamingUsage;
29703
29707
  _this.streamingUsage = (_options_streamingUsage = options === null || options === void 0 ? void 0 : options.streamingUsage) !== null && _options_streamingUsage !== void 0 ? _options_streamingUsage : true;
29708
+ _this.dimensions = config === null || config === void 0 ? void 0 : config.dimensions;
29704
29709
  return _this;
29705
29710
  }
29706
29711
  _create_class$b(AxAIOpenAI, [
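Note: together, the two hunks above let the OpenAI adapter carry an optional dimensions value from its config into the embeddings request, which the OpenAI API uses to shorten text-embedding-3 vectors. A minimal sketch (the config fields shown are assumptions based on this diff):

    const ai = new AxAIOpenAI({
      apiKey: process.env.OPENAI_APIKEY,
      // dimensions is forwarded by generateEmbedReq as { model, input: req.texts, dimensions: 256 }
      config: { model: 'gpt-4o-mini', embedModel: 'text-embedding-3-small', dimensions: 256 }
    });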
@@ -29934,7 +29939,7 @@ var AxAIAzureOpenAI = /*#__PURE__*/ function(AxAIOpenAI) {
29934
29939
  _inherits$e(AxAIAzureOpenAI, AxAIOpenAI);
29935
29940
  var _super = _create_super$e(AxAIAzureOpenAI);
29936
29941
  function AxAIAzureOpenAI(param) {
29937
- var apiKey = param.apiKey, resourceName = param.resourceName, deploymentName = param.deploymentName, _param_version = param.version, version = _param_version === void 0 ? "api-version=2024-02-15-preview" : _param_version, config = param.config, options = param.options;
29942
+ var apiKey = param.apiKey, resourceName = param.resourceName, deploymentName = param.deploymentName, _param_version = param.version, version = _param_version === void 0 ? "api-version=2024-02-15-preview" : _param_version, config = param.config, options = param.options, modelMap = param.modelMap;
29938
29943
  _class_call_check$n(this, AxAIAzureOpenAI);
29939
29944
  if (!apiKey || apiKey === "") {
29940
29945
  throw new Error("Azure OpenAPI API key not set");
@@ -29949,7 +29954,8 @@ var AxAIAzureOpenAI = /*#__PURE__*/ function(AxAIOpenAI) {
29949
29954
  var _this = _super.call(this, {
29950
29955
  apiKey: apiKey,
29951
29956
  config: _config,
29952
- options: options
29957
+ options: options,
29958
+ modelMap: modelMap
29953
29959
  });
29954
29960
  var host = resourceName.includes("://") ? resourceName : "https://".concat(resourceName, ".openai.azure.com/");
29955
29961
  _get$5((_assert_this_initialized$e(_this), _get_prototype_of$e(AxAIAzureOpenAI.prototype)), "setName", _this).call(_this, "Azure OpenAI");
@@ -30095,7 +30101,7 @@ var AxAIHuggingFace = /*#__PURE__*/ function(AxBaseAI) {
30095
30101
  _inherits$d(AxAIHuggingFace, AxBaseAI);
30096
30102
  var _super = _create_super$d(AxAIHuggingFace);
30097
30103
  function AxAIHuggingFace(param) {
30098
- var apiKey = param.apiKey, config = param.config, options = param.options;
30104
+ var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
30099
30105
  _class_call_check$m(this, AxAIHuggingFace);
30100
30106
  var _this;
30101
30107
  if (!apiKey || apiKey === "") {
@@ -30116,13 +30122,14 @@ var AxAIHuggingFace = /*#__PURE__*/ function(AxBaseAI) {
30116
30122
  supportFor: {
30117
30123
  functions: false,
30118
30124
  streaming: false
30119
- }
30125
+ },
30126
+ modelMap: modelMap
30120
30127
  });
30121
30128
  _define_property$m(_assert_this_initialized$d(_this), "config", void 0);
30122
30129
  _define_property$m(_assert_this_initialized$d(_this), "generateChatReq", function(req, // eslint-disable-next-line @typescript-eslint/no-unused-vars
30123
30130
  _config) {
30124
30131
  var _req_chatPrompt, _req_modelConfig, _req_modelConfig1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4;
30125
- var model = _this.config.model;
30132
+ var model = req.model;
30126
30133
  var functionsList = req.functions ? "Functions:\n".concat(JSON.stringify(req.functions, null, 2), "\n") : "";
30127
30134
  var prompt = (_req_chatPrompt = req.chatPrompt) === null || _req_chatPrompt === void 0 ? void 0 : _req_chatPrompt.map(function(msg) {
30128
30135
  switch(msg.role){
@@ -30344,7 +30351,7 @@ var AxAITogether = /*#__PURE__*/ function(AxAIOpenAI) {
30344
30351
  _inherits$c(AxAITogether, AxAIOpenAI);
30345
30352
  var _super = _create_super$c(AxAITogether);
30346
30353
  function AxAITogether(param) {
30347
- var apiKey = param.apiKey, config = param.config, options = param.options;
30354
+ var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
30348
30355
  _class_call_check$l(this, AxAITogether);
30349
30356
  if (!apiKey || apiKey === "") {
30350
30357
  throw new Error("Together API key not set");
@@ -30355,7 +30362,8 @@ var AxAITogether = /*#__PURE__*/ function(AxAIOpenAI) {
30355
30362
  config: _config,
30356
30363
  options: options,
30357
30364
  apiURL: "https://api.together.xyz/v1",
30358
- modelInfo: axModelInfoTogether
30365
+ modelInfo: axModelInfoTogether,
30366
+ modelMap: modelMap
30359
30367
  });
30360
30368
  _get$4((_assert_this_initialized$c(_this), _get_prototype_of$c(AxAITogether.prototype)), "setName", _this).call(_this, "Together");
30361
30369
  return _this;
@@ -30629,7 +30637,7 @@ var AxAICohere = /*#__PURE__*/ function(AxBaseAI) {
30629
30637
  _inherits$b(AxAICohere, AxBaseAI);
30630
30638
  var _super = _create_super$b(AxAICohere);
30631
30639
  function AxAICohere(param) {
30632
- var apiKey = param.apiKey, config = param.config, options = param.options;
30640
+ var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
30633
30641
  _class_call_check$k(this, AxAICohere);
30634
30642
  var _this;
30635
30643
  if (!apiKey || apiKey === "") {
@@ -30650,13 +30658,14 @@ var AxAICohere = /*#__PURE__*/ function(AxBaseAI) {
30650
30658
  functions: true,
30651
30659
  streaming: true
30652
30660
  },
30653
- options: options
30661
+ options: options,
30662
+ modelMap: modelMap
30654
30663
  });
30655
30664
  _define_property$k(_assert_this_initialized$b(_this), "config", void 0);
30656
30665
  _define_property$k(_assert_this_initialized$b(_this), "generateChatReq", function(req, // eslint-disable-next-line @typescript-eslint/no-unused-vars
30657
30666
  _config) {
30658
30667
  var _req_functions, _req_modelConfig, _req_modelConfig1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4, _req_modelConfig5, _req_modelConfig6;
30659
- var model = _this.config.model;
30668
+ var model = req.model;
30660
30669
  var lastChatMsg = req.chatPrompt.at(-1);
30661
30670
  var restOfChat = req.chatPrompt.slice(0, -1);
30662
30671
  var message;
@@ -30751,7 +30760,7 @@ var AxAICohere = /*#__PURE__*/ function(AxBaseAI) {
30751
30760
  ];
30752
30761
  });
30753
30762
  _define_property$k(_assert_this_initialized$b(_this), "generateEmbedReq", function(req) {
30754
- var model = _this.config.embedModel;
30763
+ var model = req.embedModel;
30755
30764
  if (!model) {
30756
30765
  throw new Error("Embed model not set");
30757
30766
  }
@@ -31150,7 +31159,7 @@ var safetySettings = [
31150
31159
  _inherits$a(AxAIGoogleGemini, AxBaseAI);
31151
31160
  var _super = _create_super$a(AxAIGoogleGemini);
31152
31161
  function AxAIGoogleGemini(param) {
31153
- var apiKey = param.apiKey, projectId = param.projectId, region = param.region, config = param.config, options = param.options;
31162
+ var apiKey = param.apiKey, projectId = param.projectId, region = param.region, config = param.config, options = param.options, modelMap = param.modelMap;
31154
31163
  _class_call_check$j(this, AxAIGoogleGemini);
31155
31164
  var _this;
31156
31165
  if (!apiKey || apiKey === "") {
@@ -31174,14 +31183,15 @@ var safetySettings = [
31174
31183
  supportFor: {
31175
31184
  functions: true,
31176
31185
  streaming: true
31177
- }
31186
+ },
31187
+ modelMap: modelMap
31178
31188
  });
31179
31189
  _define_property$j(_assert_this_initialized$a(_this), "options", void 0);
31180
31190
  _define_property$j(_assert_this_initialized$a(_this), "config", void 0);
31181
31191
  _define_property$j(_assert_this_initialized$a(_this), "apiKey", void 0);
31182
31192
  _define_property$j(_assert_this_initialized$a(_this), "generateChatReq", function(req) {
31183
31193
  var _req_modelConfig, _this_options, _req_modelConfig1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4, _req_modelConfig5;
31184
- var model = _this.config.model;
31194
+ var model = req.model;
31185
31195
  var _req_modelConfig_stream;
31186
31196
  var stream = (_req_modelConfig_stream = (_req_modelConfig = req.modelConfig) === null || _req_modelConfig === void 0 ? void 0 : _req_modelConfig.stream) !== null && _req_modelConfig_stream !== void 0 ? _req_modelConfig_stream : _this.config.stream;
31187
31197
  if (!req.chatPrompt || req.chatPrompt.length === 0) {
@@ -31361,7 +31371,7 @@ var safetySettings = [
31361
31371
  ];
31362
31372
  });
31363
31373
  _define_property$j(_assert_this_initialized$a(_this), "generateEmbedReq", function(req) {
31364
- var model = _this.config.embedModel;
31374
+ var model = req.embedModel;
31365
31375
  if (!model) {
31366
31376
  throw new Error("Embed model not set");
31367
31377
  }
@@ -31725,7 +31735,7 @@ var AxAIAnthropic = /*#__PURE__*/ function(AxBaseAI) {
31725
31735
  _inherits$9(AxAIAnthropic, AxBaseAI);
31726
31736
  var _super = _create_super$9(AxAIAnthropic);
31727
31737
  function AxAIAnthropic(param) {
31728
- var apiKey = param.apiKey, config = param.config, options = param.options;
31738
+ var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
31729
31739
  _class_call_check$i(this, AxAIAnthropic);
31730
31740
  var _this;
31731
31741
  if (!apiKey || apiKey === "") {
@@ -31747,12 +31757,13 @@ var AxAIAnthropic = /*#__PURE__*/ function(AxBaseAI) {
31747
31757
  supportFor: {
31748
31758
  functions: true,
31749
31759
  streaming: true
31750
- }
31760
+ },
31761
+ modelMap: modelMap
31751
31762
  });
31752
31763
  _define_property$i(_assert_this_initialized$9(_this), "config", void 0);
31753
31764
  _define_property$i(_assert_this_initialized$9(_this), "generateChatReq", function(req) {
31754
31765
  var _req_functions, _req_modelConfig, _req_modelConfig1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4, _req_modelConfig5;
31755
- var model = _this.config.model;
31766
+ var model = req.model;
31756
31767
  var apiConfig = {
31757
31768
  name: "/messages"
31758
31769
  };
@@ -32649,7 +32660,7 @@ var AxAIGroq = /*#__PURE__*/ function(AxAIOpenAI) {
32649
32660
  _inherits$8(AxAIGroq, AxAIOpenAI);
32650
32661
  var _super = _create_super$8(AxAIGroq);
32651
32662
  function AxAIGroq(param) {
32652
- var apiKey = param.apiKey, config = param.config, options = param.options;
32663
+ var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
32653
32664
  _class_call_check$g(this, AxAIGroq);
32654
32665
  var _this;
32655
32666
  if (!apiKey || apiKey === "") {
@@ -32664,7 +32675,8 @@ var AxAIGroq = /*#__PURE__*/ function(AxAIOpenAI) {
32664
32675
  config: _config,
32665
32676
  options: _options,
32666
32677
  apiURL: "https://api.groq.com/openai/v1",
32667
- modelInfo: []
32678
+ modelInfo: [],
32679
+ modelMap: modelMap
32668
32680
  });
32669
32681
  _define_property$g(_assert_this_initialized$8(_this), "setOptions", function(options) {
32670
32682
  var rateLimiter = _this.newRateLimiter(options);
@@ -32720,19 +32732,23 @@ var AxAIGroq = /*#__PURE__*/ function(AxAIOpenAI) {
32720
32732
  }
32721
32733
  (AxAIOpenAI);
32722
32734
 
32735
+ // cspell:ignore mistral, mixtral, codestral, nemo
32723
32736
  var AxAIMistralModel;
32724
32737
  (function(AxAIMistralModel) {
32725
32738
  AxAIMistralModel["Mistral7B"] = "open-mistral-7b";
32726
32739
  AxAIMistralModel["Mistral8x7B"] = "open-mixtral-8x7b";
32727
32740
  AxAIMistralModel["MistralSmall"] = "mistral-small-latest";
32728
- AxAIMistralModel["MistralMedium"] = "mistral-medium-latest";
32729
32741
  AxAIMistralModel["MistralLarge"] = "mistral-large-latest";
32742
+ AxAIMistralModel["Codestral"] = "codestral-latest";
32743
+ AxAIMistralModel["OpenCodestralMamba"] = "open-codestral-mamba";
32744
+ AxAIMistralModel["OpenMistralNemo"] = "open-mistral-nemo-latest";
32730
32745
  })(AxAIMistralModel || (AxAIMistralModel = {}));
32731
32746
  var AxAIMistralEmbedModels;
32732
32747
  (function(AxAIMistralEmbedModels) {
32733
32748
  AxAIMistralEmbedModels["MistralEmbed"] = "mistral-embed";
32734
32749
  })(AxAIMistralEmbedModels || (AxAIMistralEmbedModels = {}));
32735
32750
 
32751
+ // cspell:ignore mistral, mixtral, codestral, nemo
32736
32752
  var axModelInfoMistral = [
32737
32753
  {
32738
32754
  name: AxAIMistralModel.Mistral7B,
@@ -32752,17 +32768,29 @@ var axModelInfoMistral = [
32752
32768
  promptTokenCostPer1M: 2,
32753
32769
  completionTokenCostPer1M: 6
32754
32770
  },
32755
- {
32756
- name: AxAIMistralModel.MistralMedium,
32757
- currency: "USD",
32758
- promptTokenCostPer1M: 2.7,
32759
- completionTokenCostPer1M: 8.1
32760
- },
32761
32771
  {
32762
32772
  name: AxAIMistralModel.MistralLarge,
32763
32773
  currency: "USD",
32764
32774
  promptTokenCostPer1M: 8,
32765
32775
  completionTokenCostPer1M: 24
32776
+ },
32777
+ {
32778
+ name: AxAIMistralModel.Codestral,
32779
+ currency: "USD",
32780
+ promptTokenCostPer1M: 1,
32781
+ completionTokenCostPer1M: 3
32782
+ },
32783
+ {
32784
+ name: AxAIMistralModel.OpenCodestralMamba,
32785
+ currency: "USD",
32786
+ promptTokenCostPer1M: 0.25,
32787
+ completionTokenCostPer1M: 0.25
32788
+ },
32789
+ {
32790
+ name: AxAIMistralModel.OpenMistralNemo,
32791
+ currency: "USD",
32792
+ promptTokenCostPer1M: 0.3,
32793
+ completionTokenCostPer1M: 0.3
32766
32794
  }
32767
32795
  ];
32768
32796
 
@@ -32897,7 +32925,7 @@ var AxAIMistral = /*#__PURE__*/ function(AxAIOpenAI) {
32897
32925
  _inherits$7(AxAIMistral, AxAIOpenAI);
32898
32926
  var _super = _create_super$7(AxAIMistral);
32899
32927
  function AxAIMistral(param) {
32900
- var apiKey = param.apiKey, config = param.config, options = param.options;
32928
+ var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
32901
32929
  _class_call_check$f(this, AxAIMistral);
32902
32930
  if (!apiKey || apiKey === "") {
32903
32931
  throw new Error("Mistral API key not set");
@@ -32908,7 +32936,8 @@ var AxAIMistral = /*#__PURE__*/ function(AxAIOpenAI) {
32908
32936
  config: _config,
32909
32937
  options: options,
32910
32938
  apiURL: "https://api.mistral.ai/v1",
32911
- modelInfo: axModelInfoMistral
32939
+ modelInfo: axModelInfoMistral,
32940
+ modelMap: modelMap
32912
32941
  });
32913
32942
  _get$2((_assert_this_initialized$7(_this), _get_prototype_of$7(AxAIMistral.prototype)), "setName", _this).call(_this, "Mistral");
32914
32943
  return _this;
@@ -33072,7 +33101,7 @@ var AxAIDeepSeek = /*#__PURE__*/ function(AxAIOpenAI) {
33072
33101
  _inherits$6(AxAIDeepSeek, AxAIOpenAI);
33073
33102
  var _super = _create_super$6(AxAIDeepSeek);
33074
33103
  function AxAIDeepSeek(param) {
33075
- var apiKey = param.apiKey, config = param.config, options = param.options;
33104
+ var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
33076
33105
  _class_call_check$e(this, AxAIDeepSeek);
33077
33106
  if (!apiKey || apiKey === "") {
33078
33107
  throw new Error("DeepSeek API key not set");
@@ -33083,7 +33112,8 @@ var AxAIDeepSeek = /*#__PURE__*/ function(AxAIOpenAI) {
33083
33112
  config: _config,
33084
33113
  options: options,
33085
33114
  apiURL: "https://api.deepseek.com",
33086
- modelInfo: axModelInfoDeepSeek
33115
+ modelInfo: axModelInfoDeepSeek,
33116
+ modelMap: modelMap
33087
33117
  });
33088
33118
  _get$1((_assert_this_initialized$6(_this), _get_prototype_of$6(AxAIDeepSeek.prototype)), "setName", _this).call(_this, "DeepSeek");
33089
33119
  return _this;
@@ -33251,14 +33281,15 @@ var axAIOllamaDefaultConfig = function() {
33251
33281
  _inherits$5(AxAIOllama, AxAIOpenAI);
33252
33282
  var _super = _create_super$5(AxAIOllama);
33253
33283
  function AxAIOllama(param) {
33254
- var _param_apiKey = param.apiKey, apiKey = _param_apiKey === void 0 ? "not-set" : _param_apiKey, _param_url = param.url, url = _param_url === void 0 ? "http://localhost:11434" : _param_url, config = param.config, options = param.options;
33284
+ var _param_apiKey = param.apiKey, apiKey = _param_apiKey === void 0 ? "not-set" : _param_apiKey, _param_url = param.url, url = _param_url === void 0 ? "http://localhost:11434" : _param_url, config = param.config, options = param.options, modelMap = param.modelMap;
33255
33285
  _class_call_check$d(this, AxAIOllama);
33256
33286
  var _config = _object_spread$6({}, axAIOllamaDefaultConfig(), config);
33257
33287
  var _this = _super.call(this, {
33258
33288
  apiKey: apiKey,
33259
33289
  options: options,
33260
33290
  config: _config,
33261
- apiURL: new URL("/v1", url).href
33291
+ apiURL: new URL("/api", url).href,
33292
+ modelMap: modelMap
33262
33293
  });
33263
33294
  _get((_assert_this_initialized$5(_this), _get_prototype_of$5(AxAIOllama.prototype)), "setName", _this).call(_this, "Ollama");
33264
33295
  return _this;
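Note: the Ollama adapter's base URL switches from the OpenAI-compatible /v1 path to Ollama's native /api path, so with the default url the requests now target:

    new URL('/api', 'http://localhost:11434').href   // 'http://localhost:11434/api' (previously '.../v1')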
@@ -33466,18 +33497,6 @@ var AxAI = /*#__PURE__*/ function() {
33466
33497
  }
33467
33498
  }
33468
33499
  _create_class$5(AxAI, [
33469
- {
33470
- key: "setModelMap",
33471
- value: function setModelMap(modelMap) {
33472
- this.ai.setModelMap(modelMap);
33473
- }
33474
- },
33475
- {
33476
- key: "setEmbedModelMap",
33477
- value: function setEmbedModelMap(modelMap) {
33478
- this.ai.setEmbedModelMap(modelMap);
33479
- }
33480
- },
33481
33500
  {
33482
33501
  key: "getName",
33483
33502
  value: function getName() {
@@ -33508,6 +33527,12 @@ var AxAI = /*#__PURE__*/ function() {
33508
33527
  return this.ai.getFeatures();
33509
33528
  }
33510
33529
  },
33530
+ {
33531
+ key: "getModelMap",
33532
+ value: function getModelMap() {
33533
+ return this.ai.getModelMap();
33534
+ }
33535
+ },
33511
33536
  {
33512
33537
  key: "chat",
33513
33538
  value: function chat(req, options) {
@@ -34066,8 +34091,8 @@ var assertRequiredFields = function(sig, values) {
34066
34091
  });
34067
34092
  if (missingFields.length > 0) {
34068
34093
  throw new AxAssertionError({
34069
- message: "Missing required fields: ".concat(missingFields.map(function(f) {
34070
- return f.name;
34094
+ message: "Output must include: t: ".concat(missingFields.map(function(f) {
34095
+ return "`".concat(f.title, ":`");
34071
34096
  }).join(", ")),
34072
34097
  values: values
34073
34098
  });
@@ -38873,7 +38898,7 @@ var AxPromptTemplate = function AxPromptTemplate(sig, fieldTemplates) {
38873
38898
  this.outputFormat = {
38874
38899
  type: "text",
38875
38900
  text: [
38876
- "Follow the following format."
38901
+ "Use the following output format."
38877
38902
  ].concat(_to_consumable_array$2(this.renderOutFields(this.sig.getOutputFields())), [
38878
38903
  "---\n\n"
38879
38904
  ]).join("\n\n")