@digipair/skill-dsp 0.21.15 → 0.22.1

This diff shows the content changes between publicly available package versions as released to their respective public registries, and is provided for informational purposes only.
package/index.cjs.js CHANGED
@@ -20358,7 +20358,6 @@ about the parse state.
  var _a;
  var depth = action >> 19 /* Action.ReduceDepthShift */ , type = action & 65535 /* Action.ValueMask */ ;
  var parser = this.p.parser;
- if (this.reducePos < this.pos - 25 /* Lookahead.Margin */ ) this.setLookAhead(this.pos);
  var dPrec = parser.dynamicPrecedence(type);
  if (dPrec) this.score += dPrec;
  if (depth == 0) {
@@ -21311,7 +21310,7 @@ function cutAt(tree, pos, side) {
  cursor.moveTo(pos);
  for(;;){
  if (!(side < 0 ? cursor.childBefore(pos) : cursor.childAfter(pos))) for(;;){
- if ((side < 0 ? cursor.to < pos : cursor.from > pos) && !cursor.type.isError) return side < 0 ? Math.max(0, Math.min(cursor.to - 1, pos - 25 /* Lookahead.Margin */ )) : Math.min(tree.length, Math.max(cursor.from + 1, pos + 25 /* Lookahead.Margin */ ));
+ if ((side < 0 ? cursor.to < pos : cursor.from > pos) && !cursor.type.isError) return side < 0 ? Math.max(0, Math.min(cursor.to - 1, pos - 25 /* Safety.Margin */ )) : Math.min(tree.length, Math.max(cursor.from + 1, pos + 25 /* Safety.Margin */ ));
  if (side < 0 ? cursor.prevSibling() : cursor.nextSibling()) break;
  if (!cursor.parent()) return side < 0 ? 0 : tree.length;
  }
@@ -21434,7 +21433,7 @@ var TokenCache = /*#__PURE__*/ function() {
  token.mask = mask;
  token.context = context;
  }
- if (token.lookAhead > token.end + 25 /* Lookahead.Margin */ ) lookAhead = Math.max(token.lookAhead, lookAhead);
+ if (token.lookAhead > token.end + 25 /* Safety.Margin */ ) lookAhead = Math.max(token.lookAhead, lookAhead);
  if (token.value != 0 /* Term.Err */ ) {
  var startIndex = actionIndex;
  if (token.extended > -1) actionIndex = this.addActions(stack, token.extended, token.end, actionIndex);
@@ -24375,7 +24374,6 @@ var parser = LRParser.deserialize({
  },
  dynamicPrecedences: {
  "31": -1,
- "67": 1,
  "71": -1,
  "73": -1
  },
@@ -28637,7 +28635,7 @@ var axBaseAIDefaultConfig = function() {
  };
  var AxBaseAI = /*#__PURE__*/ function() {
  function AxBaseAI(param) {
- var name = param.name, apiURL = param.apiURL, headers = param.headers, modelInfo = param.modelInfo, models = param.models, _param_options = param.options, options = _param_options === void 0 ? {} : _param_options, supportFor = param.supportFor, modelMap = param.modelMap;
+ var name = param.name, apiURL = param.apiURL, headers = param.headers, modelInfo = param.modelInfo, models = param.models, _param_options = param.options, options = _param_options === void 0 ? {} : _param_options, supportFor = param.supportFor;
  _class_call_check$p(this, AxBaseAI);
  _define_property$p(this, "generateChatReq", void 0);
  _define_property$p(this, "generateEmbedReq", void 0);
@@ -28649,29 +28647,48 @@ var AxBaseAI = /*#__PURE__*/ function() {
  _define_property$p(this, "fetch", void 0);
  _define_property$p(this, "tracer", void 0);
  _define_property$p(this, "modelMap", void 0);
- _define_property$p(this, "modelInfo", void 0);
+ _define_property$p(this, "embedModelMap", void 0);
  _define_property$p(this, "modelUsage", void 0);
  _define_property$p(this, "embedModelUsage", void 0);
- _define_property$p(this, "models", void 0);
  _define_property$p(this, "apiURL", void 0);
  _define_property$p(this, "name", void 0);
  _define_property$p(this, "headers", void 0);
+ _define_property$p(this, "modelInfo", void 0);
+ _define_property$p(this, "embedModelInfo", void 0);
  _define_property$p(this, "supportFor", void 0);
  this.name = name;
  this.apiURL = apiURL;
  this.headers = headers;
  this.supportFor = supportFor;
  this.tracer = options.tracer;
- this.modelInfo = modelInfo;
- this.modelMap = modelMap;
- var _modelMap_models_model, _models_embedModel, _modelMap_;
- this.models = {
- model: (_modelMap_models_model = modelMap === null || modelMap === void 0 ? void 0 : modelMap[models.model]) !== null && _modelMap_models_model !== void 0 ? _modelMap_models_model : models.model,
- embedModel: (_modelMap_ = modelMap === null || modelMap === void 0 ? void 0 : modelMap[(_models_embedModel = models.embedModel) !== null && _models_embedModel !== void 0 ? _models_embedModel : ""]) !== null && _modelMap_ !== void 0 ? _modelMap_ : models.embedModel
- };
- if (!models.model || typeof models.model !== "string" || models.model === "") {
+ var model = this.getModel(models.model);
+ var embedModel = this.getEmbedModel(models.embedModel);
+ if (typeof model === "string") {
+ var modelName = model.replace(/-0\d+$|-\d{2,}$/, "");
+ var _modelInfo_filter_at;
+ this.modelInfo = (_modelInfo_filter_at = modelInfo.filter(function(v) {
+ return v.name === modelName;
+ }).at(0)) !== null && _modelInfo_filter_at !== void 0 ? _modelInfo_filter_at : {
+ name: model,
+ currency: "usd",
+ promptTokenCostPer1M: 0,
+ completionTokenCostPer1M: 0
+ };
+ } else {
  throw new Error("No model defined");
  }
+ if (typeof embedModel === "string") {
+ var embedModelName = embedModel === null || embedModel === void 0 ? void 0 : embedModel.replace(/-0\d+$|-\d{2,}$/, "");
+ var _modelInfo_filter_at1;
+ this.embedModelInfo = (_modelInfo_filter_at1 = modelInfo.filter(function(v) {
+ return v.name === embedModelName;
+ }).at(0)) !== null && _modelInfo_filter_at1 !== void 0 ? _modelInfo_filter_at1 : {
+ name: embedModel !== null && embedModel !== void 0 ? embedModel : "",
+ currency: "usd",
+ promptTokenCostPer1M: 0,
+ completionTokenCostPer1M: 0
+ };
+ }
  this.setOptions(options);
  }
  _create_class$c(AxBaseAI, [
@@ -28711,27 +28728,21 @@ var AxBaseAI = /*#__PURE__*/ function() {
  }
  },
  {
- key: "_getModelInfo",
- value: function _getModelInfo(model) {
- var _this_modelMap;
- var _this_modelMap_model;
- var _model = (_this_modelMap_model = (_this_modelMap = this.modelMap) === null || _this_modelMap === void 0 ? void 0 : _this_modelMap[model]) !== null && _this_modelMap_model !== void 0 ? _this_modelMap_model : model;
- var modelName = _model.replace(/-0\d+$|-\d{2,}$/, "");
- var _this_modelInfo_filter_at;
- return (_this_modelInfo_filter_at = this.modelInfo.filter(function(v) {
- return v.name === modelName;
- }).at(0)) !== null && _this_modelInfo_filter_at !== void 0 ? _this_modelInfo_filter_at : {
- name: model,
- currency: "usd",
- promptTokenCostPer1M: 0,
- completionTokenCostPer1M: 0
- };
+ key: "setModelMap",
+ value: function setModelMap(modelMap) {
+ this.modelMap = modelMap;
+ }
+ },
+ {
+ key: "setEmbedModelMap",
+ value: function setEmbedModelMap(embedModelMap) {
+ this.embedModelMap = embedModelMap;
  }
  },
  {
  key: "getModelInfo",
  value: function getModelInfo() {
- return _object_spread_props$8(_object_spread$h({}, this._getModelInfo(this.models.model)), {
+ return _object_spread_props$8(_object_spread$h({}, this.modelInfo), {
  provider: this.name
  });
  }
@@ -28739,17 +28750,9 @@ var AxBaseAI = /*#__PURE__*/ function() {
  {
  key: "getEmbedModelInfo",
  value: function getEmbedModelInfo() {
- if (this.models.embedModel) {
- return _object_spread_props$8(_object_spread$h({}, this._getModelInfo(this.models.embedModel)), {
- provider: this.name
- });
- }
- }
- },
- {
- key: "getModelMap",
- value: function getModelMap() {
- return this.modelMap;
+ return this.embedModelInfo ? _object_spread_props$8(_object_spread$h({}, this.embedModelInfo), {
+ provider: this.name
+ }) : undefined;
  }
  },
  {
@@ -28772,14 +28775,13 @@ var AxBaseAI = /*#__PURE__*/ function() {
  },
  {
  key: "chat",
- value: function chat(req, options) {
+ value: function chat(_req, options) {
  var _this = this;
  return _async_to_generator$7(function() {
- var _this_modelMap, _this_modelMap_req_model, model, _mc_stopSequences, _this_tracer, mc, _obj;
+ var _mc_stopSequences, _this_tracer, mc, _obj;
  return _ts_generator$8(this, function(_state) {
  switch(_state.label){
  case 0:
- model = req.model ? (_this_modelMap_req_model = (_this_modelMap = _this.modelMap) === null || _this_modelMap === void 0 ? void 0 : _this_modelMap[req.model]) !== null && _this_modelMap_req_model !== void 0 ? _this_modelMap_req_model : req.model : _this.models.model;
  if (!_this.tracer) return [
  3,
  2
@@ -28789,7 +28791,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
  4,
  (_this_tracer = _this.tracer) === null || _this_tracer === void 0 ? void 0 : _this_tracer.startActiveSpan("Chat Request", {
  kind: AxSpanKind.SERVER,
- attributes: (_obj = {}, _define_property$p(_obj, axSpanAttributes.LLM_SYSTEM, _this.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MODEL, model), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MAX_TOKENS, mc.maxTokens), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TEMPERATURE, mc.temperature), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TOP_P, mc.topP), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TOP_K, mc.topK), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_FREQUENCY_PENALTY, mc.frequencyPenalty), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_PRESENCE_PENALTY, mc.presencePenalty), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_STOP_SEQUENCES, (_mc_stopSequences = mc.stopSequences) === null || _mc_stopSequences === void 0 ? void 0 : _mc_stopSequences.join(", ")), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_LLM_IS_STREAMING, mc.stream), _obj)
+ attributes: (_obj = {}, _define_property$p(_obj, axSpanAttributes.LLM_SYSTEM, _this.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MODEL, _this.modelInfo.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MAX_TOKENS, mc.maxTokens), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TEMPERATURE, mc.temperature), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TOP_P, mc.topP), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TOP_K, mc.topK), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_FREQUENCY_PENALTY, mc.frequencyPenalty), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_PRESENCE_PENALTY, mc.presencePenalty), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_STOP_SEQUENCES, (_mc_stopSequences = mc.stopSequences) === null || _mc_stopSequences === void 0 ? void 0 : _mc_stopSequences.join(", ")), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_LLM_IS_STREAMING, mc.stream), _obj)
  }, function() {
  var _ref = _async_to_generator$7(function(span) {
  var res;
@@ -28798,7 +28800,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
  case 0:
  return [
  4,
- _this._chat(model, req, options, span)
+ _this._chat(_req, options, span)
  ];
  case 1:
  res = _state.sent();
@@ -28823,7 +28825,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
  case 2:
  return [
  4,
- _this._chat(model, req, options)
+ _this._chat(_req, options)
  ];
  case 3:
  return [
@@ -28837,10 +28839,10 @@ var AxBaseAI = /*#__PURE__*/ function() {
  },
  {
  key: "_chat",
- value: function _chat(model, chatReq, options, span) {
+ value: function _chat(_req, options, span) {
  var _this = this;
  return _async_to_generator$7(function() {
- var _chatReq_modelConfig, reqFn, _options_stream, stream, functions, req, fn, rv, _tmp, respFn, wrappedRespFn, doneCb, st, res;
+ var _req_modelConfig, reqFn, _options_stream, stream, functions, req, fn, rv, _tmp, respFn, wrappedRespFn, doneCb, st, res;
  return _ts_generator$8(this, function(_state) {
  switch(_state.label){
  case 0:
@@ -28848,14 +28850,11 @@ var AxBaseAI = /*#__PURE__*/ function() {
  throw new Error("generateChatReq not implemented");
  }
  reqFn = _this.generateChatReq;
- stream = (_options_stream = options === null || options === void 0 ? void 0 : options.stream) !== null && _options_stream !== void 0 ? _options_stream : (_chatReq_modelConfig = chatReq.modelConfig) === null || _chatReq_modelConfig === void 0 ? void 0 : _chatReq_modelConfig.stream;
- if (chatReq.functions && chatReq.functions.length > 0) {
- functions = chatReq.functions;
- }
- req = _object_spread_props$8(_object_spread$h({}, chatReq), {
- model: model,
+ stream = (_options_stream = options === null || options === void 0 ? void 0 : options.stream) !== null && _options_stream !== void 0 ? _options_stream : (_req_modelConfig = _req.modelConfig) === null || _req_modelConfig === void 0 ? void 0 : _req_modelConfig.stream;
+ functions = _req.functions && _req.functions.length > 0 ? _req.functions : undefined;
+ req = _object_spread_props$8(_object_spread$h({}, _req), {
  functions: functions,
- modelConfig: _object_spread_props$8(_object_spread$h({}, chatReq.modelConfig), {
+ modelConfig: _object_spread_props$8(_object_spread$h({}, _req.modelConfig), {
  stream: stream
  })
  });
@@ -28992,14 +28991,10 @@ var AxBaseAI = /*#__PURE__*/ function() {
  value: function embed(req, options) {
  var _this = this;
  return _async_to_generator$7(function() {
- var _this_modelMap, _this_modelMap_req_embedModel, embedModel, _this_tracer, _req_embedModel, _obj;
+ var _this_tracer, _obj;
  return _ts_generator$8(this, function(_state) {
  switch(_state.label){
  case 0:
- embedModel = req.embedModel ? (_this_modelMap_req_embedModel = (_this_modelMap = _this.modelMap) === null || _this_modelMap === void 0 ? void 0 : _this_modelMap[req.embedModel]) !== null && _this_modelMap_req_embedModel !== void 0 ? _this_modelMap_req_embedModel : req.embedModel : _this.models.embedModel;
- if (!embedModel) {
- throw new Error("No embed model defined");
- }
  if (!_this.tracer) return [
  3,
  2
@@ -29008,7 +29003,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
  4,
  (_this_tracer = _this.tracer) === null || _this_tracer === void 0 ? void 0 : _this_tracer.startActiveSpan("Embed Request", {
  kind: AxSpanKind.SERVER,
- attributes: (_obj = {}, _define_property$p(_obj, axSpanAttributes.LLM_SYSTEM, _this.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MODEL, (_req_embedModel = req.embedModel) !== null && _req_embedModel !== void 0 ? _req_embedModel : _this.models.embedModel), _obj)
+ attributes: (_obj = {}, _define_property$p(_obj, axSpanAttributes.LLM_SYSTEM, _this.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MODEL, _this.modelInfo.name), _obj)
  }, function() {
  var _ref = _async_to_generator$7(function(span) {
  var res;
@@ -29017,7 +29012,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
  case 0:
  return [
  4,
- _this._embed(embedModel, req, options, span)
+ _this._embed(req, options, span)
  ];
  case 1:
  res = _state.sent();
@@ -29042,7 +29037,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
  case 2:
  return [
  2,
- _this._embed(embedModel, req, options)
+ _this._embed(req, options)
  ];
  }
  });
@@ -29051,10 +29046,10 @@ var AxBaseAI = /*#__PURE__*/ function() {
  },
  {
  key: "_embed",
- value: function _embed(embedModel, embedReq, options, span) {
+ value: function _embed(req, options, span) {
  var _this = this;
  return _async_to_generator$7(function() {
- var req, fn, resValue, _tmp, res, _res_modelUsage_completionTokens, _obj;
+ var fn, resValue, _tmp, res, _res_modelUsage_completionTokens, _obj;
  return _ts_generator$8(this, function(_state) {
  switch(_state.label){
  case 0:
@@ -29064,9 +29059,6 @@ var AxBaseAI = /*#__PURE__*/ function() {
  if (!_this.generateEmbedResp) {
  throw new Error("generateEmbedResp not implemented");
  }
- req = _object_spread_props$8(_object_spread$h({}, embedReq), {
- embedModel: embedModel
- });
  fn = function() {
  var _ref = _async_to_generator$7(function() {
  var _this_generateEmbedReq, apiConfig, reqValue, res;
@@ -29148,6 +29140,22 @@ var AxBaseAI = /*#__PURE__*/ function() {
  var headers = arguments.length > 0 && arguments[0] !== void 0 ? arguments[0] : {};
  return _object_spread$h({}, headers, this.headers);
  }
+ },
+ {
+ key: "getEmbedModel",
+ value: function getEmbedModel(name) {
+ var _this_embedModelMap;
+ var _this_embedModelMap_name;
+ return name ? (_this_embedModelMap_name = (_this_embedModelMap = this.embedModelMap) === null || _this_embedModelMap === void 0 ? void 0 : _this_embedModelMap[name]) !== null && _this_embedModelMap_name !== void 0 ? _this_embedModelMap_name : name : undefined;
+ }
+ },
+ {
+ key: "getModel",
+ value: function getModel(name) {
+ var _this_modelMap;
+ var _this_modelMap_name;
+ return (_this_modelMap_name = (_this_modelMap = this.modelMap) === null || _this_modelMap === void 0 ? void 0 : _this_modelMap[name]) !== null && _this_modelMap_name !== void 0 ? _this_modelMap_name : name;
+ }
  }
  ]);
  return AxBaseAI;
@@ -29311,7 +29319,6 @@ var AxAIOpenAIModel;
  (function(AxAIOpenAIModel) {
  AxAIOpenAIModel["GPT4"] = "gpt-4";
  AxAIOpenAIModel["GPT4O"] = "gpt-4o";
- AxAIOpenAIModel["GPT4OMini"] = "gpt-4o-mini";
  AxAIOpenAIModel["GPT4Turbo"] = "gpt-4-turbo";
  AxAIOpenAIModel["GPT35Turbo"] = "gpt-3.5-turbo";
  AxAIOpenAIModel["GPT35TurboInstruct"] = "gpt-3.5-turbo-instruct";
@@ -29342,12 +29349,6 @@ var AxAIOpenAIEmbedModel;
  promptTokenCostPer1M: 5,
  completionTokenCostPer1M: 15
  },
- {
- name: AxAIOpenAIModel.GPT4OMini,
- currency: "usd",
- promptTokenCostPer1M: 0.15,
- completionTokenCostPer1M: 0.6
- },
  {
  name: AxAIOpenAIModel.GPT4Turbo,
  currency: "usd",
@@ -29503,7 +29504,7 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
  _inherits$f(AxAIOpenAI, AxBaseAI);
  var _super = _create_super$f(AxAIOpenAI);
  function AxAIOpenAI(param) {
- var apiKey = param.apiKey, config = param.config, options = param.options, apiURL = param.apiURL, _param_modelInfo = param.modelInfo, modelInfo = _param_modelInfo === void 0 ? axModelInfoOpenAI : _param_modelInfo, modelMap = param.modelMap;
+ var apiKey = param.apiKey, config = param.config, options = param.options, apiURL = param.apiURL, _param_modelInfo = param.modelInfo, modelInfo = _param_modelInfo === void 0 ? axModelInfoOpenAI : _param_modelInfo;
  _class_call_check$o(this, AxAIOpenAI);
  var _this;
  if (!apiKey || apiKey === "") {
@@ -29525,16 +29526,14 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
  supportFor: {
  functions: true,
  streaming: true
- },
- modelMap: modelMap
+ }
  });
  _define_property$o(_assert_this_initialized$f(_this), "config", void 0);
  _define_property$o(_assert_this_initialized$f(_this), "streamingUsage", void 0);
- _define_property$o(_assert_this_initialized$f(_this), "dimensions", void 0);
  _define_property$o(_assert_this_initialized$f(_this), "generateChatReq", function(req, // eslint-disable-next-line @typescript-eslint/no-unused-vars
  _config) {
  var _req_functions, _req_modelConfig, _req_modelConfig1, _this_config, _this_config1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4, _req_modelConfig5, _req_modelConfig6, _req_modelConfig7;
- var model = req.model;
+ var model = _this.config.model;
  if (!req.chatPrompt || req.chatPrompt.length === 0) {
  throw new Error("Chat prompt is empty");
  }
@@ -29587,7 +29586,7 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
  ];
  });
  _define_property$o(_assert_this_initialized$f(_this), "generateEmbedReq", function(req) {
- var model = req.embedModel;
+ var model = _this.config.embedModel;
  if (!model) {
  throw new Error("Embed model not set");
  }
@@ -29599,8 +29598,7 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
  };
  var reqValue = {
  model: model,
- input: req.texts,
- dimensions: _this.dimensions
+ input: req.texts
  };
  return [
  apiConfig,
@@ -29709,7 +29707,6 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
  _this.config = _config;
  var _options_streamingUsage;
  _this.streamingUsage = (_options_streamingUsage = options === null || options === void 0 ? void 0 : options.streamingUsage) !== null && _options_streamingUsage !== void 0 ? _options_streamingUsage : true;
- _this.dimensions = config === null || config === void 0 ? void 0 : config.dimensions;
  return _this;
  }
  _create_class$b(AxAIOpenAI, [
@@ -29943,7 +29940,7 @@ var AxAIAzureOpenAI = /*#__PURE__*/ function(AxAIOpenAI) {
  _inherits$e(AxAIAzureOpenAI, AxAIOpenAI);
  var _super = _create_super$e(AxAIAzureOpenAI);
  function AxAIAzureOpenAI(param) {
- var apiKey = param.apiKey, resourceName = param.resourceName, deploymentName = param.deploymentName, _param_version = param.version, version = _param_version === void 0 ? "api-version=2024-02-15-preview" : _param_version, config = param.config, options = param.options, modelMap = param.modelMap;
+ var apiKey = param.apiKey, resourceName = param.resourceName, deploymentName = param.deploymentName, _param_version = param.version, version = _param_version === void 0 ? "api-version=2024-02-15-preview" : _param_version, config = param.config, options = param.options;
  _class_call_check$n(this, AxAIAzureOpenAI);
  if (!apiKey || apiKey === "") {
  throw new Error("Azure OpenAPI API key not set");
@@ -29958,8 +29955,7 @@ var AxAIAzureOpenAI = /*#__PURE__*/ function(AxAIOpenAI) {
  var _this = _super.call(this, {
  apiKey: apiKey,
  config: _config,
- options: options,
- modelMap: modelMap
+ options: options
  });
  var host = resourceName.includes("://") ? resourceName : "https://".concat(resourceName, ".openai.azure.com/");
  _get$5((_assert_this_initialized$e(_this), _get_prototype_of$e(AxAIAzureOpenAI.prototype)), "setName", _this).call(_this, "Azure OpenAI");
@@ -30105,7 +30101,7 @@ var AxAIHuggingFace = /*#__PURE__*/ function(AxBaseAI) {
  _inherits$d(AxAIHuggingFace, AxBaseAI);
  var _super = _create_super$d(AxAIHuggingFace);
  function AxAIHuggingFace(param) {
- var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
+ var apiKey = param.apiKey, config = param.config, options = param.options;
  _class_call_check$m(this, AxAIHuggingFace);
  var _this;
  if (!apiKey || apiKey === "") {
@@ -30126,14 +30122,13 @@ var AxAIHuggingFace = /*#__PURE__*/ function(AxBaseAI) {
  supportFor: {
  functions: false,
  streaming: false
- },
- modelMap: modelMap
+ }
  });
  _define_property$m(_assert_this_initialized$d(_this), "config", void 0);
  _define_property$m(_assert_this_initialized$d(_this), "generateChatReq", function(req, // eslint-disable-next-line @typescript-eslint/no-unused-vars
  _config) {
  var _req_chatPrompt, _req_modelConfig, _req_modelConfig1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4;
- var model = req.model;
+ var model = _this.config.model;
  var functionsList = req.functions ? "Functions:\n".concat(JSON.stringify(req.functions, null, 2), "\n") : "";
  var prompt = (_req_chatPrompt = req.chatPrompt) === null || _req_chatPrompt === void 0 ? void 0 : _req_chatPrompt.map(function(msg) {
  switch(msg.role){
@@ -30355,7 +30350,7 @@ var AxAITogether = /*#__PURE__*/ function(AxAIOpenAI) {
  _inherits$c(AxAITogether, AxAIOpenAI);
  var _super = _create_super$c(AxAITogether);
  function AxAITogether(param) {
- var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
+ var apiKey = param.apiKey, config = param.config, options = param.options;
  _class_call_check$l(this, AxAITogether);
  if (!apiKey || apiKey === "") {
  throw new Error("Together API key not set");
@@ -30366,8 +30361,7 @@ var AxAITogether = /*#__PURE__*/ function(AxAIOpenAI) {
  config: _config,
  options: options,
  apiURL: "https://api.together.xyz/v1",
- modelInfo: axModelInfoTogether,
- modelMap: modelMap
+ modelInfo: axModelInfoTogether
  });
  _get$4((_assert_this_initialized$c(_this), _get_prototype_of$c(AxAITogether.prototype)), "setName", _this).call(_this, "Together");
  return _this;
@@ -30641,7 +30635,7 @@ var AxAICohere = /*#__PURE__*/ function(AxBaseAI) {
  _inherits$b(AxAICohere, AxBaseAI);
  var _super = _create_super$b(AxAICohere);
  function AxAICohere(param) {
- var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
+ var apiKey = param.apiKey, config = param.config, options = param.options;
  _class_call_check$k(this, AxAICohere);
  var _this;
  if (!apiKey || apiKey === "") {
@@ -30662,14 +30656,13 @@ var AxAICohere = /*#__PURE__*/ function(AxBaseAI) {
  functions: true,
  streaming: true
  },
- options: options,
- modelMap: modelMap
+ options: options
  });
  _define_property$k(_assert_this_initialized$b(_this), "config", void 0);
  _define_property$k(_assert_this_initialized$b(_this), "generateChatReq", function(req, // eslint-disable-next-line @typescript-eslint/no-unused-vars
  _config) {
  var _req_functions, _req_modelConfig, _req_modelConfig1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4, _req_modelConfig5, _req_modelConfig6;
- var model = req.model;
+ var model = _this.config.model;
  var lastChatMsg = req.chatPrompt.at(-1);
  var restOfChat = req.chatPrompt.slice(0, -1);
  var message;
@@ -30764,7 +30757,7 @@ var AxAICohere = /*#__PURE__*/ function(AxBaseAI) {
  ];
  });
  _define_property$k(_assert_this_initialized$b(_this), "generateEmbedReq", function(req) {
- var model = req.embedModel;
+ var model = _this.config.embedModel;
  if (!model) {
  throw new Error("Embed model not set");
  }
@@ -31163,7 +31156,7 @@ var safetySettings = [
  _inherits$a(AxAIGoogleGemini, AxBaseAI);
  var _super = _create_super$a(AxAIGoogleGemini);
  function AxAIGoogleGemini(param) {
- var apiKey = param.apiKey, projectId = param.projectId, region = param.region, config = param.config, options = param.options, modelMap = param.modelMap;
+ var apiKey = param.apiKey, projectId = param.projectId, region = param.region, config = param.config, options = param.options;
  _class_call_check$j(this, AxAIGoogleGemini);
  var _this;
  if (!apiKey || apiKey === "") {
@@ -31187,15 +31180,14 @@ var safetySettings = [
  supportFor: {
  functions: true,
  streaming: true
- },
- modelMap: modelMap
+ }
  });
  _define_property$j(_assert_this_initialized$a(_this), "options", void 0);
  _define_property$j(_assert_this_initialized$a(_this), "config", void 0);
  _define_property$j(_assert_this_initialized$a(_this), "apiKey", void 0);
  _define_property$j(_assert_this_initialized$a(_this), "generateChatReq", function(req) {
  var _req_modelConfig, _this_options, _req_modelConfig1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4, _req_modelConfig5;
- var model = req.model;
+ var model = _this.config.model;
  var _req_modelConfig_stream;
  var stream = (_req_modelConfig_stream = (_req_modelConfig = req.modelConfig) === null || _req_modelConfig === void 0 ? void 0 : _req_modelConfig.stream) !== null && _req_modelConfig_stream !== void 0 ? _req_modelConfig_stream : _this.config.stream;
  if (!req.chatPrompt || req.chatPrompt.length === 0) {
@@ -31375,7 +31367,7 @@ var safetySettings = [
  ];
  });
  _define_property$j(_assert_this_initialized$a(_this), "generateEmbedReq", function(req) {
- var model = req.embedModel;
+ var model = _this.config.embedModel;
  if (!model) {
  throw new Error("Embed model not set");
  }
@@ -31739,7 +31731,7 @@ var AxAIAnthropic = /*#__PURE__*/ function(AxBaseAI) {
  _inherits$9(AxAIAnthropic, AxBaseAI);
  var _super = _create_super$9(AxAIAnthropic);
  function AxAIAnthropic(param) {
- var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
+ var apiKey = param.apiKey, config = param.config, options = param.options;
  _class_call_check$i(this, AxAIAnthropic);
  var _this;
  if (!apiKey || apiKey === "") {
@@ -31761,13 +31753,12 @@ var AxAIAnthropic = /*#__PURE__*/ function(AxBaseAI) {
  supportFor: {
  functions: true,
  streaming: true
- },
- modelMap: modelMap
+ }
  });
  _define_property$i(_assert_this_initialized$9(_this), "config", void 0);
  _define_property$i(_assert_this_initialized$9(_this), "generateChatReq", function(req) {
  var _req_functions, _req_modelConfig, _req_modelConfig1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4, _req_modelConfig5;
- var model = req.model;
+ var model = _this.config.model;
  var apiConfig = {
  name: "/messages"
  };
@@ -32664,7 +32655,7 @@ var AxAIGroq = /*#__PURE__*/ function(AxAIOpenAI) {
  _inherits$8(AxAIGroq, AxAIOpenAI);
  var _super = _create_super$8(AxAIGroq);
  function AxAIGroq(param) {
- var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
+ var apiKey = param.apiKey, config = param.config, options = param.options;
  _class_call_check$g(this, AxAIGroq);
  var _this;
  if (!apiKey || apiKey === "") {
@@ -32679,8 +32670,7 @@ var AxAIGroq = /*#__PURE__*/ function(AxAIOpenAI) {
  config: _config,
  options: _options,
  apiURL: "https://api.groq.com/openai/v1",
- modelInfo: [],
- modelMap: modelMap
+ modelInfo: []
  });
  _define_property$g(_assert_this_initialized$8(_this), "setOptions", function(options) {
  var rateLimiter = _this.newRateLimiter(options);
@@ -32736,23 +32726,19 @@ var AxAIGroq = /*#__PURE__*/ function(AxAIOpenAI) {
  }
  (AxAIOpenAI);
 
- // cspell:ignore mistral, mixtral, codestral, nemo
  var AxAIMistralModel;
  (function(AxAIMistralModel) {
  AxAIMistralModel["Mistral7B"] = "open-mistral-7b";
  AxAIMistralModel["Mistral8x7B"] = "open-mixtral-8x7b";
  AxAIMistralModel["MistralSmall"] = "mistral-small-latest";
+ AxAIMistralModel["MistralMedium"] = "mistral-medium-latest";
  AxAIMistralModel["MistralLarge"] = "mistral-large-latest";
- AxAIMistralModel["Codestral"] = "codestral-latest";
- AxAIMistralModel["OpenCodestralMamba"] = "open-codestral-mamba";
- AxAIMistralModel["OpenMistralNemo"] = "open-mistral-nemo-latest";
  })(AxAIMistralModel || (AxAIMistralModel = {}));
  var AxAIMistralEmbedModels;
  (function(AxAIMistralEmbedModels) {
  AxAIMistralEmbedModels["MistralEmbed"] = "mistral-embed";
  })(AxAIMistralEmbedModels || (AxAIMistralEmbedModels = {}));
 
- // cspell:ignore mistral, mixtral, codestral, nemo
  var axModelInfoMistral = [
  {
  name: AxAIMistralModel.Mistral7B,
@@ -32773,28 +32759,16 @@ var axModelInfoMistral = [
  completionTokenCostPer1M: 6
  },
  {
- name: AxAIMistralModel.MistralLarge,
- currency: "USD",
- promptTokenCostPer1M: 8,
- completionTokenCostPer1M: 24
- },
- {
- name: AxAIMistralModel.Codestral,
+ name: AxAIMistralModel.MistralMedium,
  currency: "USD",
- promptTokenCostPer1M: 1,
- completionTokenCostPer1M: 3
+ promptTokenCostPer1M: 2.7,
+ completionTokenCostPer1M: 8.1
  },
  {
- name: AxAIMistralModel.OpenCodestralMamba,
- currency: "USD",
- promptTokenCostPer1M: 0.25,
- completionTokenCostPer1M: 0.25
- },
- {
- name: AxAIMistralModel.OpenMistralNemo,
+ name: AxAIMistralModel.MistralLarge,
  currency: "USD",
- promptTokenCostPer1M: 0.3,
- completionTokenCostPer1M: 0.3
+ promptTokenCostPer1M: 8,
+ completionTokenCostPer1M: 24
  }
  ];
 
@@ -32929,7 +32903,7 @@ var AxAIMistral = /*#__PURE__*/ function(AxAIOpenAI) {
  _inherits$7(AxAIMistral, AxAIOpenAI);
  var _super = _create_super$7(AxAIMistral);
  function AxAIMistral(param) {
- var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
+ var apiKey = param.apiKey, config = param.config, options = param.options;
  _class_call_check$f(this, AxAIMistral);
  if (!apiKey || apiKey === "") {
  throw new Error("Mistral API key not set");
@@ -32940,8 +32914,7 @@ var AxAIMistral = /*#__PURE__*/ function(AxAIOpenAI) {
  config: _config,
  options: options,
  apiURL: "https://api.mistral.ai/v1",
- modelInfo: axModelInfoMistral,
- modelMap: modelMap
+ modelInfo: axModelInfoMistral
  });
  _get$2((_assert_this_initialized$7(_this), _get_prototype_of$7(AxAIMistral.prototype)), "setName", _this).call(_this, "Mistral");
  return _this;
@@ -33105,7 +33078,7 @@ var AxAIDeepSeek = /*#__PURE__*/ function(AxAIOpenAI) {
  _inherits$6(AxAIDeepSeek, AxAIOpenAI);
  var _super = _create_super$6(AxAIDeepSeek);
  function AxAIDeepSeek(param) {
- var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
+ var apiKey = param.apiKey, config = param.config, options = param.options;
  _class_call_check$e(this, AxAIDeepSeek);
  if (!apiKey || apiKey === "") {
  throw new Error("DeepSeek API key not set");
@@ -33116,8 +33089,7 @@ var AxAIDeepSeek = /*#__PURE__*/ function(AxAIOpenAI) {
  config: _config,
  options: options,
  apiURL: "https://api.deepseek.com",
- modelInfo: axModelInfoDeepSeek,
- modelMap: modelMap
+ modelInfo: axModelInfoDeepSeek
  });
  _get$1((_assert_this_initialized$6(_this), _get_prototype_of$6(AxAIDeepSeek.prototype)), "setName", _this).call(_this, "DeepSeek");
  return _this;
@@ -33285,15 +33257,14 @@ var axAIOllamaDefaultConfig = function() {
  _inherits$5(AxAIOllama, AxAIOpenAI);
  var _super = _create_super$5(AxAIOllama);
  function AxAIOllama(param) {
- var _param_apiKey = param.apiKey, apiKey = _param_apiKey === void 0 ? "not-set" : _param_apiKey, _param_url = param.url, url = _param_url === void 0 ? "http://localhost:11434" : _param_url, config = param.config, options = param.options, modelMap = param.modelMap;
+ var _param_apiKey = param.apiKey, apiKey = _param_apiKey === void 0 ? "not-set" : _param_apiKey, _param_url = param.url, url = _param_url === void 0 ? "http://localhost:11434" : _param_url, config = param.config, options = param.options;
  _class_call_check$d(this, AxAIOllama);
  var _config = _object_spread$6({}, axAIOllamaDefaultConfig(), config);
  var _this = _super.call(this, {
  apiKey: apiKey,
  options: options,
  config: _config,
- apiURL: new URL("/api", url).href,
- modelMap: modelMap
+ apiURL: new URL("/v1", url).href
  });
  _get((_assert_this_initialized$5(_this), _get_prototype_of$5(AxAIOllama.prototype)), "setName", _this).call(_this, "Ollama");
  return _this;
@@ -33501,6 +33472,18 @@ var AxAI = /*#__PURE__*/ function() {
  }
  }
  _create_class$5(AxAI, [
+ {
+ key: "setModelMap",
+ value: function setModelMap(modelMap) {
+ this.ai.setModelMap(modelMap);
+ }
+ },
+ {
+ key: "setEmbedModelMap",
+ value: function setEmbedModelMap(modelMap) {
+ this.ai.setEmbedModelMap(modelMap);
+ }
+ },
  {
  key: "getName",
  value: function getName() {
@@ -33531,12 +33514,6 @@ var AxAI = /*#__PURE__*/ function() {
  return this.ai.getFeatures();
  }
  },
- {
- key: "getModelMap",
- value: function getModelMap() {
- return this.ai.getModelMap();
- }
- },
  {
  key: "chat",
  value: function chat(req, options) {
@@ -34095,8 +34072,8 @@ var assertRequiredFields = function(sig, values) {
  });
  if (missingFields.length > 0) {
  throw new AxAssertionError({
- message: "Output must include: t: ".concat(missingFields.map(function(f) {
- return "`".concat(f.title, ":`");
+ message: "Missing required fields: ".concat(missingFields.map(function(f) {
+ return f.name;
  }).join(", ")),
  values: values
  });
@@ -38902,7 +38879,7 @@ var AxPromptTemplate = function AxPromptTemplate(sig, fieldTemplates) {
  this.outputFormat = {
  type: "text",
  text: [
- "Use the following output format."
+ "Follow the following format."
  ].concat(_to_consumable_array$2(this.renderOutFields(this.sig.getOutputFields())), [
  "---\n\n"
  ]).join("\n\n")
@@ -40718,7 +40695,9 @@ let DspService = class DspService {
  }
  async modelOpenAI(params, _pinsSettingsList, context) {
  var _context_privates_OPENAI_API_KEY, _context_privates_OPENAI_SERVER;
- const { apiKey = (_context_privates_OPENAI_API_KEY = context.privates.OPENAI_API_KEY) != null ? _context_privates_OPENAI_API_KEY : process.env['OPENAI_API_KEY'], apiURL = (_context_privates_OPENAI_SERVER = context.privates.OPENAI_SERVER) != null ? _context_privates_OPENAI_SERVER : process.env['OPENAI_SERVER'], config, options } = params;
+ const { apiKey = (_context_privates_OPENAI_API_KEY = context.privates.OPENAI_API_KEY) != null ? _context_privates_OPENAI_API_KEY : process.env['OPENAI_API_KEY'], apiURL = (_context_privates_OPENAI_SERVER = context.privates.OPENAI_SERVER) != null ? _context_privates_OPENAI_SERVER : process.env['OPENAI_SERVER'], config = {
+ model: 'gpt-4o-mini'
+ }, options } = params;
  const modelInstance = new AxAIOpenAI({
  apiKey,
  apiURL,