@digipair/skill-dsp 0.21.15 → 0.22.2

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
package/index.cjs.js CHANGED
@@ -20358,7 +20358,6 @@ about the parse state.
20358
20358
  var _a;
20359
20359
  var depth = action >> 19 /* Action.ReduceDepthShift */ , type = action & 65535 /* Action.ValueMask */ ;
20360
20360
  var parser = this.p.parser;
20361
- if (this.reducePos < this.pos - 25 /* Lookahead.Margin */ ) this.setLookAhead(this.pos);
20362
20361
  var dPrec = parser.dynamicPrecedence(type);
20363
20362
  if (dPrec) this.score += dPrec;
20364
20363
  if (depth == 0) {
@@ -21311,7 +21310,7 @@ function cutAt(tree, pos, side) {
21311
21310
  cursor.moveTo(pos);
21312
21311
  for(;;){
21313
21312
  if (!(side < 0 ? cursor.childBefore(pos) : cursor.childAfter(pos))) for(;;){
21314
- if ((side < 0 ? cursor.to < pos : cursor.from > pos) && !cursor.type.isError) return side < 0 ? Math.max(0, Math.min(cursor.to - 1, pos - 25 /* Lookahead.Margin */ )) : Math.min(tree.length, Math.max(cursor.from + 1, pos + 25 /* Lookahead.Margin */ ));
21313
+ if ((side < 0 ? cursor.to < pos : cursor.from > pos) && !cursor.type.isError) return side < 0 ? Math.max(0, Math.min(cursor.to - 1, pos - 25 /* Safety.Margin */ )) : Math.min(tree.length, Math.max(cursor.from + 1, pos + 25 /* Safety.Margin */ ));
21315
21314
  if (side < 0 ? cursor.prevSibling() : cursor.nextSibling()) break;
21316
21315
  if (!cursor.parent()) return side < 0 ? 0 : tree.length;
21317
21316
  }
@@ -21434,7 +21433,7 @@ var TokenCache = /*#__PURE__*/ function() {
21434
21433
  token.mask = mask;
21435
21434
  token.context = context;
21436
21435
  }
21437
- if (token.lookAhead > token.end + 25 /* Lookahead.Margin */ ) lookAhead = Math.max(token.lookAhead, lookAhead);
21436
+ if (token.lookAhead > token.end + 25 /* Safety.Margin */ ) lookAhead = Math.max(token.lookAhead, lookAhead);
21438
21437
  if (token.value != 0 /* Term.Err */ ) {
21439
21438
  var startIndex = actionIndex;
21440
21439
  if (token.extended > -1) actionIndex = this.addActions(stack, token.extended, token.end, actionIndex);
@@ -23525,14 +23524,14 @@ function indent(str, spaces) {
23525
23524
  var match = parseIdentifier(input, i1, namePart) || namePart && parseAdditionalSymbol(input, i1) || maybeSpace && parseSpaces(input, i1);
23526
23525
  // match is required
23527
23526
  if (!match) {
23528
- return tokens = tokens1, i = i1, nextMatch = nextMatch1, {
23527
+ return nextMatch = nextMatch1, tokens = tokens1, i = i1, {
23529
23528
  v: nextMatch1
23530
23529
  };
23531
23530
  }
23532
23531
  var token = match.token, offset = match.offset;
23533
23532
  i1 += offset;
23534
23533
  if (token === " ") {
23535
- return tokens = tokens1, i = i1, nextMatch = nextMatch1, "continue";
23534
+ return nextMatch = nextMatch1, tokens = tokens1, i = i1, "continue";
23536
23535
  }
23537
23536
  tokens1 = _to_consumable_array$6(tokens1).concat([
23538
23537
  token
@@ -23551,7 +23550,7 @@ function indent(str, spaces) {
23551
23550
  if (contextKeys.some(function(el) {
23552
23551
  return el.startsWith(name);
23553
23552
  })) {
23554
- return tokens = tokens1, i = i1, nextMatch = nextMatch1, "continue";
23553
+ return nextMatch = nextMatch1, tokens = tokens1, i = i1, "continue";
23555
23554
  }
23556
23555
  if (dateTimeIdentifiers.some(function(el) {
23557
23556
  return el === name;
@@ -23570,9 +23569,9 @@ function indent(str, spaces) {
23570
23569
  if (dateTimeIdentifiers.some(function(el) {
23571
23570
  return el.startsWith(name);
23572
23571
  })) {
23573
- return tokens = tokens1, i = i1, nextMatch = nextMatch1, "continue";
23572
+ return nextMatch = nextMatch1, tokens = tokens1, i = i1, "continue";
23574
23573
  }
23575
- return tokens = tokens1, i = i1, nextMatch = nextMatch1, {
23574
+ return nextMatch = nextMatch1, tokens = tokens1, i = i1, {
23576
23575
  v: nextMatch1
23577
23576
  };
23578
23577
  };
@@ -24375,7 +24374,6 @@ var parser = LRParser.deserialize({
24375
24374
  },
24376
24375
  dynamicPrecedences: {
24377
24376
  "31": -1,
24378
- "67": 1,
24379
24377
  "71": -1,
24380
24378
  "73": -1
24381
24379
  },
@@ -27379,6 +27377,7 @@ const applyTemplate = (value, context)=>{
27379
27377
  const path = result.replace(/^EVALUATE:/, '');
27380
27378
  result = evaluate(path, _extends({}, context, {
27381
27379
  getTime: (time)=>new Date(time).getTime(),
27380
+ fromTime: (time)=>new Date(time).toISOString(),
27382
27381
  atob: (value)=>atob(value),
27383
27382
  btoa: (value)=>btoa(value),
27384
27383
  encodeURIComponent: (value)=>encodeURIComponent(value),
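The hunk above adds a `fromTime` helper beside the existing `getTime` in the context that `applyTemplate` hands to `evaluate` for `EVALUATE:` expressions. A minimal sketch of the two helpers as defined in the diff; the example values are illustrative and the surrounding template machinery is assumed:

```js
// Copied from the helper definitions in the diff; everything else here is illustrative.
const getTime = (time) => new Date(time).getTime();      // ISO string -> epoch milliseconds
const fromTime = (time) => new Date(time).toISOString(); // epoch milliseconds -> ISO string

console.log(getTime('2024-06-10T06:13:20.000Z')); // 1718000000000
console.log(fromTime(1718000000000));             // '2024-06-10T06:13:20.000Z'
```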
@@ -28637,7 +28636,7 @@ var axBaseAIDefaultConfig = function() {
28637
28636
  };
28638
28637
  var AxBaseAI = /*#__PURE__*/ function() {
28639
28638
  function AxBaseAI(param) {
28640
- var name = param.name, apiURL = param.apiURL, headers = param.headers, modelInfo = param.modelInfo, models = param.models, _param_options = param.options, options = _param_options === void 0 ? {} : _param_options, supportFor = param.supportFor, modelMap = param.modelMap;
28639
+ var name = param.name, apiURL = param.apiURL, headers = param.headers, modelInfo = param.modelInfo, models = param.models, _param_options = param.options, options = _param_options === void 0 ? {} : _param_options, supportFor = param.supportFor;
28641
28640
  _class_call_check$p(this, AxBaseAI);
28642
28641
  _define_property$p(this, "generateChatReq", void 0);
28643
28642
  _define_property$p(this, "generateEmbedReq", void 0);
@@ -28649,29 +28648,48 @@ var AxBaseAI = /*#__PURE__*/ function() {
28649
28648
  _define_property$p(this, "fetch", void 0);
28650
28649
  _define_property$p(this, "tracer", void 0);
28651
28650
  _define_property$p(this, "modelMap", void 0);
28652
- _define_property$p(this, "modelInfo", void 0);
28651
+ _define_property$p(this, "embedModelMap", void 0);
28653
28652
  _define_property$p(this, "modelUsage", void 0);
28654
28653
  _define_property$p(this, "embedModelUsage", void 0);
28655
- _define_property$p(this, "models", void 0);
28656
28654
  _define_property$p(this, "apiURL", void 0);
28657
28655
  _define_property$p(this, "name", void 0);
28658
28656
  _define_property$p(this, "headers", void 0);
28657
+ _define_property$p(this, "modelInfo", void 0);
28658
+ _define_property$p(this, "embedModelInfo", void 0);
28659
28659
  _define_property$p(this, "supportFor", void 0);
28660
28660
  this.name = name;
28661
28661
  this.apiURL = apiURL;
28662
28662
  this.headers = headers;
28663
28663
  this.supportFor = supportFor;
28664
28664
  this.tracer = options.tracer;
28665
- this.modelInfo = modelInfo;
28666
- this.modelMap = modelMap;
28667
- var _modelMap_models_model, _models_embedModel, _modelMap_;
28668
- this.models = {
28669
- model: (_modelMap_models_model = modelMap === null || modelMap === void 0 ? void 0 : modelMap[models.model]) !== null && _modelMap_models_model !== void 0 ? _modelMap_models_model : models.model,
28670
- embedModel: (_modelMap_ = modelMap === null || modelMap === void 0 ? void 0 : modelMap[(_models_embedModel = models.embedModel) !== null && _models_embedModel !== void 0 ? _models_embedModel : ""]) !== null && _modelMap_ !== void 0 ? _modelMap_ : models.embedModel
28671
- };
28672
- if (!models.model || typeof models.model !== "string" || models.model === "") {
28665
+ var model = this.getModel(models.model);
28666
+ var embedModel = this.getEmbedModel(models.embedModel);
28667
+ if (typeof model === "string") {
28668
+ var modelName = model.replace(/-0\d+$|-\d{2,}$/, "");
28669
+ var _modelInfo_filter_at;
28670
+ this.modelInfo = (_modelInfo_filter_at = modelInfo.filter(function(v) {
28671
+ return v.name === modelName;
28672
+ }).at(0)) !== null && _modelInfo_filter_at !== void 0 ? _modelInfo_filter_at : {
28673
+ name: model,
28674
+ currency: "usd",
28675
+ promptTokenCostPer1M: 0,
28676
+ completionTokenCostPer1M: 0
28677
+ };
28678
+ } else {
28673
28679
  throw new Error("No model defined");
28674
28680
  }
28681
+ if (typeof embedModel === "string") {
28682
+ var embedModelName = embedModel === null || embedModel === void 0 ? void 0 : embedModel.replace(/-0\d+$|-\d{2,}$/, "");
28683
+ var _modelInfo_filter_at1;
28684
+ this.embedModelInfo = (_modelInfo_filter_at1 = modelInfo.filter(function(v) {
28685
+ return v.name === embedModelName;
28686
+ }).at(0)) !== null && _modelInfo_filter_at1 !== void 0 ? _modelInfo_filter_at1 : {
28687
+ name: embedModel !== null && embedModel !== void 0 ? embedModel : "",
28688
+ currency: "usd",
28689
+ promptTokenCostPer1M: 0,
28690
+ completionTokenCostPer1M: 0
28691
+ };
28692
+ }
28675
28693
  this.setOptions(options);
28676
28694
  }
28677
28695
  _create_class$c(AxBaseAI, [
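With `modelMap` removed from the constructor options, the constructor above now resolves `modelInfo` itself: it maps the configured model through `getModel`, strips a trailing version suffix, and falls back to a zero-cost entry when no match is found. A small sketch of the suffix stripping, using the regex from the diff; the model names are illustrative and not taken from this package:

```js
// Version-suffix stripping as used above to look up modelInfo by base name.
const stripVersion = (model) => model.replace(/-0\d+$|-\d{2,}$/, '');

console.log(stripVersion('gpt-4-0613'));              // 'gpt-4'
console.log(stripVersion('claude-3-haiku-20240307')); // 'claude-3-haiku'
console.log(stripVersion('mistral-large-latest'));    // unchanged: 'mistral-large-latest'
```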
@@ -28711,27 +28729,21 @@ var AxBaseAI = /*#__PURE__*/ function() {
28711
28729
  }
28712
28730
  },
28713
28731
  {
28714
- key: "_getModelInfo",
28715
- value: function _getModelInfo(model) {
28716
- var _this_modelMap;
28717
- var _this_modelMap_model;
28718
- var _model = (_this_modelMap_model = (_this_modelMap = this.modelMap) === null || _this_modelMap === void 0 ? void 0 : _this_modelMap[model]) !== null && _this_modelMap_model !== void 0 ? _this_modelMap_model : model;
28719
- var modelName = _model.replace(/-0\d+$|-\d{2,}$/, "");
28720
- var _this_modelInfo_filter_at;
28721
- return (_this_modelInfo_filter_at = this.modelInfo.filter(function(v) {
28722
- return v.name === modelName;
28723
- }).at(0)) !== null && _this_modelInfo_filter_at !== void 0 ? _this_modelInfo_filter_at : {
28724
- name: model,
28725
- currency: "usd",
28726
- promptTokenCostPer1M: 0,
28727
- completionTokenCostPer1M: 0
28728
- };
28732
+ key: "setModelMap",
28733
+ value: function setModelMap(modelMap) {
28734
+ this.modelMap = modelMap;
28735
+ }
28736
+ },
28737
+ {
28738
+ key: "setEmbedModelMap",
28739
+ value: function setEmbedModelMap(embedModelMap) {
28740
+ this.embedModelMap = embedModelMap;
28729
28741
  }
28730
28742
  },
28731
28743
  {
28732
28744
  key: "getModelInfo",
28733
28745
  value: function getModelInfo() {
28734
- return _object_spread_props$8(_object_spread$h({}, this._getModelInfo(this.models.model)), {
28746
+ return _object_spread_props$8(_object_spread$h({}, this.modelInfo), {
28735
28747
  provider: this.name
28736
28748
  });
28737
28749
  }
@@ -28739,17 +28751,9 @@ var AxBaseAI = /*#__PURE__*/ function() {
28739
28751
  {
28740
28752
  key: "getEmbedModelInfo",
28741
28753
  value: function getEmbedModelInfo() {
28742
- if (this.models.embedModel) {
28743
- return _object_spread_props$8(_object_spread$h({}, this._getModelInfo(this.models.embedModel)), {
28744
- provider: this.name
28745
- });
28746
- }
28747
- }
28748
- },
28749
- {
28750
- key: "getModelMap",
28751
- value: function getModelMap() {
28752
- return this.modelMap;
28754
+ return this.embedModelInfo ? _object_spread_props$8(_object_spread$h({}, this.embedModelInfo), {
28755
+ provider: this.name
28756
+ }) : undefined;
28753
28757
  }
28754
28758
  },
28755
28759
  {
@@ -28772,14 +28776,13 @@ var AxBaseAI = /*#__PURE__*/ function() {
28772
28776
  },
28773
28777
  {
28774
28778
  key: "chat",
28775
- value: function chat(req, options) {
28779
+ value: function chat(_req, options) {
28776
28780
  var _this = this;
28777
28781
  return _async_to_generator$7(function() {
28778
- var _this_modelMap, _this_modelMap_req_model, model, _mc_stopSequences, _this_tracer, mc, _obj;
28782
+ var _mc_stopSequences, _this_tracer, mc, _obj;
28779
28783
  return _ts_generator$8(this, function(_state) {
28780
28784
  switch(_state.label){
28781
28785
  case 0:
28782
- model = req.model ? (_this_modelMap_req_model = (_this_modelMap = _this.modelMap) === null || _this_modelMap === void 0 ? void 0 : _this_modelMap[req.model]) !== null && _this_modelMap_req_model !== void 0 ? _this_modelMap_req_model : req.model : _this.models.model;
28783
28786
  if (!_this.tracer) return [
28784
28787
  3,
28785
28788
  2
@@ -28789,7 +28792,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
28789
28792
  4,
28790
28793
  (_this_tracer = _this.tracer) === null || _this_tracer === void 0 ? void 0 : _this_tracer.startActiveSpan("Chat Request", {
28791
28794
  kind: AxSpanKind.SERVER,
28792
- attributes: (_obj = {}, _define_property$p(_obj, axSpanAttributes.LLM_SYSTEM, _this.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MODEL, model), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MAX_TOKENS, mc.maxTokens), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TEMPERATURE, mc.temperature), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TOP_P, mc.topP), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TOP_K, mc.topK), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_FREQUENCY_PENALTY, mc.frequencyPenalty), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_PRESENCE_PENALTY, mc.presencePenalty), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_STOP_SEQUENCES, (_mc_stopSequences = mc.stopSequences) === null || _mc_stopSequences === void 0 ? void 0 : _mc_stopSequences.join(", ")), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_LLM_IS_STREAMING, mc.stream), _obj)
28795
+ attributes: (_obj = {}, _define_property$p(_obj, axSpanAttributes.LLM_SYSTEM, _this.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MODEL, _this.modelInfo.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MAX_TOKENS, mc.maxTokens), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TEMPERATURE, mc.temperature), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TOP_P, mc.topP), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TOP_K, mc.topK), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_FREQUENCY_PENALTY, mc.frequencyPenalty), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_PRESENCE_PENALTY, mc.presencePenalty), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_STOP_SEQUENCES, (_mc_stopSequences = mc.stopSequences) === null || _mc_stopSequences === void 0 ? void 0 : _mc_stopSequences.join(", ")), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_LLM_IS_STREAMING, mc.stream), _obj)
28793
28796
  }, function() {
28794
28797
  var _ref = _async_to_generator$7(function(span) {
28795
28798
  var res;
@@ -28798,7 +28801,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
28798
28801
  case 0:
28799
28802
  return [
28800
28803
  4,
28801
- _this._chat(model, req, options, span)
28804
+ _this._chat(_req, options, span)
28802
28805
  ];
28803
28806
  case 1:
28804
28807
  res = _state.sent();
@@ -28823,7 +28826,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
28823
28826
  case 2:
28824
28827
  return [
28825
28828
  4,
28826
- _this._chat(model, req, options)
28829
+ _this._chat(_req, options)
28827
28830
  ];
28828
28831
  case 3:
28829
28832
  return [
@@ -28837,10 +28840,10 @@ var AxBaseAI = /*#__PURE__*/ function() {
28837
28840
  },
28838
28841
  {
28839
28842
  key: "_chat",
28840
- value: function _chat(model, chatReq, options, span) {
28843
+ value: function _chat(_req, options, span) {
28841
28844
  var _this = this;
28842
28845
  return _async_to_generator$7(function() {
28843
- var _chatReq_modelConfig, reqFn, _options_stream, stream, functions, req, fn, rv, _tmp, respFn, wrappedRespFn, doneCb, st, res;
28846
+ var _req_modelConfig, reqFn, _options_stream, stream, functions, req, fn, rv, _tmp, respFn, wrappedRespFn, doneCb, st, res;
28844
28847
  return _ts_generator$8(this, function(_state) {
28845
28848
  switch(_state.label){
28846
28849
  case 0:
@@ -28848,14 +28851,11 @@ var AxBaseAI = /*#__PURE__*/ function() {
28848
28851
  throw new Error("generateChatReq not implemented");
28849
28852
  }
28850
28853
  reqFn = _this.generateChatReq;
28851
- stream = (_options_stream = options === null || options === void 0 ? void 0 : options.stream) !== null && _options_stream !== void 0 ? _options_stream : (_chatReq_modelConfig = chatReq.modelConfig) === null || _chatReq_modelConfig === void 0 ? void 0 : _chatReq_modelConfig.stream;
28852
- if (chatReq.functions && chatReq.functions.length > 0) {
28853
- functions = chatReq.functions;
28854
- }
28855
- req = _object_spread_props$8(_object_spread$h({}, chatReq), {
28856
- model: model,
28854
+ stream = (_options_stream = options === null || options === void 0 ? void 0 : options.stream) !== null && _options_stream !== void 0 ? _options_stream : (_req_modelConfig = _req.modelConfig) === null || _req_modelConfig === void 0 ? void 0 : _req_modelConfig.stream;
28855
+ functions = _req.functions && _req.functions.length > 0 ? _req.functions : undefined;
28856
+ req = _object_spread_props$8(_object_spread$h({}, _req), {
28857
28857
  functions: functions,
28858
- modelConfig: _object_spread_props$8(_object_spread$h({}, chatReq.modelConfig), {
28858
+ modelConfig: _object_spread_props$8(_object_spread$h({}, _req.modelConfig), {
28859
28859
  stream: stream
28860
28860
  })
28861
28861
  });
@@ -28992,14 +28992,10 @@ var AxBaseAI = /*#__PURE__*/ function() {
28992
28992
  value: function embed(req, options) {
28993
28993
  var _this = this;
28994
28994
  return _async_to_generator$7(function() {
28995
- var _this_modelMap, _this_modelMap_req_embedModel, embedModel, _this_tracer, _req_embedModel, _obj;
28995
+ var _this_tracer, _obj;
28996
28996
  return _ts_generator$8(this, function(_state) {
28997
28997
  switch(_state.label){
28998
28998
  case 0:
28999
- embedModel = req.embedModel ? (_this_modelMap_req_embedModel = (_this_modelMap = _this.modelMap) === null || _this_modelMap === void 0 ? void 0 : _this_modelMap[req.embedModel]) !== null && _this_modelMap_req_embedModel !== void 0 ? _this_modelMap_req_embedModel : req.embedModel : _this.models.embedModel;
29000
- if (!embedModel) {
29001
- throw new Error("No embed model defined");
29002
- }
29003
28999
  if (!_this.tracer) return [
29004
29000
  3,
29005
29001
  2
@@ -29008,7 +29004,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
29008
29004
  4,
29009
29005
  (_this_tracer = _this.tracer) === null || _this_tracer === void 0 ? void 0 : _this_tracer.startActiveSpan("Embed Request", {
29010
29006
  kind: AxSpanKind.SERVER,
29011
- attributes: (_obj = {}, _define_property$p(_obj, axSpanAttributes.LLM_SYSTEM, _this.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MODEL, (_req_embedModel = req.embedModel) !== null && _req_embedModel !== void 0 ? _req_embedModel : _this.models.embedModel), _obj)
29007
+ attributes: (_obj = {}, _define_property$p(_obj, axSpanAttributes.LLM_SYSTEM, _this.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MODEL, _this.modelInfo.name), _obj)
29012
29008
  }, function() {
29013
29009
  var _ref = _async_to_generator$7(function(span) {
29014
29010
  var res;
@@ -29017,7 +29013,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
29017
29013
  case 0:
29018
29014
  return [
29019
29015
  4,
29020
- _this._embed(embedModel, req, options, span)
29016
+ _this._embed(req, options, span)
29021
29017
  ];
29022
29018
  case 1:
29023
29019
  res = _state.sent();
@@ -29042,7 +29038,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
29042
29038
  case 2:
29043
29039
  return [
29044
29040
  2,
29045
- _this._embed(embedModel, req, options)
29041
+ _this._embed(req, options)
29046
29042
  ];
29047
29043
  }
29048
29044
  });
@@ -29051,10 +29047,10 @@ var AxBaseAI = /*#__PURE__*/ function() {
29051
29047
  },
29052
29048
  {
29053
29049
  key: "_embed",
29054
- value: function _embed(embedModel, embedReq, options, span) {
29050
+ value: function _embed(req, options, span) {
29055
29051
  var _this = this;
29056
29052
  return _async_to_generator$7(function() {
29057
- var req, fn, resValue, _tmp, res, _res_modelUsage_completionTokens, _obj;
29053
+ var fn, resValue, _tmp, res, _res_modelUsage_completionTokens, _obj;
29058
29054
  return _ts_generator$8(this, function(_state) {
29059
29055
  switch(_state.label){
29060
29056
  case 0:
@@ -29064,9 +29060,6 @@ var AxBaseAI = /*#__PURE__*/ function() {
29064
29060
  if (!_this.generateEmbedResp) {
29065
29061
  throw new Error("generateEmbedResp not implemented");
29066
29062
  }
29067
- req = _object_spread_props$8(_object_spread$h({}, embedReq), {
29068
- embedModel: embedModel
29069
- });
29070
29063
  fn = function() {
29071
29064
  var _ref = _async_to_generator$7(function() {
29072
29065
  var _this_generateEmbedReq, apiConfig, reqValue, res;
@@ -29148,6 +29141,22 @@ var AxBaseAI = /*#__PURE__*/ function() {
29148
29141
  var headers = arguments.length > 0 && arguments[0] !== void 0 ? arguments[0] : {};
29149
29142
  return _object_spread$h({}, headers, this.headers);
29150
29143
  }
29144
+ },
29145
+ {
29146
+ key: "getEmbedModel",
29147
+ value: function getEmbedModel(name) {
29148
+ var _this_embedModelMap;
29149
+ var _this_embedModelMap_name;
29150
+ return name ? (_this_embedModelMap_name = (_this_embedModelMap = this.embedModelMap) === null || _this_embedModelMap === void 0 ? void 0 : _this_embedModelMap[name]) !== null && _this_embedModelMap_name !== void 0 ? _this_embedModelMap_name : name : undefined;
29151
+ }
29152
+ },
29153
+ {
29154
+ key: "getModel",
29155
+ value: function getModel(name) {
29156
+ var _this_modelMap;
29157
+ var _this_modelMap_name;
29158
+ return (_this_modelMap_name = (_this_modelMap = this.modelMap) === null || _this_modelMap === void 0 ? void 0 : _this_modelMap[name]) !== null && _this_modelMap_name !== void 0 ? _this_modelMap_name : name;
29159
+ }
29151
29160
  }
29152
29161
  ]);
29153
29162
  return AxBaseAI;
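Taken together, these hunks move model aliasing out of the constructor: `modelMap`/`embedModelMap` are now injected after construction via `setModelMap`/`setEmbedModelMap`, and `getModel`/`getEmbedModel` resolve aliases on demand. A hedged usage sketch, assuming an already constructed instance `ai` of this class; the alias and model names are examples only:

```js
// Aliases resolve through the maps; unknown names pass through unchanged,
// and getEmbedModel(undefined) returns undefined (per the implementations above).
ai.setModelMap({ fast: 'gpt-4o' });
ai.setEmbedModelMap({ embed: 'text-embedding-3-small' });

ai.getModel('fast');       // 'gpt-4o'
ai.getModel('unknown');    // 'unknown'
ai.getEmbedModel('embed'); // 'text-embedding-3-small'
```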
@@ -29311,7 +29320,6 @@ var AxAIOpenAIModel;
29311
29320
  (function(AxAIOpenAIModel) {
29312
29321
  AxAIOpenAIModel["GPT4"] = "gpt-4";
29313
29322
  AxAIOpenAIModel["GPT4O"] = "gpt-4o";
29314
- AxAIOpenAIModel["GPT4OMini"] = "gpt-4o-mini";
29315
29323
  AxAIOpenAIModel["GPT4Turbo"] = "gpt-4-turbo";
29316
29324
  AxAIOpenAIModel["GPT35Turbo"] = "gpt-3.5-turbo";
29317
29325
  AxAIOpenAIModel["GPT35TurboInstruct"] = "gpt-3.5-turbo-instruct";
@@ -29342,12 +29350,6 @@ var AxAIOpenAIEmbedModel;
29342
29350
  promptTokenCostPer1M: 5,
29343
29351
  completionTokenCostPer1M: 15
29344
29352
  },
29345
- {
29346
- name: AxAIOpenAIModel.GPT4OMini,
29347
- currency: "usd",
29348
- promptTokenCostPer1M: 0.15,
29349
- completionTokenCostPer1M: 0.6
29350
- },
29351
29353
  {
29352
29354
  name: AxAIOpenAIModel.GPT4Turbo,
29353
29355
  currency: "usd",
@@ -29503,7 +29505,7 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
29503
29505
  _inherits$f(AxAIOpenAI, AxBaseAI);
29504
29506
  var _super = _create_super$f(AxAIOpenAI);
29505
29507
  function AxAIOpenAI(param) {
29506
- var apiKey = param.apiKey, config = param.config, options = param.options, apiURL = param.apiURL, _param_modelInfo = param.modelInfo, modelInfo = _param_modelInfo === void 0 ? axModelInfoOpenAI : _param_modelInfo, modelMap = param.modelMap;
29508
+ var apiKey = param.apiKey, config = param.config, options = param.options, apiURL = param.apiURL, _param_modelInfo = param.modelInfo, modelInfo = _param_modelInfo === void 0 ? axModelInfoOpenAI : _param_modelInfo;
29507
29509
  _class_call_check$o(this, AxAIOpenAI);
29508
29510
  var _this;
29509
29511
  if (!apiKey || apiKey === "") {
@@ -29525,16 +29527,14 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
29525
29527
  supportFor: {
29526
29528
  functions: true,
29527
29529
  streaming: true
29528
- },
29529
- modelMap: modelMap
29530
+ }
29530
29531
  });
29531
29532
  _define_property$o(_assert_this_initialized$f(_this), "config", void 0);
29532
29533
  _define_property$o(_assert_this_initialized$f(_this), "streamingUsage", void 0);
29533
- _define_property$o(_assert_this_initialized$f(_this), "dimensions", void 0);
29534
29534
  _define_property$o(_assert_this_initialized$f(_this), "generateChatReq", function(req, // eslint-disable-next-line @typescript-eslint/no-unused-vars
29535
29535
  _config) {
29536
29536
  var _req_functions, _req_modelConfig, _req_modelConfig1, _this_config, _this_config1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4, _req_modelConfig5, _req_modelConfig6, _req_modelConfig7;
29537
- var model = req.model;
29537
+ var model = _this.config.model;
29538
29538
  if (!req.chatPrompt || req.chatPrompt.length === 0) {
29539
29539
  throw new Error("Chat prompt is empty");
29540
29540
  }
@@ -29587,7 +29587,7 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
29587
29587
  ];
29588
29588
  });
29589
29589
  _define_property$o(_assert_this_initialized$f(_this), "generateEmbedReq", function(req) {
29590
- var model = req.embedModel;
29590
+ var model = _this.config.embedModel;
29591
29591
  if (!model) {
29592
29592
  throw new Error("Embed model not set");
29593
29593
  }
@@ -29599,8 +29599,7 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
29599
29599
  };
29600
29600
  var reqValue = {
29601
29601
  model: model,
29602
- input: req.texts,
29603
- dimensions: _this.dimensions
29602
+ input: req.texts
29604
29603
  };
29605
29604
  return [
29606
29605
  apiConfig,
@@ -29709,7 +29708,6 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
29709
29708
  _this.config = _config;
29710
29709
  var _options_streamingUsage;
29711
29710
  _this.streamingUsage = (_options_streamingUsage = options === null || options === void 0 ? void 0 : options.streamingUsage) !== null && _options_streamingUsage !== void 0 ? _options_streamingUsage : true;
29712
- _this.dimensions = config === null || config === void 0 ? void 0 : config.dimensions;
29713
29711
  return _this;
29714
29712
  }
29715
29713
  _create_class$b(AxAIOpenAI, [
@@ -29943,7 +29941,7 @@ var AxAIAzureOpenAI = /*#__PURE__*/ function(AxAIOpenAI) {
29943
29941
  _inherits$e(AxAIAzureOpenAI, AxAIOpenAI);
29944
29942
  var _super = _create_super$e(AxAIAzureOpenAI);
29945
29943
  function AxAIAzureOpenAI(param) {
29946
- var apiKey = param.apiKey, resourceName = param.resourceName, deploymentName = param.deploymentName, _param_version = param.version, version = _param_version === void 0 ? "api-version=2024-02-15-preview" : _param_version, config = param.config, options = param.options, modelMap = param.modelMap;
29944
+ var apiKey = param.apiKey, resourceName = param.resourceName, deploymentName = param.deploymentName, _param_version = param.version, version = _param_version === void 0 ? "api-version=2024-02-15-preview" : _param_version, config = param.config, options = param.options;
29947
29945
  _class_call_check$n(this, AxAIAzureOpenAI);
29948
29946
  if (!apiKey || apiKey === "") {
29949
29947
  throw new Error("Azure OpenAPI API key not set");
@@ -29958,8 +29956,7 @@ var AxAIAzureOpenAI = /*#__PURE__*/ function(AxAIOpenAI) {
29958
29956
  var _this = _super.call(this, {
29959
29957
  apiKey: apiKey,
29960
29958
  config: _config,
29961
- options: options,
29962
- modelMap: modelMap
29959
+ options: options
29963
29960
  });
29964
29961
  var host = resourceName.includes("://") ? resourceName : "https://".concat(resourceName, ".openai.azure.com/");
29965
29962
  _get$5((_assert_this_initialized$e(_this), _get_prototype_of$e(AxAIAzureOpenAI.prototype)), "setName", _this).call(_this, "Azure OpenAI");
@@ -30105,7 +30102,7 @@ var AxAIHuggingFace = /*#__PURE__*/ function(AxBaseAI) {
30105
30102
  _inherits$d(AxAIHuggingFace, AxBaseAI);
30106
30103
  var _super = _create_super$d(AxAIHuggingFace);
30107
30104
  function AxAIHuggingFace(param) {
30108
- var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
30105
+ var apiKey = param.apiKey, config = param.config, options = param.options;
30109
30106
  _class_call_check$m(this, AxAIHuggingFace);
30110
30107
  var _this;
30111
30108
  if (!apiKey || apiKey === "") {
@@ -30126,14 +30123,13 @@ var AxAIHuggingFace = /*#__PURE__*/ function(AxBaseAI) {
30126
30123
  supportFor: {
30127
30124
  functions: false,
30128
30125
  streaming: false
30129
- },
30130
- modelMap: modelMap
30126
+ }
30131
30127
  });
30132
30128
  _define_property$m(_assert_this_initialized$d(_this), "config", void 0);
30133
30129
  _define_property$m(_assert_this_initialized$d(_this), "generateChatReq", function(req, // eslint-disable-next-line @typescript-eslint/no-unused-vars
30134
30130
  _config) {
30135
30131
  var _req_chatPrompt, _req_modelConfig, _req_modelConfig1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4;
30136
- var model = req.model;
30132
+ var model = _this.config.model;
30137
30133
  var functionsList = req.functions ? "Functions:\n".concat(JSON.stringify(req.functions, null, 2), "\n") : "";
30138
30134
  var prompt = (_req_chatPrompt = req.chatPrompt) === null || _req_chatPrompt === void 0 ? void 0 : _req_chatPrompt.map(function(msg) {
30139
30135
  switch(msg.role){
@@ -30355,7 +30351,7 @@ var AxAITogether = /*#__PURE__*/ function(AxAIOpenAI) {
30355
30351
  _inherits$c(AxAITogether, AxAIOpenAI);
30356
30352
  var _super = _create_super$c(AxAITogether);
30357
30353
  function AxAITogether(param) {
30358
- var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
30354
+ var apiKey = param.apiKey, config = param.config, options = param.options;
30359
30355
  _class_call_check$l(this, AxAITogether);
30360
30356
  if (!apiKey || apiKey === "") {
30361
30357
  throw new Error("Together API key not set");
@@ -30366,8 +30362,7 @@ var AxAITogether = /*#__PURE__*/ function(AxAIOpenAI) {
30366
30362
  config: _config,
30367
30363
  options: options,
30368
30364
  apiURL: "https://api.together.xyz/v1",
30369
- modelInfo: axModelInfoTogether,
30370
- modelMap: modelMap
30365
+ modelInfo: axModelInfoTogether
30371
30366
  });
30372
30367
  _get$4((_assert_this_initialized$c(_this), _get_prototype_of$c(AxAITogether.prototype)), "setName", _this).call(_this, "Together");
30373
30368
  return _this;
@@ -30641,7 +30636,7 @@ var AxAICohere = /*#__PURE__*/ function(AxBaseAI) {
30641
30636
  _inherits$b(AxAICohere, AxBaseAI);
30642
30637
  var _super = _create_super$b(AxAICohere);
30643
30638
  function AxAICohere(param) {
30644
- var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
30639
+ var apiKey = param.apiKey, config = param.config, options = param.options;
30645
30640
  _class_call_check$k(this, AxAICohere);
30646
30641
  var _this;
30647
30642
  if (!apiKey || apiKey === "") {
@@ -30662,14 +30657,13 @@ var AxAICohere = /*#__PURE__*/ function(AxBaseAI) {
30662
30657
  functions: true,
30663
30658
  streaming: true
30664
30659
  },
30665
- options: options,
30666
- modelMap: modelMap
30660
+ options: options
30667
30661
  });
30668
30662
  _define_property$k(_assert_this_initialized$b(_this), "config", void 0);
30669
30663
  _define_property$k(_assert_this_initialized$b(_this), "generateChatReq", function(req, // eslint-disable-next-line @typescript-eslint/no-unused-vars
30670
30664
  _config) {
30671
30665
  var _req_functions, _req_modelConfig, _req_modelConfig1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4, _req_modelConfig5, _req_modelConfig6;
30672
- var model = req.model;
30666
+ var model = _this.config.model;
30673
30667
  var lastChatMsg = req.chatPrompt.at(-1);
30674
30668
  var restOfChat = req.chatPrompt.slice(0, -1);
30675
30669
  var message;
@@ -30764,7 +30758,7 @@ var AxAICohere = /*#__PURE__*/ function(AxBaseAI) {
30764
30758
  ];
30765
30759
  });
30766
30760
  _define_property$k(_assert_this_initialized$b(_this), "generateEmbedReq", function(req) {
30767
- var model = req.embedModel;
30761
+ var model = _this.config.embedModel;
30768
30762
  if (!model) {
30769
30763
  throw new Error("Embed model not set");
30770
30764
  }
@@ -31163,7 +31157,7 @@ var safetySettings = [
31163
31157
  _inherits$a(AxAIGoogleGemini, AxBaseAI);
31164
31158
  var _super = _create_super$a(AxAIGoogleGemini);
31165
31159
  function AxAIGoogleGemini(param) {
31166
- var apiKey = param.apiKey, projectId = param.projectId, region = param.region, config = param.config, options = param.options, modelMap = param.modelMap;
31160
+ var apiKey = param.apiKey, projectId = param.projectId, region = param.region, config = param.config, options = param.options;
31167
31161
  _class_call_check$j(this, AxAIGoogleGemini);
31168
31162
  var _this;
31169
31163
  if (!apiKey || apiKey === "") {
@@ -31187,15 +31181,14 @@ var safetySettings = [
31187
31181
  supportFor: {
31188
31182
  functions: true,
31189
31183
  streaming: true
31190
- },
31191
- modelMap: modelMap
31184
+ }
31192
31185
  });
31193
31186
  _define_property$j(_assert_this_initialized$a(_this), "options", void 0);
31194
31187
  _define_property$j(_assert_this_initialized$a(_this), "config", void 0);
31195
31188
  _define_property$j(_assert_this_initialized$a(_this), "apiKey", void 0);
31196
31189
  _define_property$j(_assert_this_initialized$a(_this), "generateChatReq", function(req) {
31197
31190
  var _req_modelConfig, _this_options, _req_modelConfig1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4, _req_modelConfig5;
31198
- var model = req.model;
31191
+ var model = _this.config.model;
31199
31192
  var _req_modelConfig_stream;
31200
31193
  var stream = (_req_modelConfig_stream = (_req_modelConfig = req.modelConfig) === null || _req_modelConfig === void 0 ? void 0 : _req_modelConfig.stream) !== null && _req_modelConfig_stream !== void 0 ? _req_modelConfig_stream : _this.config.stream;
31201
31194
  if (!req.chatPrompt || req.chatPrompt.length === 0) {
@@ -31375,7 +31368,7 @@ var safetySettings = [
31375
31368
  ];
31376
31369
  });
31377
31370
  _define_property$j(_assert_this_initialized$a(_this), "generateEmbedReq", function(req) {
31378
- var model = req.embedModel;
31371
+ var model = _this.config.embedModel;
31379
31372
  if (!model) {
31380
31373
  throw new Error("Embed model not set");
31381
31374
  }
@@ -31739,7 +31732,7 @@ var AxAIAnthropic = /*#__PURE__*/ function(AxBaseAI) {
31739
31732
  _inherits$9(AxAIAnthropic, AxBaseAI);
31740
31733
  var _super = _create_super$9(AxAIAnthropic);
31741
31734
  function AxAIAnthropic(param) {
31742
- var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
31735
+ var apiKey = param.apiKey, config = param.config, options = param.options;
31743
31736
  _class_call_check$i(this, AxAIAnthropic);
31744
31737
  var _this;
31745
31738
  if (!apiKey || apiKey === "") {
@@ -31761,13 +31754,12 @@ var AxAIAnthropic = /*#__PURE__*/ function(AxBaseAI) {
31761
31754
  supportFor: {
31762
31755
  functions: true,
31763
31756
  streaming: true
31764
- },
31765
- modelMap: modelMap
31757
+ }
31766
31758
  });
31767
31759
  _define_property$i(_assert_this_initialized$9(_this), "config", void 0);
31768
31760
  _define_property$i(_assert_this_initialized$9(_this), "generateChatReq", function(req) {
31769
31761
  var _req_functions, _req_modelConfig, _req_modelConfig1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4, _req_modelConfig5;
31770
- var model = req.model;
31762
+ var model = _this.config.model;
31771
31763
  var apiConfig = {
31772
31764
  name: "/messages"
31773
31765
  };
@@ -32664,7 +32656,7 @@ var AxAIGroq = /*#__PURE__*/ function(AxAIOpenAI) {
32664
32656
  _inherits$8(AxAIGroq, AxAIOpenAI);
32665
32657
  var _super = _create_super$8(AxAIGroq);
32666
32658
  function AxAIGroq(param) {
32667
- var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
32659
+ var apiKey = param.apiKey, config = param.config, options = param.options;
32668
32660
  _class_call_check$g(this, AxAIGroq);
32669
32661
  var _this;
32670
32662
  if (!apiKey || apiKey === "") {
@@ -32679,8 +32671,7 @@ var AxAIGroq = /*#__PURE__*/ function(AxAIOpenAI) {
32679
32671
  config: _config,
32680
32672
  options: _options,
32681
32673
  apiURL: "https://api.groq.com/openai/v1",
32682
- modelInfo: [],
32683
- modelMap: modelMap
32674
+ modelInfo: []
32684
32675
  });
32685
32676
  _define_property$g(_assert_this_initialized$8(_this), "setOptions", function(options) {
32686
32677
  var rateLimiter = _this.newRateLimiter(options);
@@ -32736,23 +32727,19 @@ var AxAIGroq = /*#__PURE__*/ function(AxAIOpenAI) {
32736
32727
  }
32737
32728
  (AxAIOpenAI);
32738
32729
 
32739
- // cspell:ignore mistral, mixtral, codestral, nemo
32740
32730
  var AxAIMistralModel;
32741
32731
  (function(AxAIMistralModel) {
32742
32732
  AxAIMistralModel["Mistral7B"] = "open-mistral-7b";
32743
32733
  AxAIMistralModel["Mistral8x7B"] = "open-mixtral-8x7b";
32744
32734
  AxAIMistralModel["MistralSmall"] = "mistral-small-latest";
32735
+ AxAIMistralModel["MistralMedium"] = "mistral-medium-latest";
32745
32736
  AxAIMistralModel["MistralLarge"] = "mistral-large-latest";
32746
- AxAIMistralModel["Codestral"] = "codestral-latest";
32747
- AxAIMistralModel["OpenCodestralMamba"] = "open-codestral-mamba";
32748
- AxAIMistralModel["OpenMistralNemo"] = "open-mistral-nemo-latest";
32749
32737
  })(AxAIMistralModel || (AxAIMistralModel = {}));
32750
32738
  var AxAIMistralEmbedModels;
32751
32739
  (function(AxAIMistralEmbedModels) {
32752
32740
  AxAIMistralEmbedModels["MistralEmbed"] = "mistral-embed";
32753
32741
  })(AxAIMistralEmbedModels || (AxAIMistralEmbedModels = {}));
32754
32742
 
32755
- // cspell:ignore mistral, mixtral, codestral, nemo
32756
32743
  var axModelInfoMistral = [
32757
32744
  {
32758
32745
  name: AxAIMistralModel.Mistral7B,
@@ -32773,28 +32760,16 @@ var axModelInfoMistral = [
32773
32760
  completionTokenCostPer1M: 6
32774
32761
  },
32775
32762
  {
32776
- name: AxAIMistralModel.MistralLarge,
32777
- currency: "USD",
32778
- promptTokenCostPer1M: 8,
32779
- completionTokenCostPer1M: 24
32780
- },
32781
- {
32782
- name: AxAIMistralModel.Codestral,
32763
+ name: AxAIMistralModel.MistralMedium,
32783
32764
  currency: "USD",
32784
- promptTokenCostPer1M: 1,
32785
- completionTokenCostPer1M: 3
32765
+ promptTokenCostPer1M: 2.7,
32766
+ completionTokenCostPer1M: 8.1
32786
32767
  },
32787
32768
  {
32788
- name: AxAIMistralModel.OpenCodestralMamba,
32789
- currency: "USD",
32790
- promptTokenCostPer1M: 0.25,
32791
- completionTokenCostPer1M: 0.25
32792
- },
32793
- {
32794
- name: AxAIMistralModel.OpenMistralNemo,
32769
+ name: AxAIMistralModel.MistralLarge,
32795
32770
  currency: "USD",
32796
- promptTokenCostPer1M: 0.3,
32797
- completionTokenCostPer1M: 0.3
32771
+ promptTokenCostPer1M: 8,
32772
+ completionTokenCostPer1M: 24
32798
32773
  }
32799
32774
  ];
32800
32775
 
@@ -32929,7 +32904,7 @@ var AxAIMistral = /*#__PURE__*/ function(AxAIOpenAI) {
32929
32904
  _inherits$7(AxAIMistral, AxAIOpenAI);
32930
32905
  var _super = _create_super$7(AxAIMistral);
32931
32906
  function AxAIMistral(param) {
32932
- var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
32907
+ var apiKey = param.apiKey, config = param.config, options = param.options;
32933
32908
  _class_call_check$f(this, AxAIMistral);
32934
32909
  if (!apiKey || apiKey === "") {
32935
32910
  throw new Error("Mistral API key not set");
@@ -32940,8 +32915,7 @@ var AxAIMistral = /*#__PURE__*/ function(AxAIOpenAI) {
32940
32915
  config: _config,
32941
32916
  options: options,
32942
32917
  apiURL: "https://api.mistral.ai/v1",
32943
- modelInfo: axModelInfoMistral,
32944
- modelMap: modelMap
32918
+ modelInfo: axModelInfoMistral
32945
32919
  });
32946
32920
  _get$2((_assert_this_initialized$7(_this), _get_prototype_of$7(AxAIMistral.prototype)), "setName", _this).call(_this, "Mistral");
32947
32921
  return _this;
@@ -33105,7 +33079,7 @@ var AxAIDeepSeek = /*#__PURE__*/ function(AxAIOpenAI) {
33105
33079
  _inherits$6(AxAIDeepSeek, AxAIOpenAI);
33106
33080
  var _super = _create_super$6(AxAIDeepSeek);
33107
33081
  function AxAIDeepSeek(param) {
33108
- var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
33082
+ var apiKey = param.apiKey, config = param.config, options = param.options;
33109
33083
  _class_call_check$e(this, AxAIDeepSeek);
33110
33084
  if (!apiKey || apiKey === "") {
33111
33085
  throw new Error("DeepSeek API key not set");
@@ -33116,8 +33090,7 @@ var AxAIDeepSeek = /*#__PURE__*/ function(AxAIOpenAI) {
33116
33090
  config: _config,
33117
33091
  options: options,
33118
33092
  apiURL: "https://api.deepseek.com",
33119
- modelInfo: axModelInfoDeepSeek,
33120
- modelMap: modelMap
33093
+ modelInfo: axModelInfoDeepSeek
33121
33094
  });
33122
33095
  _get$1((_assert_this_initialized$6(_this), _get_prototype_of$6(AxAIDeepSeek.prototype)), "setName", _this).call(_this, "DeepSeek");
33123
33096
  return _this;
@@ -33285,15 +33258,14 @@ var axAIOllamaDefaultConfig = function() {
33285
33258
  _inherits$5(AxAIOllama, AxAIOpenAI);
33286
33259
  var _super = _create_super$5(AxAIOllama);
33287
33260
  function AxAIOllama(param) {
33288
- var _param_apiKey = param.apiKey, apiKey = _param_apiKey === void 0 ? "not-set" : _param_apiKey, _param_url = param.url, url = _param_url === void 0 ? "http://localhost:11434" : _param_url, config = param.config, options = param.options, modelMap = param.modelMap;
33261
+ var _param_apiKey = param.apiKey, apiKey = _param_apiKey === void 0 ? "not-set" : _param_apiKey, _param_url = param.url, url = _param_url === void 0 ? "http://localhost:11434" : _param_url, config = param.config, options = param.options;
33289
33262
  _class_call_check$d(this, AxAIOllama);
33290
33263
  var _config = _object_spread$6({}, axAIOllamaDefaultConfig(), config);
33291
33264
  var _this = _super.call(this, {
33292
33265
  apiKey: apiKey,
33293
33266
  options: options,
33294
33267
  config: _config,
33295
- apiURL: new URL("/api", url).href,
33296
- modelMap: modelMap
33268
+ apiURL: new URL("/v1", url).href
33297
33269
  });
33298
33270
  _get((_assert_this_initialized$5(_this), _get_prototype_of$5(AxAIOllama.prototype)), "setName", _this).call(_this, "Ollama");
33299
33271
  return _this;
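The Ollama wrapper now builds its base URL against `/v1` instead of `/api`, which corresponds to Ollama's OpenAI-compatible endpoint (consistent with `AxAIOllama` extending `AxAIOpenAI`). A quick check of how the two base URLs resolve for the default `url`, using the standard `URL` API:

```js
// Old vs. new base URL for the default Ollama host.
console.log(new URL('/api', 'http://localhost:11434').href); // 'http://localhost:11434/api' (old)
console.log(new URL('/v1', 'http://localhost:11434').href);  // 'http://localhost:11434/v1'  (new)
```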
@@ -33501,6 +33473,18 @@ var AxAI = /*#__PURE__*/ function() {
33501
33473
  }
33502
33474
  }
33503
33475
  _create_class$5(AxAI, [
33476
+ {
33477
+ key: "setModelMap",
33478
+ value: function setModelMap(modelMap) {
33479
+ this.ai.setModelMap(modelMap);
33480
+ }
33481
+ },
33482
+ {
33483
+ key: "setEmbedModelMap",
33484
+ value: function setEmbedModelMap(modelMap) {
33485
+ this.ai.setEmbedModelMap(modelMap);
33486
+ }
33487
+ },
33504
33488
  {
33505
33489
  key: "getName",
33506
33490
  value: function getName() {
@@ -33531,12 +33515,6 @@ var AxAI = /*#__PURE__*/ function() {
33531
33515
  return this.ai.getFeatures();
33532
33516
  }
33533
33517
  },
33534
- {
33535
- key: "getModelMap",
33536
- value: function getModelMap() {
33537
- return this.ai.getModelMap();
33538
- }
33539
- },
33540
33518
  {
33541
33519
  key: "chat",
33542
33520
  value: function chat(req, options) {
@@ -34095,8 +34073,8 @@ var assertRequiredFields = function(sig, values) {
34095
34073
  });
34096
34074
  if (missingFields.length > 0) {
34097
34075
  throw new AxAssertionError({
34098
- message: "Output must include: t: ".concat(missingFields.map(function(f) {
34099
- return "`".concat(f.title, ":`");
34076
+ message: "Missing required fields: ".concat(missingFields.map(function(f) {
34077
+ return f.name;
34100
34078
  }).join(", ")),
34101
34079
  values: values
34102
34080
  });
@@ -38902,7 +38880,7 @@ var AxPromptTemplate = function AxPromptTemplate(sig, fieldTemplates) {
38902
38880
  this.outputFormat = {
38903
38881
  type: "text",
38904
38882
  text: [
38905
- "Use the following output format."
38883
+ "Follow the following format."
38906
38884
  ].concat(_to_consumable_array$2(this.renderOutFields(this.sig.getOutputFields())), [
38907
38885
  "---\n\n"
38908
38886
  ]).join("\n\n")
@@ -40718,7 +40696,9 @@ let DspService = class DspService {
40718
40696
  }
40719
40697
  async modelOpenAI(params, _pinsSettingsList, context) {
40720
40698
  var _context_privates_OPENAI_API_KEY, _context_privates_OPENAI_SERVER;
40721
- const { apiKey = (_context_privates_OPENAI_API_KEY = context.privates.OPENAI_API_KEY) != null ? _context_privates_OPENAI_API_KEY : process.env['OPENAI_API_KEY'], apiURL = (_context_privates_OPENAI_SERVER = context.privates.OPENAI_SERVER) != null ? _context_privates_OPENAI_SERVER : process.env['OPENAI_SERVER'], config, options } = params;
40699
+ const { apiKey = (_context_privates_OPENAI_API_KEY = context.privates.OPENAI_API_KEY) != null ? _context_privates_OPENAI_API_KEY : process.env['OPENAI_API_KEY'], apiURL = (_context_privates_OPENAI_SERVER = context.privates.OPENAI_SERVER) != null ? _context_privates_OPENAI_SERVER : process.env['OPENAI_SERVER'], config = {
40700
+ model: 'gpt-4o-mini'
40701
+ }, options } = params;
40722
40702
  const modelInstance = new AxAIOpenAI({
40723
40703
  apiKey,
40724
40704
  apiURL,
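Finally, `DspService.modelOpenAI` now supplies a default `config` when the caller omits one, so the OpenAI skill falls back to `gpt-4o-mini`. A minimal sketch of the destructuring default; the `params` object is illustrative:

```js
// Same destructuring-default pattern as in the diff: config falls back to
// { model: 'gpt-4o-mini' } only when params.config is undefined.
const params = { apiKey: 'sk-…' }; // no config supplied by the pin settings
const { config = { model: 'gpt-4o-mini' } } = params;
console.log(config); // { model: 'gpt-4o-mini' }
```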