@digipair/skill-dsp 0.21.14 → 0.22.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/index.cjs.js CHANGED
@@ -20358,7 +20358,6 @@ about the parse state.
  var _a;
  var depth = action >> 19 /* Action.ReduceDepthShift */ , type = action & 65535 /* Action.ValueMask */ ;
  var parser = this.p.parser;
- if (this.reducePos < this.pos - 25 /* Lookahead.Margin */ ) this.setLookAhead(this.pos);
  var dPrec = parser.dynamicPrecedence(type);
  if (dPrec) this.score += dPrec;
  if (depth == 0) {
@@ -21311,7 +21310,7 @@ function cutAt(tree, pos, side) {
  cursor.moveTo(pos);
  for(;;){
  if (!(side < 0 ? cursor.childBefore(pos) : cursor.childAfter(pos))) for(;;){
- if ((side < 0 ? cursor.to < pos : cursor.from > pos) && !cursor.type.isError) return side < 0 ? Math.max(0, Math.min(cursor.to - 1, pos - 25 /* Lookahead.Margin */ )) : Math.min(tree.length, Math.max(cursor.from + 1, pos + 25 /* Lookahead.Margin */ ));
+ if ((side < 0 ? cursor.to < pos : cursor.from > pos) && !cursor.type.isError) return side < 0 ? Math.max(0, Math.min(cursor.to - 1, pos - 25 /* Safety.Margin */ )) : Math.min(tree.length, Math.max(cursor.from + 1, pos + 25 /* Safety.Margin */ ));
  if (side < 0 ? cursor.prevSibling() : cursor.nextSibling()) break;
  if (!cursor.parent()) return side < 0 ? 0 : tree.length;
  }
@@ -21434,7 +21433,7 @@ var TokenCache = /*#__PURE__*/ function() {
  token.mask = mask;
  token.context = context;
  }
- if (token.lookAhead > token.end + 25 /* Lookahead.Margin */ ) lookAhead = Math.max(token.lookAhead, lookAhead);
+ if (token.lookAhead > token.end + 25 /* Safety.Margin */ ) lookAhead = Math.max(token.lookAhead, lookAhead);
  if (token.value != 0 /* Term.Err */ ) {
  var startIndex = actionIndex;
  if (token.extended > -1) actionIndex = this.addActions(stack, token.extended, token.end, actionIndex);
@@ -23525,14 +23524,14 @@ function indent(str, spaces) {
  var match = parseIdentifier(input, i1, namePart) || namePart && parseAdditionalSymbol(input, i1) || maybeSpace && parseSpaces(input, i1);
  // match is required
  if (!match) {
- return nextMatch = nextMatch1, tokens = tokens1, i = i1, {
+ return tokens = tokens1, i = i1, nextMatch = nextMatch1, {
  v: nextMatch1
  };
  }
  var token = match.token, offset = match.offset;
  i1 += offset;
  if (token === " ") {
- return nextMatch = nextMatch1, tokens = tokens1, i = i1, "continue";
+ return tokens = tokens1, i = i1, nextMatch = nextMatch1, "continue";
  }
  tokens1 = _to_consumable_array$6(tokens1).concat([
  token
@@ -23551,7 +23550,7 @@ function indent(str, spaces) {
  if (contextKeys.some(function(el) {
  return el.startsWith(name);
  })) {
- return nextMatch = nextMatch1, tokens = tokens1, i = i1, "continue";
+ return tokens = tokens1, i = i1, nextMatch = nextMatch1, "continue";
  }
  if (dateTimeIdentifiers.some(function(el) {
  return el === name;
@@ -23570,9 +23569,9 @@ function indent(str, spaces) {
  if (dateTimeIdentifiers.some(function(el) {
  return el.startsWith(name);
  })) {
- return nextMatch = nextMatch1, tokens = tokens1, i = i1, "continue";
+ return tokens = tokens1, i = i1, nextMatch = nextMatch1, "continue";
  }
- return nextMatch = nextMatch1, tokens = tokens1, i = i1, {
+ return tokens = tokens1, i = i1, nextMatch = nextMatch1, {
  v: nextMatch1
  };
  };
@@ -24375,7 +24374,6 @@ var parser = LRParser.deserialize({
  },
  dynamicPrecedences: {
  "31": -1,
- "67": 1,
  "71": -1,
  "73": -1
  },
@@ -27378,7 +27376,11 @@ const applyTemplate = (value, context)=>{
  if (result.startsWith('EVALUATE:')) {
  const path = result.replace(/^EVALUATE:/, '');
  result = evaluate(path, _extends({}, context, {
- getTime: (time)=>new Date(time).getTime()
+ getTime: (time)=>new Date(time).getTime(),
+ atob: (value)=>atob(value),
+ btoa: (value)=>btoa(value),
+ encodeURIComponent: (value)=>encodeURIComponent(value),
+ decodeURIComponent: (value)=>decodeURIComponent(value)
  }));
  }
  } else if (typeof value === 'object' && Array.isArray(value)) {
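The hunk above widens the evaluation context that applyTemplate hands to evaluate() for EVALUATE: expressions, exposing atob, btoa, encodeURIComponent and decodeURIComponent next to getTime. A minimal sketch of the resulting behaviour; evaluateExpression and the sample expression are stand-ins for illustration, not part of the package:

    // Stand-in for the bundle's evaluate() helper, whose implementation is not shown in this diff.
    const evaluateExpression = (expr, ctx) =>
      new Function(...Object.keys(ctx), `return (${expr});`)(...Object.values(ctx));

    const applyTemplateSketch = (value, context) => {
      if (typeof value !== 'string' || !value.startsWith('EVALUATE:')) return value;
      const path = value.replace(/^EVALUATE:/, '');
      return evaluateExpression(path, {
        ...context,
        getTime: (time) => new Date(time).getTime(),
        atob: (v) => atob(v),
        btoa: (v) => btoa(v),
        encodeURIComponent: (v) => encodeURIComponent(v),
        decodeURIComponent: (v) => decodeURIComponent(v)
      });
    };

    // With 0.22.1 an expression like this can call the newly exposed helpers:
    applyTemplateSketch("EVALUATE:btoa(user.login)", { user: { login: 'alice' } }); // "YWxpY2U="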
@@ -28633,7 +28635,7 @@ var axBaseAIDefaultConfig = function() {
  };
  var AxBaseAI = /*#__PURE__*/ function() {
  function AxBaseAI(param) {
- var name = param.name, apiURL = param.apiURL, headers = param.headers, modelInfo = param.modelInfo, models = param.models, _param_options = param.options, options = _param_options === void 0 ? {} : _param_options, supportFor = param.supportFor, modelMap = param.modelMap;
+ var name = param.name, apiURL = param.apiURL, headers = param.headers, modelInfo = param.modelInfo, models = param.models, _param_options = param.options, options = _param_options === void 0 ? {} : _param_options, supportFor = param.supportFor;
  _class_call_check$p(this, AxBaseAI);
  _define_property$p(this, "generateChatReq", void 0);
  _define_property$p(this, "generateEmbedReq", void 0);
@@ -28645,29 +28647,48 @@ var AxBaseAI = /*#__PURE__*/ function() {
  _define_property$p(this, "fetch", void 0);
  _define_property$p(this, "tracer", void 0);
  _define_property$p(this, "modelMap", void 0);
- _define_property$p(this, "modelInfo", void 0);
+ _define_property$p(this, "embedModelMap", void 0);
  _define_property$p(this, "modelUsage", void 0);
  _define_property$p(this, "embedModelUsage", void 0);
- _define_property$p(this, "models", void 0);
  _define_property$p(this, "apiURL", void 0);
  _define_property$p(this, "name", void 0);
  _define_property$p(this, "headers", void 0);
+ _define_property$p(this, "modelInfo", void 0);
+ _define_property$p(this, "embedModelInfo", void 0);
  _define_property$p(this, "supportFor", void 0);
  this.name = name;
  this.apiURL = apiURL;
  this.headers = headers;
  this.supportFor = supportFor;
  this.tracer = options.tracer;
- this.modelInfo = modelInfo;
- this.modelMap = modelMap;
- var _modelMap_models_model, _models_embedModel, _modelMap_;
- this.models = {
- model: (_modelMap_models_model = modelMap === null || modelMap === void 0 ? void 0 : modelMap[models.model]) !== null && _modelMap_models_model !== void 0 ? _modelMap_models_model : models.model,
- embedModel: (_modelMap_ = modelMap === null || modelMap === void 0 ? void 0 : modelMap[(_models_embedModel = models.embedModel) !== null && _models_embedModel !== void 0 ? _models_embedModel : ""]) !== null && _modelMap_ !== void 0 ? _modelMap_ : models.embedModel
- };
- if (!models.model || typeof models.model !== "string" || models.model === "") {
+ var model = this.getModel(models.model);
+ var embedModel = this.getEmbedModel(models.embedModel);
+ if (typeof model === "string") {
+ var modelName = model.replace(/-0\d+$|-\d{2,}$/, "");
+ var _modelInfo_filter_at;
+ this.modelInfo = (_modelInfo_filter_at = modelInfo.filter(function(v) {
+ return v.name === modelName;
+ }).at(0)) !== null && _modelInfo_filter_at !== void 0 ? _modelInfo_filter_at : {
+ name: model,
+ currency: "usd",
+ promptTokenCostPer1M: 0,
+ completionTokenCostPer1M: 0
+ };
+ } else {
  throw new Error("No model defined");
  }
+ if (typeof embedModel === "string") {
+ var embedModelName = embedModel === null || embedModel === void 0 ? void 0 : embedModel.replace(/-0\d+$|-\d{2,}$/, "");
+ var _modelInfo_filter_at1;
+ this.embedModelInfo = (_modelInfo_filter_at1 = modelInfo.filter(function(v) {
+ return v.name === embedModelName;
+ }).at(0)) !== null && _modelInfo_filter_at1 !== void 0 ? _modelInfo_filter_at1 : {
+ name: embedModel !== null && embedModel !== void 0 ? embedModel : "",
+ currency: "usd",
+ promptTokenCostPer1M: 0,
+ completionTokenCostPer1M: 0
+ };
+ }
  this.setOptions(options);
  }
  _create_class$c(AxBaseAI, [
@@ -28707,27 +28728,21 @@ var AxBaseAI = /*#__PURE__*/ function() {
  }
  },
  {
- key: "_getModelInfo",
- value: function _getModelInfo(model) {
- var _this_modelMap;
- var _this_modelMap_model;
- var _model = (_this_modelMap_model = (_this_modelMap = this.modelMap) === null || _this_modelMap === void 0 ? void 0 : _this_modelMap[model]) !== null && _this_modelMap_model !== void 0 ? _this_modelMap_model : model;
- var modelName = _model.replace(/-0\d+$|-\d{2,}$/, "");
- var _this_modelInfo_filter_at;
- return (_this_modelInfo_filter_at = this.modelInfo.filter(function(v) {
- return v.name === modelName;
- }).at(0)) !== null && _this_modelInfo_filter_at !== void 0 ? _this_modelInfo_filter_at : {
- name: model,
- currency: "usd",
- promptTokenCostPer1M: 0,
- completionTokenCostPer1M: 0
- };
+ key: "setModelMap",
+ value: function setModelMap(modelMap) {
+ this.modelMap = modelMap;
+ }
+ },
+ {
+ key: "setEmbedModelMap",
+ value: function setEmbedModelMap(embedModelMap) {
+ this.embedModelMap = embedModelMap;
  }
  },
  {
  key: "getModelInfo",
  value: function getModelInfo() {
- return _object_spread_props$8(_object_spread$h({}, this._getModelInfo(this.models.model)), {
+ return _object_spread_props$8(_object_spread$h({}, this.modelInfo), {
  provider: this.name
  });
  }
@@ -28735,17 +28750,9 @@ var AxBaseAI = /*#__PURE__*/ function() {
  {
  key: "getEmbedModelInfo",
  value: function getEmbedModelInfo() {
- if (this.models.embedModel) {
- return _object_spread_props$8(_object_spread$h({}, this._getModelInfo(this.models.embedModel)), {
- provider: this.name
- });
- }
- }
- },
- {
- key: "getModelMap",
- value: function getModelMap() {
- return this.modelMap;
+ return this.embedModelInfo ? _object_spread_props$8(_object_spread$h({}, this.embedModelInfo), {
+ provider: this.name
+ }) : undefined;
  }
  },
  {
@@ -28768,14 +28775,13 @@ var AxBaseAI = /*#__PURE__*/ function() {
  },
  {
  key: "chat",
- value: function chat(req, options) {
+ value: function chat(_req, options) {
  var _this = this;
  return _async_to_generator$7(function() {
- var _this_modelMap, _this_modelMap_req_model, model, _mc_stopSequences, _this_tracer, mc, _obj;
+ var _mc_stopSequences, _this_tracer, mc, _obj;
  return _ts_generator$8(this, function(_state) {
  switch(_state.label){
  case 0:
- model = req.model ? (_this_modelMap_req_model = (_this_modelMap = _this.modelMap) === null || _this_modelMap === void 0 ? void 0 : _this_modelMap[req.model]) !== null && _this_modelMap_req_model !== void 0 ? _this_modelMap_req_model : req.model : _this.models.model;
  if (!_this.tracer) return [
  3,
  2
@@ -28785,7 +28791,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
  4,
  (_this_tracer = _this.tracer) === null || _this_tracer === void 0 ? void 0 : _this_tracer.startActiveSpan("Chat Request", {
  kind: AxSpanKind.SERVER,
- attributes: (_obj = {}, _define_property$p(_obj, axSpanAttributes.LLM_SYSTEM, _this.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MODEL, model), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MAX_TOKENS, mc.maxTokens), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TEMPERATURE, mc.temperature), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TOP_P, mc.topP), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TOP_K, mc.topK), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_FREQUENCY_PENALTY, mc.frequencyPenalty), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_PRESENCE_PENALTY, mc.presencePenalty), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_STOP_SEQUENCES, (_mc_stopSequences = mc.stopSequences) === null || _mc_stopSequences === void 0 ? void 0 : _mc_stopSequences.join(", ")), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_LLM_IS_STREAMING, mc.stream), _obj)
+ attributes: (_obj = {}, _define_property$p(_obj, axSpanAttributes.LLM_SYSTEM, _this.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MODEL, _this.modelInfo.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MAX_TOKENS, mc.maxTokens), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TEMPERATURE, mc.temperature), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TOP_P, mc.topP), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TOP_K, mc.topK), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_FREQUENCY_PENALTY, mc.frequencyPenalty), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_PRESENCE_PENALTY, mc.presencePenalty), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_STOP_SEQUENCES, (_mc_stopSequences = mc.stopSequences) === null || _mc_stopSequences === void 0 ? void 0 : _mc_stopSequences.join(", ")), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_LLM_IS_STREAMING, mc.stream), _obj)
  }, function() {
  var _ref = _async_to_generator$7(function(span) {
  var res;
@@ -28794,7 +28800,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
  case 0:
  return [
  4,
- _this._chat(model, req, options, span)
+ _this._chat(_req, options, span)
  ];
  case 1:
  res = _state.sent();
@@ -28819,7 +28825,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
  case 2:
  return [
  4,
- _this._chat(model, req, options)
+ _this._chat(_req, options)
  ];
  case 3:
  return [
@@ -28833,10 +28839,10 @@ var AxBaseAI = /*#__PURE__*/ function() {
  },
  {
  key: "_chat",
- value: function _chat(model, chatReq, options, span) {
+ value: function _chat(_req, options, span) {
  var _this = this;
  return _async_to_generator$7(function() {
- var _chatReq_modelConfig, reqFn, _options_stream, stream, functions, req, fn, rv, _tmp, respFn, wrappedRespFn, doneCb, st, res;
+ var _req_modelConfig, reqFn, _options_stream, stream, functions, req, fn, rv, _tmp, respFn, wrappedRespFn, doneCb, st, res;
  return _ts_generator$8(this, function(_state) {
  switch(_state.label){
  case 0:
@@ -28844,14 +28850,11 @@ var AxBaseAI = /*#__PURE__*/ function() {
  throw new Error("generateChatReq not implemented");
  }
  reqFn = _this.generateChatReq;
- stream = (_options_stream = options === null || options === void 0 ? void 0 : options.stream) !== null && _options_stream !== void 0 ? _options_stream : (_chatReq_modelConfig = chatReq.modelConfig) === null || _chatReq_modelConfig === void 0 ? void 0 : _chatReq_modelConfig.stream;
- if (chatReq.functions && chatReq.functions.length > 0) {
- functions = chatReq.functions;
- }
- req = _object_spread_props$8(_object_spread$h({}, chatReq), {
- model: model,
+ stream = (_options_stream = options === null || options === void 0 ? void 0 : options.stream) !== null && _options_stream !== void 0 ? _options_stream : (_req_modelConfig = _req.modelConfig) === null || _req_modelConfig === void 0 ? void 0 : _req_modelConfig.stream;
+ functions = _req.functions && _req.functions.length > 0 ? _req.functions : undefined;
+ req = _object_spread_props$8(_object_spread$h({}, _req), {
  functions: functions,
- modelConfig: _object_spread_props$8(_object_spread$h({}, chatReq.modelConfig), {
+ modelConfig: _object_spread_props$8(_object_spread$h({}, _req.modelConfig), {
  stream: stream
  })
  });
@@ -28988,14 +28991,10 @@ var AxBaseAI = /*#__PURE__*/ function() {
  value: function embed(req, options) {
  var _this = this;
  return _async_to_generator$7(function() {
- var _this_modelMap, _this_modelMap_req_embedModel, embedModel, _this_tracer, _req_embedModel, _obj;
+ var _this_tracer, _obj;
  return _ts_generator$8(this, function(_state) {
  switch(_state.label){
  case 0:
- embedModel = req.embedModel ? (_this_modelMap_req_embedModel = (_this_modelMap = _this.modelMap) === null || _this_modelMap === void 0 ? void 0 : _this_modelMap[req.embedModel]) !== null && _this_modelMap_req_embedModel !== void 0 ? _this_modelMap_req_embedModel : req.embedModel : _this.models.embedModel;
- if (!embedModel) {
- throw new Error("No embed model defined");
- }
  if (!_this.tracer) return [
  3,
  2
@@ -29004,7 +29003,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
  4,
  (_this_tracer = _this.tracer) === null || _this_tracer === void 0 ? void 0 : _this_tracer.startActiveSpan("Embed Request", {
  kind: AxSpanKind.SERVER,
- attributes: (_obj = {}, _define_property$p(_obj, axSpanAttributes.LLM_SYSTEM, _this.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MODEL, (_req_embedModel = req.embedModel) !== null && _req_embedModel !== void 0 ? _req_embedModel : _this.models.embedModel), _obj)
+ attributes: (_obj = {}, _define_property$p(_obj, axSpanAttributes.LLM_SYSTEM, _this.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MODEL, _this.modelInfo.name), _obj)
  }, function() {
  var _ref = _async_to_generator$7(function(span) {
  var res;
@@ -29013,7 +29012,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
  case 0:
  return [
  4,
- _this._embed(embedModel, req, options, span)
+ _this._embed(req, options, span)
  ];
  case 1:
  res = _state.sent();
@@ -29038,7 +29037,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
  case 2:
  return [
  2,
- _this._embed(embedModel, req, options)
+ _this._embed(req, options)
  ];
  }
  });
@@ -29047,10 +29046,10 @@ var AxBaseAI = /*#__PURE__*/ function() {
  },
  {
  key: "_embed",
- value: function _embed(embedModel, embedReq, options, span) {
+ value: function _embed(req, options, span) {
  var _this = this;
  return _async_to_generator$7(function() {
- var req, fn, resValue, _tmp, res, _res_modelUsage_completionTokens, _obj;
+ var fn, resValue, _tmp, res, _res_modelUsage_completionTokens, _obj;
  return _ts_generator$8(this, function(_state) {
  switch(_state.label){
  case 0:
@@ -29060,9 +29059,6 @@ var AxBaseAI = /*#__PURE__*/ function() {
  if (!_this.generateEmbedResp) {
  throw new Error("generateEmbedResp not implemented");
  }
- req = _object_spread_props$8(_object_spread$h({}, embedReq), {
- embedModel: embedModel
- });
  fn = function() {
  var _ref = _async_to_generator$7(function() {
  var _this_generateEmbedReq, apiConfig, reqValue, res;
@@ -29144,6 +29140,22 @@ var AxBaseAI = /*#__PURE__*/ function() {
  var headers = arguments.length > 0 && arguments[0] !== void 0 ? arguments[0] : {};
  return _object_spread$h({}, headers, this.headers);
  }
+ },
+ {
+ key: "getEmbedModel",
+ value: function getEmbedModel(name) {
+ var _this_embedModelMap;
+ var _this_embedModelMap_name;
+ return name ? (_this_embedModelMap_name = (_this_embedModelMap = this.embedModelMap) === null || _this_embedModelMap === void 0 ? void 0 : _this_embedModelMap[name]) !== null && _this_embedModelMap_name !== void 0 ? _this_embedModelMap_name : name : undefined;
+ }
+ },
+ {
+ key: "getModel",
+ value: function getModel(name) {
+ var _this_modelMap;
+ var _this_modelMap_name;
+ return (_this_modelMap_name = (_this_modelMap = this.modelMap) === null || _this_modelMap === void 0 ? void 0 : _this_modelMap[name]) !== null && _this_modelMap_name !== void 0 ? _this_modelMap_name : name;
+ }
  }
  ]);
  return AxBaseAI;
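The two accessors added above reduce to a lookup-with-fallback over the optional maps. A condensed sketch of that rule in plain JavaScript; the 'fast' alias is invented for illustration:

    const modelMap = { fast: 'gpt-4o-mini' };
    const getModel = (name) => modelMap?.[name] ?? name;
    const getEmbedModel = (name) => (name ? modelMap?.[name] ?? name : undefined);
    console.log(getModel('fast'));          // "gpt-4o-mini"  (alias resolved through the map)
    console.log(getModel('gpt-4o'));        // "gpt-4o"       (unknown names pass through unchanged)
    console.log(getEmbedModel(undefined));  // undefined      (the embed model stays optional)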
@@ -29307,7 +29319,6 @@ var AxAIOpenAIModel;
  (function(AxAIOpenAIModel) {
  AxAIOpenAIModel["GPT4"] = "gpt-4";
  AxAIOpenAIModel["GPT4O"] = "gpt-4o";
- AxAIOpenAIModel["GPT4OMini"] = "gpt-4o-mini";
  AxAIOpenAIModel["GPT4Turbo"] = "gpt-4-turbo";
  AxAIOpenAIModel["GPT35Turbo"] = "gpt-3.5-turbo";
  AxAIOpenAIModel["GPT35TurboInstruct"] = "gpt-3.5-turbo-instruct";
@@ -29338,12 +29349,6 @@ var AxAIOpenAIEmbedModel;
  promptTokenCostPer1M: 5,
  completionTokenCostPer1M: 15
  },
- {
- name: AxAIOpenAIModel.GPT4OMini,
- currency: "usd",
- promptTokenCostPer1M: 0.15,
- completionTokenCostPer1M: 0.6
- },
  {
  name: AxAIOpenAIModel.GPT4Turbo,
  currency: "usd",
@@ -29499,7 +29504,7 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
  _inherits$f(AxAIOpenAI, AxBaseAI);
  var _super = _create_super$f(AxAIOpenAI);
  function AxAIOpenAI(param) {
- var apiKey = param.apiKey, config = param.config, options = param.options, apiURL = param.apiURL, _param_modelInfo = param.modelInfo, modelInfo = _param_modelInfo === void 0 ? axModelInfoOpenAI : _param_modelInfo, modelMap = param.modelMap;
+ var apiKey = param.apiKey, config = param.config, options = param.options, apiURL = param.apiURL, _param_modelInfo = param.modelInfo, modelInfo = _param_modelInfo === void 0 ? axModelInfoOpenAI : _param_modelInfo;
  _class_call_check$o(this, AxAIOpenAI);
  var _this;
  if (!apiKey || apiKey === "") {
@@ -29521,16 +29526,14 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
  supportFor: {
  functions: true,
  streaming: true
- },
- modelMap: modelMap
+ }
  });
  _define_property$o(_assert_this_initialized$f(_this), "config", void 0);
  _define_property$o(_assert_this_initialized$f(_this), "streamingUsage", void 0);
- _define_property$o(_assert_this_initialized$f(_this), "dimensions", void 0);
  _define_property$o(_assert_this_initialized$f(_this), "generateChatReq", function(req, // eslint-disable-next-line @typescript-eslint/no-unused-vars
  _config) {
  var _req_functions, _req_modelConfig, _req_modelConfig1, _this_config, _this_config1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4, _req_modelConfig5, _req_modelConfig6, _req_modelConfig7;
- var model = req.model;
+ var model = _this.config.model;
  if (!req.chatPrompt || req.chatPrompt.length === 0) {
  throw new Error("Chat prompt is empty");
  }
@@ -29583,7 +29586,7 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
  ];
  });
  _define_property$o(_assert_this_initialized$f(_this), "generateEmbedReq", function(req) {
- var model = req.embedModel;
+ var model = _this.config.embedModel;
  if (!model) {
  throw new Error("Embed model not set");
  }
@@ -29595,8 +29598,7 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
  };
  var reqValue = {
  model: model,
- input: req.texts,
- dimensions: _this.dimensions
+ input: req.texts
  };
  return [
  apiConfig,
@@ -29705,7 +29707,6 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
  _this.config = _config;
  var _options_streamingUsage;
  _this.streamingUsage = (_options_streamingUsage = options === null || options === void 0 ? void 0 : options.streamingUsage) !== null && _options_streamingUsage !== void 0 ? _options_streamingUsage : true;
- _this.dimensions = config === null || config === void 0 ? void 0 : config.dimensions;
  return _this;
  }
  _create_class$b(AxAIOpenAI, [
@@ -29939,7 +29940,7 @@ var AxAIAzureOpenAI = /*#__PURE__*/ function(AxAIOpenAI) {
  _inherits$e(AxAIAzureOpenAI, AxAIOpenAI);
  var _super = _create_super$e(AxAIAzureOpenAI);
  function AxAIAzureOpenAI(param) {
- var apiKey = param.apiKey, resourceName = param.resourceName, deploymentName = param.deploymentName, _param_version = param.version, version = _param_version === void 0 ? "api-version=2024-02-15-preview" : _param_version, config = param.config, options = param.options, modelMap = param.modelMap;
+ var apiKey = param.apiKey, resourceName = param.resourceName, deploymentName = param.deploymentName, _param_version = param.version, version = _param_version === void 0 ? "api-version=2024-02-15-preview" : _param_version, config = param.config, options = param.options;
  _class_call_check$n(this, AxAIAzureOpenAI);
  if (!apiKey || apiKey === "") {
  throw new Error("Azure OpenAPI API key not set");
@@ -29954,8 +29955,7 @@ var AxAIAzureOpenAI = /*#__PURE__*/ function(AxAIOpenAI) {
  var _this = _super.call(this, {
  apiKey: apiKey,
  config: _config,
- options: options,
- modelMap: modelMap
+ options: options
  });
  var host = resourceName.includes("://") ? resourceName : "https://".concat(resourceName, ".openai.azure.com/");
  _get$5((_assert_this_initialized$e(_this), _get_prototype_of$e(AxAIAzureOpenAI.prototype)), "setName", _this).call(_this, "Azure OpenAI");
@@ -30101,7 +30101,7 @@ var AxAIHuggingFace = /*#__PURE__*/ function(AxBaseAI) {
  _inherits$d(AxAIHuggingFace, AxBaseAI);
  var _super = _create_super$d(AxAIHuggingFace);
  function AxAIHuggingFace(param) {
- var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
+ var apiKey = param.apiKey, config = param.config, options = param.options;
  _class_call_check$m(this, AxAIHuggingFace);
  var _this;
  if (!apiKey || apiKey === "") {
@@ -30122,14 +30122,13 @@ var AxAIHuggingFace = /*#__PURE__*/ function(AxBaseAI) {
  supportFor: {
  functions: false,
  streaming: false
- },
- modelMap: modelMap
+ }
  });
  _define_property$m(_assert_this_initialized$d(_this), "config", void 0);
  _define_property$m(_assert_this_initialized$d(_this), "generateChatReq", function(req, // eslint-disable-next-line @typescript-eslint/no-unused-vars
  _config) {
  var _req_chatPrompt, _req_modelConfig, _req_modelConfig1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4;
- var model = req.model;
+ var model = _this.config.model;
  var functionsList = req.functions ? "Functions:\n".concat(JSON.stringify(req.functions, null, 2), "\n") : "";
  var prompt = (_req_chatPrompt = req.chatPrompt) === null || _req_chatPrompt === void 0 ? void 0 : _req_chatPrompt.map(function(msg) {
  switch(msg.role){
@@ -30351,7 +30350,7 @@ var AxAITogether = /*#__PURE__*/ function(AxAIOpenAI) {
  _inherits$c(AxAITogether, AxAIOpenAI);
  var _super = _create_super$c(AxAITogether);
  function AxAITogether(param) {
- var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
+ var apiKey = param.apiKey, config = param.config, options = param.options;
  _class_call_check$l(this, AxAITogether);
  if (!apiKey || apiKey === "") {
  throw new Error("Together API key not set");
@@ -30362,8 +30361,7 @@ var AxAITogether = /*#__PURE__*/ function(AxAIOpenAI) {
  config: _config,
  options: options,
  apiURL: "https://api.together.xyz/v1",
- modelInfo: axModelInfoTogether,
- modelMap: modelMap
+ modelInfo: axModelInfoTogether
  });
  _get$4((_assert_this_initialized$c(_this), _get_prototype_of$c(AxAITogether.prototype)), "setName", _this).call(_this, "Together");
  return _this;
@@ -30637,7 +30635,7 @@ var AxAICohere = /*#__PURE__*/ function(AxBaseAI) {
  _inherits$b(AxAICohere, AxBaseAI);
  var _super = _create_super$b(AxAICohere);
  function AxAICohere(param) {
- var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
+ var apiKey = param.apiKey, config = param.config, options = param.options;
  _class_call_check$k(this, AxAICohere);
  var _this;
  if (!apiKey || apiKey === "") {
@@ -30658,14 +30656,13 @@ var AxAICohere = /*#__PURE__*/ function(AxBaseAI) {
  functions: true,
  streaming: true
  },
- options: options,
- modelMap: modelMap
+ options: options
  });
  _define_property$k(_assert_this_initialized$b(_this), "config", void 0);
  _define_property$k(_assert_this_initialized$b(_this), "generateChatReq", function(req, // eslint-disable-next-line @typescript-eslint/no-unused-vars
  _config) {
  var _req_functions, _req_modelConfig, _req_modelConfig1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4, _req_modelConfig5, _req_modelConfig6;
- var model = req.model;
+ var model = _this.config.model;
  var lastChatMsg = req.chatPrompt.at(-1);
  var restOfChat = req.chatPrompt.slice(0, -1);
  var message;
@@ -30760,7 +30757,7 @@ var AxAICohere = /*#__PURE__*/ function(AxBaseAI) {
  ];
  });
  _define_property$k(_assert_this_initialized$b(_this), "generateEmbedReq", function(req) {
- var model = req.embedModel;
+ var model = _this.config.embedModel;
  if (!model) {
  throw new Error("Embed model not set");
  }
@@ -31159,7 +31156,7 @@ var safetySettings = [
  _inherits$a(AxAIGoogleGemini, AxBaseAI);
  var _super = _create_super$a(AxAIGoogleGemini);
  function AxAIGoogleGemini(param) {
- var apiKey = param.apiKey, projectId = param.projectId, region = param.region, config = param.config, options = param.options, modelMap = param.modelMap;
+ var apiKey = param.apiKey, projectId = param.projectId, region = param.region, config = param.config, options = param.options;
  _class_call_check$j(this, AxAIGoogleGemini);
  var _this;
  if (!apiKey || apiKey === "") {
@@ -31183,15 +31180,14 @@ var safetySettings = [
  supportFor: {
  functions: true,
  streaming: true
- },
- modelMap: modelMap
+ }
  });
  _define_property$j(_assert_this_initialized$a(_this), "options", void 0);
  _define_property$j(_assert_this_initialized$a(_this), "config", void 0);
  _define_property$j(_assert_this_initialized$a(_this), "apiKey", void 0);
  _define_property$j(_assert_this_initialized$a(_this), "generateChatReq", function(req) {
  var _req_modelConfig, _this_options, _req_modelConfig1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4, _req_modelConfig5;
- var model = req.model;
+ var model = _this.config.model;
  var _req_modelConfig_stream;
  var stream = (_req_modelConfig_stream = (_req_modelConfig = req.modelConfig) === null || _req_modelConfig === void 0 ? void 0 : _req_modelConfig.stream) !== null && _req_modelConfig_stream !== void 0 ? _req_modelConfig_stream : _this.config.stream;
  if (!req.chatPrompt || req.chatPrompt.length === 0) {
@@ -31371,7 +31367,7 @@ var safetySettings = [
  ];
  });
  _define_property$j(_assert_this_initialized$a(_this), "generateEmbedReq", function(req) {
- var model = req.embedModel;
+ var model = _this.config.embedModel;
  if (!model) {
  throw new Error("Embed model not set");
  }
@@ -31735,7 +31731,7 @@ var AxAIAnthropic = /*#__PURE__*/ function(AxBaseAI) {
  _inherits$9(AxAIAnthropic, AxBaseAI);
  var _super = _create_super$9(AxAIAnthropic);
  function AxAIAnthropic(param) {
- var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
+ var apiKey = param.apiKey, config = param.config, options = param.options;
  _class_call_check$i(this, AxAIAnthropic);
  var _this;
  if (!apiKey || apiKey === "") {
@@ -31757,13 +31753,12 @@ var AxAIAnthropic = /*#__PURE__*/ function(AxBaseAI) {
  supportFor: {
  functions: true,
  streaming: true
- },
- modelMap: modelMap
+ }
  });
  _define_property$i(_assert_this_initialized$9(_this), "config", void 0);
  _define_property$i(_assert_this_initialized$9(_this), "generateChatReq", function(req) {
  var _req_functions, _req_modelConfig, _req_modelConfig1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4, _req_modelConfig5;
- var model = req.model;
+ var model = _this.config.model;
  var apiConfig = {
  name: "/messages"
  };
@@ -32660,7 +32655,7 @@ var AxAIGroq = /*#__PURE__*/ function(AxAIOpenAI) {
  _inherits$8(AxAIGroq, AxAIOpenAI);
  var _super = _create_super$8(AxAIGroq);
  function AxAIGroq(param) {
- var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
+ var apiKey = param.apiKey, config = param.config, options = param.options;
  _class_call_check$g(this, AxAIGroq);
  var _this;
  if (!apiKey || apiKey === "") {
@@ -32675,8 +32670,7 @@ var AxAIGroq = /*#__PURE__*/ function(AxAIOpenAI) {
  config: _config,
  options: _options,
  apiURL: "https://api.groq.com/openai/v1",
- modelInfo: [],
- modelMap: modelMap
+ modelInfo: []
  });
  _define_property$g(_assert_this_initialized$8(_this), "setOptions", function(options) {
  var rateLimiter = _this.newRateLimiter(options);
@@ -32732,23 +32726,19 @@ var AxAIGroq = /*#__PURE__*/ function(AxAIOpenAI) {
  }
  (AxAIOpenAI);
 
- // cspell:ignore mistral, mixtral, codestral, nemo
  var AxAIMistralModel;
  (function(AxAIMistralModel) {
  AxAIMistralModel["Mistral7B"] = "open-mistral-7b";
  AxAIMistralModel["Mistral8x7B"] = "open-mixtral-8x7b";
  AxAIMistralModel["MistralSmall"] = "mistral-small-latest";
+ AxAIMistralModel["MistralMedium"] = "mistral-medium-latest";
  AxAIMistralModel["MistralLarge"] = "mistral-large-latest";
- AxAIMistralModel["Codestral"] = "codestral-latest";
- AxAIMistralModel["OpenCodestralMamba"] = "open-codestral-mamba";
- AxAIMistralModel["OpenMistralNemo"] = "open-mistral-nemo-latest";
  })(AxAIMistralModel || (AxAIMistralModel = {}));
  var AxAIMistralEmbedModels;
  (function(AxAIMistralEmbedModels) {
  AxAIMistralEmbedModels["MistralEmbed"] = "mistral-embed";
  })(AxAIMistralEmbedModels || (AxAIMistralEmbedModels = {}));
 
- // cspell:ignore mistral, mixtral, codestral, nemo
  var axModelInfoMistral = [
  {
  name: AxAIMistralModel.Mistral7B,
@@ -32769,28 +32759,16 @@ var axModelInfoMistral = [
  completionTokenCostPer1M: 6
  },
  {
- name: AxAIMistralModel.MistralLarge,
- currency: "USD",
- promptTokenCostPer1M: 8,
- completionTokenCostPer1M: 24
- },
- {
- name: AxAIMistralModel.Codestral,
+ name: AxAIMistralModel.MistralMedium,
  currency: "USD",
- promptTokenCostPer1M: 1,
- completionTokenCostPer1M: 3
+ promptTokenCostPer1M: 2.7,
+ completionTokenCostPer1M: 8.1
  },
  {
- name: AxAIMistralModel.OpenCodestralMamba,
- currency: "USD",
- promptTokenCostPer1M: 0.25,
- completionTokenCostPer1M: 0.25
- },
- {
- name: AxAIMistralModel.OpenMistralNemo,
+ name: AxAIMistralModel.MistralLarge,
  currency: "USD",
- promptTokenCostPer1M: 0.3,
- completionTokenCostPer1M: 0.3
+ promptTokenCostPer1M: 8,
+ completionTokenCostPer1M: 24
  }
  ];
 
@@ -32925,7 +32903,7 @@ var AxAIMistral = /*#__PURE__*/ function(AxAIOpenAI) {
  _inherits$7(AxAIMistral, AxAIOpenAI);
  var _super = _create_super$7(AxAIMistral);
  function AxAIMistral(param) {
- var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
+ var apiKey = param.apiKey, config = param.config, options = param.options;
  _class_call_check$f(this, AxAIMistral);
  if (!apiKey || apiKey === "") {
  throw new Error("Mistral API key not set");
@@ -32936,8 +32914,7 @@ var AxAIMistral = /*#__PURE__*/ function(AxAIOpenAI) {
  config: _config,
  options: options,
  apiURL: "https://api.mistral.ai/v1",
- modelInfo: axModelInfoMistral,
- modelMap: modelMap
+ modelInfo: axModelInfoMistral
  });
  _get$2((_assert_this_initialized$7(_this), _get_prototype_of$7(AxAIMistral.prototype)), "setName", _this).call(_this, "Mistral");
  return _this;
@@ -33101,7 +33078,7 @@ var AxAIDeepSeek = /*#__PURE__*/ function(AxAIOpenAI) {
  _inherits$6(AxAIDeepSeek, AxAIOpenAI);
  var _super = _create_super$6(AxAIDeepSeek);
  function AxAIDeepSeek(param) {
- var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
+ var apiKey = param.apiKey, config = param.config, options = param.options;
  _class_call_check$e(this, AxAIDeepSeek);
  if (!apiKey || apiKey === "") {
  throw new Error("DeepSeek API key not set");
@@ -33112,8 +33089,7 @@ var AxAIDeepSeek = /*#__PURE__*/ function(AxAIOpenAI) {
  config: _config,
  options: options,
  apiURL: "https://api.deepseek.com",
- modelInfo: axModelInfoDeepSeek,
- modelMap: modelMap
+ modelInfo: axModelInfoDeepSeek
  });
  _get$1((_assert_this_initialized$6(_this), _get_prototype_of$6(AxAIDeepSeek.prototype)), "setName", _this).call(_this, "DeepSeek");
  return _this;
@@ -33281,15 +33257,14 @@ var axAIOllamaDefaultConfig = function() {
  _inherits$5(AxAIOllama, AxAIOpenAI);
  var _super = _create_super$5(AxAIOllama);
  function AxAIOllama(param) {
- var _param_apiKey = param.apiKey, apiKey = _param_apiKey === void 0 ? "not-set" : _param_apiKey, _param_url = param.url, url = _param_url === void 0 ? "http://localhost:11434" : _param_url, config = param.config, options = param.options, modelMap = param.modelMap;
+ var _param_apiKey = param.apiKey, apiKey = _param_apiKey === void 0 ? "not-set" : _param_apiKey, _param_url = param.url, url = _param_url === void 0 ? "http://localhost:11434" : _param_url, config = param.config, options = param.options;
  _class_call_check$d(this, AxAIOllama);
  var _config = _object_spread$6({}, axAIOllamaDefaultConfig(), config);
  var _this = _super.call(this, {
  apiKey: apiKey,
  options: options,
  config: _config,
- apiURL: new URL("/api", url).href,
- modelMap: modelMap
+ apiURL: new URL("/v1", url).href
  });
  _get((_assert_this_initialized$5(_this), _get_prototype_of$5(AxAIOllama.prototype)), "setName", _this).call(_this, "Ollama");
  return _this;
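The AxAIOllama base URL now resolves to Ollama's OpenAI-compatible /v1 endpoint instead of the native /api routes, which is consistent with the class delegating to AxAIOpenAI. A quick check of the resolution, using the constructor's default url shown above:

    const url = 'http://localhost:11434';
    console.log(new URL('/v1', url).href); // "http://localhost:11434/v1" (0.21.14 built ".../api")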
@@ -33497,6 +33472,18 @@ var AxAI = /*#__PURE__*/ function() {
  }
  }
  _create_class$5(AxAI, [
+ {
+ key: "setModelMap",
+ value: function setModelMap(modelMap) {
+ this.ai.setModelMap(modelMap);
+ }
+ },
+ {
+ key: "setEmbedModelMap",
+ value: function setEmbedModelMap(modelMap) {
+ this.ai.setEmbedModelMap(modelMap);
+ }
+ },
  {
  key: "getName",
  value: function getName() {
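Model aliasing now happens after construction: the modelMap constructor option removed throughout this diff is replaced by the setModelMap/setEmbedModelMap setters added here on AxAI (delegating to the wrapped provider) and on AxBaseAI. A hedged sketch of the new call pattern, assuming the bundled AxAIOpenAI class is reachable; the alias names and embed model are invented for illustration:

    const ai = new AxAIOpenAI({
      apiKey: process.env['OPENAI_API_KEY'],
      config: { model: 'gpt-4o-mini' }   // the model now comes from config, not from each request
    });
    ai.setModelMap({ fast: 'gpt-4o-mini', smart: 'gpt-4o' });
    ai.setEmbedModelMap({ embed: 'text-embedding-ada-002' });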
@@ -33527,12 +33514,6 @@ var AxAI = /*#__PURE__*/ function() {
  return this.ai.getFeatures();
  }
  },
- {
- key: "getModelMap",
- value: function getModelMap() {
- return this.ai.getModelMap();
- }
- },
  {
  key: "chat",
  value: function chat(req, options) {
@@ -34091,8 +34072,8 @@ var assertRequiredFields = function(sig, values) {
  });
  if (missingFields.length > 0) {
  throw new AxAssertionError({
- message: "Output must include: t: ".concat(missingFields.map(function(f) {
- return "`".concat(f.title, ":`");
+ message: "Missing required fields: ".concat(missingFields.map(function(f) {
+ return f.name;
  }).join(", ")),
  values: values
  });
@@ -38898,7 +38879,7 @@ var AxPromptTemplate = function AxPromptTemplate(sig, fieldTemplates) {
  this.outputFormat = {
  type: "text",
  text: [
- "Use the following output format."
+ "Follow the following format."
  ].concat(_to_consumable_array$2(this.renderOutFields(this.sig.getOutputFields())), [
  "---\n\n"
  ]).join("\n\n")
@@ -40714,7 +40695,9 @@ let DspService = class DspService {
  }
  async modelOpenAI(params, _pinsSettingsList, context) {
  var _context_privates_OPENAI_API_KEY, _context_privates_OPENAI_SERVER;
- const { apiKey = (_context_privates_OPENAI_API_KEY = context.privates.OPENAI_API_KEY) != null ? _context_privates_OPENAI_API_KEY : process.env['OPENAI_API_KEY'], apiURL = (_context_privates_OPENAI_SERVER = context.privates.OPENAI_SERVER) != null ? _context_privates_OPENAI_SERVER : process.env['OPENAI_SERVER'], config, options } = params;
+ const { apiKey = (_context_privates_OPENAI_API_KEY = context.privates.OPENAI_API_KEY) != null ? _context_privates_OPENAI_API_KEY : process.env['OPENAI_API_KEY'], apiURL = (_context_privates_OPENAI_SERVER = context.privates.OPENAI_SERVER) != null ? _context_privates_OPENAI_SERVER : process.env['OPENAI_SERVER'], config = {
+ model: 'gpt-4o-mini'
+ }, options } = params;
  const modelInstance = new AxAIOpenAI({
  apiKey,
  apiURL,
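The destructuring default added above means a skill call that omits config now falls back to gpt-4o-mini explicitly at this level. The pattern in isolation, as a small runnable check:

    const params = {};
    const { config = { model: 'gpt-4o-mini' } } = params;
    console.log(config.model); // "gpt-4o-mini" when the skill parameters carry no config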