@digipair/skill-dsp 0.21.14 → 0.22.1

This diff compares the published contents of two package versions released to a supported public registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in that registry.
package/index.esm.js CHANGED
@@ -20332,7 +20332,6 @@ about the parse state.
20332
20332
  var _a;
20333
20333
  var depth = action >> 19 /* Action.ReduceDepthShift */ , type = action & 65535 /* Action.ValueMask */ ;
20334
20334
  var parser = this.p.parser;
20335
- if (this.reducePos < this.pos - 25 /* Lookahead.Margin */ ) this.setLookAhead(this.pos);
20336
20335
  var dPrec = parser.dynamicPrecedence(type);
20337
20336
  if (dPrec) this.score += dPrec;
20338
20337
  if (depth == 0) {
@@ -21285,7 +21284,7 @@ function cutAt(tree, pos, side) {
21285
21284
  cursor.moveTo(pos);
21286
21285
  for(;;){
21287
21286
  if (!(side < 0 ? cursor.childBefore(pos) : cursor.childAfter(pos))) for(;;){
21288
- if ((side < 0 ? cursor.to < pos : cursor.from > pos) && !cursor.type.isError) return side < 0 ? Math.max(0, Math.min(cursor.to - 1, pos - 25 /* Lookahead.Margin */ )) : Math.min(tree.length, Math.max(cursor.from + 1, pos + 25 /* Lookahead.Margin */ ));
21287
+ if ((side < 0 ? cursor.to < pos : cursor.from > pos) && !cursor.type.isError) return side < 0 ? Math.max(0, Math.min(cursor.to - 1, pos - 25 /* Safety.Margin */ )) : Math.min(tree.length, Math.max(cursor.from + 1, pos + 25 /* Safety.Margin */ ));
21289
21288
  if (side < 0 ? cursor.prevSibling() : cursor.nextSibling()) break;
21290
21289
  if (!cursor.parent()) return side < 0 ? 0 : tree.length;
21291
21290
  }
@@ -21408,7 +21407,7 @@ var TokenCache = /*#__PURE__*/ function() {
21408
21407
  token.mask = mask;
21409
21408
  token.context = context;
21410
21409
  }
21411
- if (token.lookAhead > token.end + 25 /* Lookahead.Margin */ ) lookAhead = Math.max(token.lookAhead, lookAhead);
21410
+ if (token.lookAhead > token.end + 25 /* Safety.Margin */ ) lookAhead = Math.max(token.lookAhead, lookAhead);
21412
21411
  if (token.value != 0 /* Term.Err */ ) {
21413
21412
  var startIndex = actionIndex;
21414
21413
  if (token.extended > -1) actionIndex = this.addActions(stack, token.extended, token.end, actionIndex);
@@ -23499,14 +23498,14 @@ function indent(str, spaces) {
23499
23498
  var match = parseIdentifier(input, i1, namePart) || namePart && parseAdditionalSymbol(input, i1) || maybeSpace && parseSpaces(input, i1);
23500
23499
  // match is required
23501
23500
  if (!match) {
23502
- return tokens = tokens1, i = i1, nextMatch = nextMatch1, {
23501
+ return tokens = tokens1, nextMatch = nextMatch1, i = i1, {
23503
23502
  v: nextMatch1
23504
23503
  };
23505
23504
  }
23506
23505
  var token = match.token, offset = match.offset;
23507
23506
  i1 += offset;
23508
23507
  if (token === " ") {
23509
- return tokens = tokens1, i = i1, nextMatch = nextMatch1, "continue";
23508
+ return tokens = tokens1, nextMatch = nextMatch1, i = i1, "continue";
23510
23509
  }
23511
23510
  tokens1 = _to_consumable_array$6(tokens1).concat([
23512
23511
  token
@@ -23525,7 +23524,7 @@ function indent(str, spaces) {
23525
23524
  if (contextKeys.some(function(el) {
23526
23525
  return el.startsWith(name);
23527
23526
  })) {
23528
- return tokens = tokens1, i = i1, nextMatch = nextMatch1, "continue";
23527
+ return tokens = tokens1, nextMatch = nextMatch1, i = i1, "continue";
23529
23528
  }
23530
23529
  if (dateTimeIdentifiers.some(function(el) {
23531
23530
  return el === name;
@@ -23544,9 +23543,9 @@ function indent(str, spaces) {
23544
23543
  if (dateTimeIdentifiers.some(function(el) {
23545
23544
  return el.startsWith(name);
23546
23545
  })) {
23547
- return tokens = tokens1, i = i1, nextMatch = nextMatch1, "continue";
23546
+ return tokens = tokens1, nextMatch = nextMatch1, i = i1, "continue";
23548
23547
  }
23549
- return tokens = tokens1, i = i1, nextMatch = nextMatch1, {
23548
+ return tokens = tokens1, nextMatch = nextMatch1, i = i1, {
23550
23549
  v: nextMatch1
23551
23550
  };
23552
23551
  };
@@ -24349,7 +24348,6 @@ var parser = LRParser.deserialize({
24349
24348
  },
24350
24349
  dynamicPrecedences: {
24351
24350
  "31": -1,
24352
- "67": 1,
24353
24351
  "71": -1,
24354
24352
  "73": -1
24355
24353
  },
@@ -27352,7 +27350,11 @@ const applyTemplate = (value, context)=>{
27352
27350
  if (result.startsWith('EVALUATE:')) {
27353
27351
  const path = result.replace(/^EVALUATE:/, '');
27354
27352
  result = evaluate(path, _extends({}, context, {
27355
- getTime: (time)=>new Date(time).getTime()
27353
+ getTime: (time)=>new Date(time).getTime(),
27354
+ atob: (value)=>atob(value),
27355
+ btoa: (value)=>btoa(value),
27356
+ encodeURIComponent: (value)=>encodeURIComponent(value),
27357
+ decodeURIComponent: (value)=>decodeURIComponent(value)
27356
27358
  }));
27357
27359
  }
27358
27360
  } else if (typeof value === 'object' && Array.isArray(value)) {
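Values that start with 'EVALUATE:' are handed to evaluate() together with the context extended by the helpers listed above. A minimal sketch of template values that could rely on them, assuming the expression evaluator resolves names such as user, password, query and encodedPayload from the supplied context (those names are illustrative; only getTime, atob, btoa, encodeURIComponent and decodeURIComponent come from the code above):

// Hypothetical template values handled by applyTemplate.
const template = {
  // base64-encode credentials for a Basic auth header
  authorization: "EVALUATE:'Basic ' + btoa(user + ':' + password)",
  // build a URL-safe query string from a free-text term
  searchUrl: "EVALUATE:'https://example.com/search?q=' + encodeURIComponent(query)",
  // decode a base64 payload carried in the context
  payload: 'EVALUATE:atob(encodedPayload)',
  // getTime was already exposed in earlier versions
  since: 'EVALUATE:getTime(startDate)',
};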
@@ -28607,7 +28609,7 @@ var axBaseAIDefaultConfig = function() {
28607
28609
  };
28608
28610
  var AxBaseAI = /*#__PURE__*/ function() {
28609
28611
  function AxBaseAI(param) {
28610
- var name = param.name, apiURL = param.apiURL, headers = param.headers, modelInfo = param.modelInfo, models = param.models, _param_options = param.options, options = _param_options === void 0 ? {} : _param_options, supportFor = param.supportFor, modelMap = param.modelMap;
28612
+ var name = param.name, apiURL = param.apiURL, headers = param.headers, modelInfo = param.modelInfo, models = param.models, _param_options = param.options, options = _param_options === void 0 ? {} : _param_options, supportFor = param.supportFor;
28611
28613
  _class_call_check$p(this, AxBaseAI);
28612
28614
  _define_property$p(this, "generateChatReq", void 0);
28613
28615
  _define_property$p(this, "generateEmbedReq", void 0);
@@ -28619,29 +28621,48 @@ var AxBaseAI = /*#__PURE__*/ function() {
28619
28621
  _define_property$p(this, "fetch", void 0);
28620
28622
  _define_property$p(this, "tracer", void 0);
28621
28623
  _define_property$p(this, "modelMap", void 0);
28622
- _define_property$p(this, "modelInfo", void 0);
28624
+ _define_property$p(this, "embedModelMap", void 0);
28623
28625
  _define_property$p(this, "modelUsage", void 0);
28624
28626
  _define_property$p(this, "embedModelUsage", void 0);
28625
- _define_property$p(this, "models", void 0);
28626
28627
  _define_property$p(this, "apiURL", void 0);
28627
28628
  _define_property$p(this, "name", void 0);
28628
28629
  _define_property$p(this, "headers", void 0);
28630
+ _define_property$p(this, "modelInfo", void 0);
28631
+ _define_property$p(this, "embedModelInfo", void 0);
28629
28632
  _define_property$p(this, "supportFor", void 0);
28630
28633
  this.name = name;
28631
28634
  this.apiURL = apiURL;
28632
28635
  this.headers = headers;
28633
28636
  this.supportFor = supportFor;
28634
28637
  this.tracer = options.tracer;
28635
- this.modelInfo = modelInfo;
28636
- this.modelMap = modelMap;
28637
- var _modelMap_models_model, _models_embedModel, _modelMap_;
28638
- this.models = {
28639
- model: (_modelMap_models_model = modelMap === null || modelMap === void 0 ? void 0 : modelMap[models.model]) !== null && _modelMap_models_model !== void 0 ? _modelMap_models_model : models.model,
28640
- embedModel: (_modelMap_ = modelMap === null || modelMap === void 0 ? void 0 : modelMap[(_models_embedModel = models.embedModel) !== null && _models_embedModel !== void 0 ? _models_embedModel : ""]) !== null && _modelMap_ !== void 0 ? _modelMap_ : models.embedModel
28641
- };
28642
- if (!models.model || typeof models.model !== "string" || models.model === "") {
28638
+ var model = this.getModel(models.model);
28639
+ var embedModel = this.getEmbedModel(models.embedModel);
28640
+ if (typeof model === "string") {
28641
+ var modelName = model.replace(/-0\d+$|-\d{2,}$/, "");
28642
+ var _modelInfo_filter_at;
28643
+ this.modelInfo = (_modelInfo_filter_at = modelInfo.filter(function(v) {
28644
+ return v.name === modelName;
28645
+ }).at(0)) !== null && _modelInfo_filter_at !== void 0 ? _modelInfo_filter_at : {
28646
+ name: model,
28647
+ currency: "usd",
28648
+ promptTokenCostPer1M: 0,
28649
+ completionTokenCostPer1M: 0
28650
+ };
28651
+ } else {
28643
28652
  throw new Error("No model defined");
28644
28653
  }
28654
+ if (typeof embedModel === "string") {
28655
+ var embedModelName = embedModel === null || embedModel === void 0 ? void 0 : embedModel.replace(/-0\d+$|-\d{2,}$/, "");
28656
+ var _modelInfo_filter_at1;
28657
+ this.embedModelInfo = (_modelInfo_filter_at1 = modelInfo.filter(function(v) {
28658
+ return v.name === embedModelName;
28659
+ }).at(0)) !== null && _modelInfo_filter_at1 !== void 0 ? _modelInfo_filter_at1 : {
28660
+ name: embedModel !== null && embedModel !== void 0 ? embedModel : "",
28661
+ currency: "usd",
28662
+ promptTokenCostPer1M: 0,
28663
+ completionTokenCostPer1M: 0
28664
+ };
28665
+ }
28645
28666
  this.setOptions(options);
28646
28667
  }
28647
28668
  _create_class$c(AxBaseAI, [
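De-minified, the constructor logic above resolves the requested model through getModel()/getEmbedModel(), strips a trailing version suffix, and looks the result up in modelInfo, falling back to a zero-cost entry when the model is not listed. A readable sketch of the same steps (not part of the bundle; the embed model, when present, is handled identically via embedModelInfo):

const model = this.getModel(models.model);             // alias -> provider model name
if (typeof model !== 'string') throw new Error('No model defined');

const baseName = model.replace(/-0\d+$|-\d{2,}$/, ''); // drop "-0613"-style version suffixes
this.modelInfo = modelInfo.find((v) => v.name === baseName) ?? {
  name: model,
  currency: 'usd',
  promptTokenCostPer1M: 0,
  completionTokenCostPer1M: 0,
};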
@@ -28681,27 +28702,21 @@ var AxBaseAI = /*#__PURE__*/ function() {
28681
28702
  }
28682
28703
  },
28683
28704
  {
28684
- key: "_getModelInfo",
28685
- value: function _getModelInfo(model) {
28686
- var _this_modelMap;
28687
- var _this_modelMap_model;
28688
- var _model = (_this_modelMap_model = (_this_modelMap = this.modelMap) === null || _this_modelMap === void 0 ? void 0 : _this_modelMap[model]) !== null && _this_modelMap_model !== void 0 ? _this_modelMap_model : model;
28689
- var modelName = _model.replace(/-0\d+$|-\d{2,}$/, "");
28690
- var _this_modelInfo_filter_at;
28691
- return (_this_modelInfo_filter_at = this.modelInfo.filter(function(v) {
28692
- return v.name === modelName;
28693
- }).at(0)) !== null && _this_modelInfo_filter_at !== void 0 ? _this_modelInfo_filter_at : {
28694
- name: model,
28695
- currency: "usd",
28696
- promptTokenCostPer1M: 0,
28697
- completionTokenCostPer1M: 0
28698
- };
28705
+ key: "setModelMap",
28706
+ value: function setModelMap(modelMap) {
28707
+ this.modelMap = modelMap;
28708
+ }
28709
+ },
28710
+ {
28711
+ key: "setEmbedModelMap",
28712
+ value: function setEmbedModelMap(embedModelMap) {
28713
+ this.embedModelMap = embedModelMap;
28699
28714
  }
28700
28715
  },
28701
28716
  {
28702
28717
  key: "getModelInfo",
28703
28718
  value: function getModelInfo() {
28704
- return _object_spread_props$8(_object_spread$h({}, this._getModelInfo(this.models.model)), {
28719
+ return _object_spread_props$8(_object_spread$h({}, this.modelInfo), {
28705
28720
  provider: this.name
28706
28721
  });
28707
28722
  }
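Model aliases are now attached through the setters above rather than through a constructor option. A minimal usage sketch, assuming `ai` is any AxBaseAI subclass instance; the alias and model names are illustrative, not taken from this diff:

// Chat-model aliases are resolved by getModel(); embed aliases by getEmbedModel().
ai.setModelMap({ 'chat-default': 'gpt-4o' });
ai.setEmbedModelMap({ 'embed-default': 'text-embedding-3-small' });
// getModelInfo() now returns the precomputed model info plus the provider name.
const info = ai.getModelInfo(); // { name, currency, promptTokenCostPer1M, ..., provider }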
@@ -28709,17 +28724,9 @@ var AxBaseAI = /*#__PURE__*/ function() {
28709
28724
  {
28710
28725
  key: "getEmbedModelInfo",
28711
28726
  value: function getEmbedModelInfo() {
28712
- if (this.models.embedModel) {
28713
- return _object_spread_props$8(_object_spread$h({}, this._getModelInfo(this.models.embedModel)), {
28714
- provider: this.name
28715
- });
28716
- }
28717
- }
28718
- },
28719
- {
28720
- key: "getModelMap",
28721
- value: function getModelMap() {
28722
- return this.modelMap;
28727
+ return this.embedModelInfo ? _object_spread_props$8(_object_spread$h({}, this.embedModelInfo), {
28728
+ provider: this.name
28729
+ }) : undefined;
28723
28730
  }
28724
28731
  },
28725
28732
  {
@@ -28742,14 +28749,13 @@ var AxBaseAI = /*#__PURE__*/ function() {
28742
28749
  },
28743
28750
  {
28744
28751
  key: "chat",
28745
- value: function chat(req, options) {
28752
+ value: function chat(_req, options) {
28746
28753
  var _this = this;
28747
28754
  return _async_to_generator$7(function() {
28748
- var _this_modelMap, _this_modelMap_req_model, model, _mc_stopSequences, _this_tracer, mc, _obj;
28755
+ var _mc_stopSequences, _this_tracer, mc, _obj;
28749
28756
  return _ts_generator$8(this, function(_state) {
28750
28757
  switch(_state.label){
28751
28758
  case 0:
28752
- model = req.model ? (_this_modelMap_req_model = (_this_modelMap = _this.modelMap) === null || _this_modelMap === void 0 ? void 0 : _this_modelMap[req.model]) !== null && _this_modelMap_req_model !== void 0 ? _this_modelMap_req_model : req.model : _this.models.model;
28753
28759
  if (!_this.tracer) return [
28754
28760
  3,
28755
28761
  2
@@ -28759,7 +28765,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
28759
28765
  4,
28760
28766
  (_this_tracer = _this.tracer) === null || _this_tracer === void 0 ? void 0 : _this_tracer.startActiveSpan("Chat Request", {
28761
28767
  kind: AxSpanKind.SERVER,
28762
- attributes: (_obj = {}, _define_property$p(_obj, axSpanAttributes.LLM_SYSTEM, _this.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MODEL, model), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MAX_TOKENS, mc.maxTokens), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TEMPERATURE, mc.temperature), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TOP_P, mc.topP), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TOP_K, mc.topK), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_FREQUENCY_PENALTY, mc.frequencyPenalty), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_PRESENCE_PENALTY, mc.presencePenalty), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_STOP_SEQUENCES, (_mc_stopSequences = mc.stopSequences) === null || _mc_stopSequences === void 0 ? void 0 : _mc_stopSequences.join(", ")), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_LLM_IS_STREAMING, mc.stream), _obj)
28768
+ attributes: (_obj = {}, _define_property$p(_obj, axSpanAttributes.LLM_SYSTEM, _this.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MODEL, _this.modelInfo.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MAX_TOKENS, mc.maxTokens), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TEMPERATURE, mc.temperature), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TOP_P, mc.topP), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TOP_K, mc.topK), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_FREQUENCY_PENALTY, mc.frequencyPenalty), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_PRESENCE_PENALTY, mc.presencePenalty), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_STOP_SEQUENCES, (_mc_stopSequences = mc.stopSequences) === null || _mc_stopSequences === void 0 ? void 0 : _mc_stopSequences.join(", ")), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_LLM_IS_STREAMING, mc.stream), _obj)
28763
28769
  }, function() {
28764
28770
  var _ref = _async_to_generator$7(function(span) {
28765
28771
  var res;
@@ -28768,7 +28774,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
28768
28774
  case 0:
28769
28775
  return [
28770
28776
  4,
28771
- _this._chat(model, req, options, span)
28777
+ _this._chat(_req, options, span)
28772
28778
  ];
28773
28779
  case 1:
28774
28780
  res = _state.sent();
@@ -28793,7 +28799,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
28793
28799
  case 2:
28794
28800
  return [
28795
28801
  4,
28796
- _this._chat(model, req, options)
28802
+ _this._chat(_req, options)
28797
28803
  ];
28798
28804
  case 3:
28799
28805
  return [
@@ -28807,10 +28813,10 @@ var AxBaseAI = /*#__PURE__*/ function() {
28807
28813
  },
28808
28814
  {
28809
28815
  key: "_chat",
28810
- value: function _chat(model, chatReq, options, span) {
28816
+ value: function _chat(_req, options, span) {
28811
28817
  var _this = this;
28812
28818
  return _async_to_generator$7(function() {
28813
- var _chatReq_modelConfig, reqFn, _options_stream, stream, functions, req, fn, rv, _tmp, respFn, wrappedRespFn, doneCb, st, res;
28819
+ var _req_modelConfig, reqFn, _options_stream, stream, functions, req, fn, rv, _tmp, respFn, wrappedRespFn, doneCb, st, res;
28814
28820
  return _ts_generator$8(this, function(_state) {
28815
28821
  switch(_state.label){
28816
28822
  case 0:
@@ -28818,14 +28824,11 @@ var AxBaseAI = /*#__PURE__*/ function() {
28818
28824
  throw new Error("generateChatReq not implemented");
28819
28825
  }
28820
28826
  reqFn = _this.generateChatReq;
28821
- stream = (_options_stream = options === null || options === void 0 ? void 0 : options.stream) !== null && _options_stream !== void 0 ? _options_stream : (_chatReq_modelConfig = chatReq.modelConfig) === null || _chatReq_modelConfig === void 0 ? void 0 : _chatReq_modelConfig.stream;
28822
- if (chatReq.functions && chatReq.functions.length > 0) {
28823
- functions = chatReq.functions;
28824
- }
28825
- req = _object_spread_props$8(_object_spread$h({}, chatReq), {
28826
- model: model,
28827
+ stream = (_options_stream = options === null || options === void 0 ? void 0 : options.stream) !== null && _options_stream !== void 0 ? _options_stream : (_req_modelConfig = _req.modelConfig) === null || _req_modelConfig === void 0 ? void 0 : _req_modelConfig.stream;
28828
+ functions = _req.functions && _req.functions.length > 0 ? _req.functions : undefined;
28829
+ req = _object_spread_props$8(_object_spread$h({}, _req), {
28827
28830
  functions: functions,
28828
- modelConfig: _object_spread_props$8(_object_spread$h({}, chatReq.modelConfig), {
28831
+ modelConfig: _object_spread_props$8(_object_spread$h({}, _req.modelConfig), {
28829
28832
  stream: stream
28830
28833
  })
28831
28834
  });
@@ -28962,14 +28965,10 @@ var AxBaseAI = /*#__PURE__*/ function() {
28962
28965
  value: function embed(req, options) {
28963
28966
  var _this = this;
28964
28967
  return _async_to_generator$7(function() {
28965
- var _this_modelMap, _this_modelMap_req_embedModel, embedModel, _this_tracer, _req_embedModel, _obj;
28968
+ var _this_tracer, _obj;
28966
28969
  return _ts_generator$8(this, function(_state) {
28967
28970
  switch(_state.label){
28968
28971
  case 0:
28969
- embedModel = req.embedModel ? (_this_modelMap_req_embedModel = (_this_modelMap = _this.modelMap) === null || _this_modelMap === void 0 ? void 0 : _this_modelMap[req.embedModel]) !== null && _this_modelMap_req_embedModel !== void 0 ? _this_modelMap_req_embedModel : req.embedModel : _this.models.embedModel;
28970
- if (!embedModel) {
28971
- throw new Error("No embed model defined");
28972
- }
28973
28972
  if (!_this.tracer) return [
28974
28973
  3,
28975
28974
  2
@@ -28978,7 +28977,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
28978
28977
  4,
28979
28978
  (_this_tracer = _this.tracer) === null || _this_tracer === void 0 ? void 0 : _this_tracer.startActiveSpan("Embed Request", {
28980
28979
  kind: AxSpanKind.SERVER,
28981
- attributes: (_obj = {}, _define_property$p(_obj, axSpanAttributes.LLM_SYSTEM, _this.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MODEL, (_req_embedModel = req.embedModel) !== null && _req_embedModel !== void 0 ? _req_embedModel : _this.models.embedModel), _obj)
28980
+ attributes: (_obj = {}, _define_property$p(_obj, axSpanAttributes.LLM_SYSTEM, _this.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MODEL, _this.modelInfo.name), _obj)
28982
28981
  }, function() {
28983
28982
  var _ref = _async_to_generator$7(function(span) {
28984
28983
  var res;
@@ -28987,7 +28986,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
28987
28986
  case 0:
28988
28987
  return [
28989
28988
  4,
28990
- _this._embed(embedModel, req, options, span)
28989
+ _this._embed(req, options, span)
28991
28990
  ];
28992
28991
  case 1:
28993
28992
  res = _state.sent();
@@ -29012,7 +29011,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
29012
29011
  case 2:
29013
29012
  return [
29014
29013
  2,
29015
- _this._embed(embedModel, req, options)
29014
+ _this._embed(req, options)
29016
29015
  ];
29017
29016
  }
29018
29017
  });
@@ -29021,10 +29020,10 @@ var AxBaseAI = /*#__PURE__*/ function() {
29021
29020
  },
29022
29021
  {
29023
29022
  key: "_embed",
29024
- value: function _embed(embedModel, embedReq, options, span) {
29023
+ value: function _embed(req, options, span) {
29025
29024
  var _this = this;
29026
29025
  return _async_to_generator$7(function() {
29027
- var req, fn, resValue, _tmp, res, _res_modelUsage_completionTokens, _obj;
29026
+ var fn, resValue, _tmp, res, _res_modelUsage_completionTokens, _obj;
29028
29027
  return _ts_generator$8(this, function(_state) {
29029
29028
  switch(_state.label){
29030
29029
  case 0:
@@ -29034,9 +29033,6 @@ var AxBaseAI = /*#__PURE__*/ function() {
29034
29033
  if (!_this.generateEmbedResp) {
29035
29034
  throw new Error("generateEmbedResp not implemented");
29036
29035
  }
29037
- req = _object_spread_props$8(_object_spread$h({}, embedReq), {
29038
- embedModel: embedModel
29039
- });
29040
29036
  fn = function() {
29041
29037
  var _ref = _async_to_generator$7(function() {
29042
29038
  var _this_generateEmbedReq, apiConfig, reqValue, res;
@@ -29118,6 +29114,22 @@ var AxBaseAI = /*#__PURE__*/ function() {
29118
29114
  var headers = arguments.length > 0 && arguments[0] !== void 0 ? arguments[0] : {};
29119
29115
  return _object_spread$h({}, headers, this.headers);
29120
29116
  }
29117
+ },
29118
+ {
29119
+ key: "getEmbedModel",
29120
+ value: function getEmbedModel(name) {
29121
+ var _this_embedModelMap;
29122
+ var _this_embedModelMap_name;
29123
+ return name ? (_this_embedModelMap_name = (_this_embedModelMap = this.embedModelMap) === null || _this_embedModelMap === void 0 ? void 0 : _this_embedModelMap[name]) !== null && _this_embedModelMap_name !== void 0 ? _this_embedModelMap_name : name : undefined;
29124
+ }
29125
+ },
29126
+ {
29127
+ key: "getModel",
29128
+ value: function getModel(name) {
29129
+ var _this_modelMap;
29130
+ var _this_modelMap_name;
29131
+ return (_this_modelMap_name = (_this_modelMap = this.modelMap) === null || _this_modelMap === void 0 ? void 0 : _this_modelMap[name]) !== null && _this_modelMap_name !== void 0 ? _this_modelMap_name : name;
29132
+ }
29121
29133
  }
29122
29134
  ]);
29123
29135
  return AxBaseAI;
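De-minified, the two lookups added above reduce to a nullish-coalescing read from the corresponding map, falling back to the name itself (a readable sketch, not part of the bundle):

getModel(name) {
  return this.modelMap?.[name] ?? name;
}

getEmbedModel(name) {
  // an undefined embed model stays undefined
  return name ? (this.embedModelMap?.[name] ?? name) : undefined;
}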
@@ -29281,7 +29293,6 @@ var AxAIOpenAIModel;
29281
29293
  (function(AxAIOpenAIModel) {
29282
29294
  AxAIOpenAIModel["GPT4"] = "gpt-4";
29283
29295
  AxAIOpenAIModel["GPT4O"] = "gpt-4o";
29284
- AxAIOpenAIModel["GPT4OMini"] = "gpt-4o-mini";
29285
29296
  AxAIOpenAIModel["GPT4Turbo"] = "gpt-4-turbo";
29286
29297
  AxAIOpenAIModel["GPT35Turbo"] = "gpt-3.5-turbo";
29287
29298
  AxAIOpenAIModel["GPT35TurboInstruct"] = "gpt-3.5-turbo-instruct";
@@ -29312,12 +29323,6 @@ var AxAIOpenAIEmbedModel;
29312
29323
  promptTokenCostPer1M: 5,
29313
29324
  completionTokenCostPer1M: 15
29314
29325
  },
29315
- {
29316
- name: AxAIOpenAIModel.GPT4OMini,
29317
- currency: "usd",
29318
- promptTokenCostPer1M: 0.15,
29319
- completionTokenCostPer1M: 0.6
29320
- },
29321
29326
  {
29322
29327
  name: AxAIOpenAIModel.GPT4Turbo,
29323
29328
  currency: "usd",
@@ -29473,7 +29478,7 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
29473
29478
  _inherits$f(AxAIOpenAI, AxBaseAI);
29474
29479
  var _super = _create_super$f(AxAIOpenAI);
29475
29480
  function AxAIOpenAI(param) {
29476
- var apiKey = param.apiKey, config = param.config, options = param.options, apiURL = param.apiURL, _param_modelInfo = param.modelInfo, modelInfo = _param_modelInfo === void 0 ? axModelInfoOpenAI : _param_modelInfo, modelMap = param.modelMap;
29481
+ var apiKey = param.apiKey, config = param.config, options = param.options, apiURL = param.apiURL, _param_modelInfo = param.modelInfo, modelInfo = _param_modelInfo === void 0 ? axModelInfoOpenAI : _param_modelInfo;
29477
29482
  _class_call_check$o(this, AxAIOpenAI);
29478
29483
  var _this;
29479
29484
  if (!apiKey || apiKey === "") {
@@ -29495,16 +29500,14 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
29495
29500
  supportFor: {
29496
29501
  functions: true,
29497
29502
  streaming: true
29498
- },
29499
- modelMap: modelMap
29503
+ }
29500
29504
  });
29501
29505
  _define_property$o(_assert_this_initialized$f(_this), "config", void 0);
29502
29506
  _define_property$o(_assert_this_initialized$f(_this), "streamingUsage", void 0);
29503
- _define_property$o(_assert_this_initialized$f(_this), "dimensions", void 0);
29504
29507
  _define_property$o(_assert_this_initialized$f(_this), "generateChatReq", function(req, // eslint-disable-next-line @typescript-eslint/no-unused-vars
29505
29508
  _config) {
29506
29509
  var _req_functions, _req_modelConfig, _req_modelConfig1, _this_config, _this_config1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4, _req_modelConfig5, _req_modelConfig6, _req_modelConfig7;
29507
- var model = req.model;
29510
+ var model = _this.config.model;
29508
29511
  if (!req.chatPrompt || req.chatPrompt.length === 0) {
29509
29512
  throw new Error("Chat prompt is empty");
29510
29513
  }
@@ -29557,7 +29560,7 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
29557
29560
  ];
29558
29561
  });
29559
29562
  _define_property$o(_assert_this_initialized$f(_this), "generateEmbedReq", function(req) {
29560
- var model = req.embedModel;
29563
+ var model = _this.config.embedModel;
29561
29564
  if (!model) {
29562
29565
  throw new Error("Embed model not set");
29563
29566
  }
@@ -29569,8 +29572,7 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
29569
29572
  };
29570
29573
  var reqValue = {
29571
29574
  model: model,
29572
- input: req.texts,
29573
- dimensions: _this.dimensions
29575
+ input: req.texts
29574
29576
  };
29575
29577
  return [
29576
29578
  apiConfig,
@@ -29679,7 +29681,6 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
29679
29681
  _this.config = _config;
29680
29682
  var _options_streamingUsage;
29681
29683
  _this.streamingUsage = (_options_streamingUsage = options === null || options === void 0 ? void 0 : options.streamingUsage) !== null && _options_streamingUsage !== void 0 ? _options_streamingUsage : true;
29682
- _this.dimensions = config === null || config === void 0 ? void 0 : config.dimensions;
29683
29684
  return _this;
29684
29685
  }
29685
29686
  _create_class$b(AxAIOpenAI, [
@@ -29913,7 +29914,7 @@ var AxAIAzureOpenAI = /*#__PURE__*/ function(AxAIOpenAI) {
29913
29914
  _inherits$e(AxAIAzureOpenAI, AxAIOpenAI);
29914
29915
  var _super = _create_super$e(AxAIAzureOpenAI);
29915
29916
  function AxAIAzureOpenAI(param) {
29916
- var apiKey = param.apiKey, resourceName = param.resourceName, deploymentName = param.deploymentName, _param_version = param.version, version = _param_version === void 0 ? "api-version=2024-02-15-preview" : _param_version, config = param.config, options = param.options, modelMap = param.modelMap;
29917
+ var apiKey = param.apiKey, resourceName = param.resourceName, deploymentName = param.deploymentName, _param_version = param.version, version = _param_version === void 0 ? "api-version=2024-02-15-preview" : _param_version, config = param.config, options = param.options;
29917
29918
  _class_call_check$n(this, AxAIAzureOpenAI);
29918
29919
  if (!apiKey || apiKey === "") {
29919
29920
  throw new Error("Azure OpenAPI API key not set");
@@ -29928,8 +29929,7 @@ var AxAIAzureOpenAI = /*#__PURE__*/ function(AxAIOpenAI) {
29928
29929
  var _this = _super.call(this, {
29929
29930
  apiKey: apiKey,
29930
29931
  config: _config,
29931
- options: options,
29932
- modelMap: modelMap
29932
+ options: options
29933
29933
  });
29934
29934
  var host = resourceName.includes("://") ? resourceName : "https://".concat(resourceName, ".openai.azure.com/");
29935
29935
  _get$5((_assert_this_initialized$e(_this), _get_prototype_of$e(AxAIAzureOpenAI.prototype)), "setName", _this).call(_this, "Azure OpenAI");
@@ -30075,7 +30075,7 @@ var AxAIHuggingFace = /*#__PURE__*/ function(AxBaseAI) {
30075
30075
  _inherits$d(AxAIHuggingFace, AxBaseAI);
30076
30076
  var _super = _create_super$d(AxAIHuggingFace);
30077
30077
  function AxAIHuggingFace(param) {
30078
- var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
30078
+ var apiKey = param.apiKey, config = param.config, options = param.options;
30079
30079
  _class_call_check$m(this, AxAIHuggingFace);
30080
30080
  var _this;
30081
30081
  if (!apiKey || apiKey === "") {
@@ -30096,14 +30096,13 @@ var AxAIHuggingFace = /*#__PURE__*/ function(AxBaseAI) {
30096
30096
  supportFor: {
30097
30097
  functions: false,
30098
30098
  streaming: false
30099
- },
30100
- modelMap: modelMap
30099
+ }
30101
30100
  });
30102
30101
  _define_property$m(_assert_this_initialized$d(_this), "config", void 0);
30103
30102
  _define_property$m(_assert_this_initialized$d(_this), "generateChatReq", function(req, // eslint-disable-next-line @typescript-eslint/no-unused-vars
30104
30103
  _config) {
30105
30104
  var _req_chatPrompt, _req_modelConfig, _req_modelConfig1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4;
30106
- var model = req.model;
30105
+ var model = _this.config.model;
30107
30106
  var functionsList = req.functions ? "Functions:\n".concat(JSON.stringify(req.functions, null, 2), "\n") : "";
30108
30107
  var prompt = (_req_chatPrompt = req.chatPrompt) === null || _req_chatPrompt === void 0 ? void 0 : _req_chatPrompt.map(function(msg) {
30109
30108
  switch(msg.role){
@@ -30325,7 +30324,7 @@ var AxAITogether = /*#__PURE__*/ function(AxAIOpenAI) {
30325
30324
  _inherits$c(AxAITogether, AxAIOpenAI);
30326
30325
  var _super = _create_super$c(AxAITogether);
30327
30326
  function AxAITogether(param) {
30328
- var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
30327
+ var apiKey = param.apiKey, config = param.config, options = param.options;
30329
30328
  _class_call_check$l(this, AxAITogether);
30330
30329
  if (!apiKey || apiKey === "") {
30331
30330
  throw new Error("Together API key not set");
@@ -30336,8 +30335,7 @@ var AxAITogether = /*#__PURE__*/ function(AxAIOpenAI) {
30336
30335
  config: _config,
30337
30336
  options: options,
30338
30337
  apiURL: "https://api.together.xyz/v1",
30339
- modelInfo: axModelInfoTogether,
30340
- modelMap: modelMap
30338
+ modelInfo: axModelInfoTogether
30341
30339
  });
30342
30340
  _get$4((_assert_this_initialized$c(_this), _get_prototype_of$c(AxAITogether.prototype)), "setName", _this).call(_this, "Together");
30343
30341
  return _this;
@@ -30611,7 +30609,7 @@ var AxAICohere = /*#__PURE__*/ function(AxBaseAI) {
30611
30609
  _inherits$b(AxAICohere, AxBaseAI);
30612
30610
  var _super = _create_super$b(AxAICohere);
30613
30611
  function AxAICohere(param) {
30614
- var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
30612
+ var apiKey = param.apiKey, config = param.config, options = param.options;
30615
30613
  _class_call_check$k(this, AxAICohere);
30616
30614
  var _this;
30617
30615
  if (!apiKey || apiKey === "") {
@@ -30632,14 +30630,13 @@ var AxAICohere = /*#__PURE__*/ function(AxBaseAI) {
30632
30630
  functions: true,
30633
30631
  streaming: true
30634
30632
  },
30635
- options: options,
30636
- modelMap: modelMap
30633
+ options: options
30637
30634
  });
30638
30635
  _define_property$k(_assert_this_initialized$b(_this), "config", void 0);
30639
30636
  _define_property$k(_assert_this_initialized$b(_this), "generateChatReq", function(req, // eslint-disable-next-line @typescript-eslint/no-unused-vars
30640
30637
  _config) {
30641
30638
  var _req_functions, _req_modelConfig, _req_modelConfig1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4, _req_modelConfig5, _req_modelConfig6;
30642
- var model = req.model;
30639
+ var model = _this.config.model;
30643
30640
  var lastChatMsg = req.chatPrompt.at(-1);
30644
30641
  var restOfChat = req.chatPrompt.slice(0, -1);
30645
30642
  var message;
@@ -30734,7 +30731,7 @@ var AxAICohere = /*#__PURE__*/ function(AxBaseAI) {
30734
30731
  ];
30735
30732
  });
30736
30733
  _define_property$k(_assert_this_initialized$b(_this), "generateEmbedReq", function(req) {
30737
- var model = req.embedModel;
30734
+ var model = _this.config.embedModel;
30738
30735
  if (!model) {
30739
30736
  throw new Error("Embed model not set");
30740
30737
  }
@@ -31133,7 +31130,7 @@ var safetySettings = [
31133
31130
  _inherits$a(AxAIGoogleGemini, AxBaseAI);
31134
31131
  var _super = _create_super$a(AxAIGoogleGemini);
31135
31132
  function AxAIGoogleGemini(param) {
31136
- var apiKey = param.apiKey, projectId = param.projectId, region = param.region, config = param.config, options = param.options, modelMap = param.modelMap;
31133
+ var apiKey = param.apiKey, projectId = param.projectId, region = param.region, config = param.config, options = param.options;
31137
31134
  _class_call_check$j(this, AxAIGoogleGemini);
31138
31135
  var _this;
31139
31136
  if (!apiKey || apiKey === "") {
@@ -31157,15 +31154,14 @@ var safetySettings = [
31157
31154
  supportFor: {
31158
31155
  functions: true,
31159
31156
  streaming: true
31160
- },
31161
- modelMap: modelMap
31157
+ }
31162
31158
  });
31163
31159
  _define_property$j(_assert_this_initialized$a(_this), "options", void 0);
31164
31160
  _define_property$j(_assert_this_initialized$a(_this), "config", void 0);
31165
31161
  _define_property$j(_assert_this_initialized$a(_this), "apiKey", void 0);
31166
31162
  _define_property$j(_assert_this_initialized$a(_this), "generateChatReq", function(req) {
31167
31163
  var _req_modelConfig, _this_options, _req_modelConfig1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4, _req_modelConfig5;
31168
- var model = req.model;
31164
+ var model = _this.config.model;
31169
31165
  var _req_modelConfig_stream;
31170
31166
  var stream = (_req_modelConfig_stream = (_req_modelConfig = req.modelConfig) === null || _req_modelConfig === void 0 ? void 0 : _req_modelConfig.stream) !== null && _req_modelConfig_stream !== void 0 ? _req_modelConfig_stream : _this.config.stream;
31171
31167
  if (!req.chatPrompt || req.chatPrompt.length === 0) {
@@ -31345,7 +31341,7 @@ var safetySettings = [
31345
31341
  ];
31346
31342
  });
31347
31343
  _define_property$j(_assert_this_initialized$a(_this), "generateEmbedReq", function(req) {
31348
- var model = req.embedModel;
31344
+ var model = _this.config.embedModel;
31349
31345
  if (!model) {
31350
31346
  throw new Error("Embed model not set");
31351
31347
  }
@@ -31709,7 +31705,7 @@ var AxAIAnthropic = /*#__PURE__*/ function(AxBaseAI) {
31709
31705
  _inherits$9(AxAIAnthropic, AxBaseAI);
31710
31706
  var _super = _create_super$9(AxAIAnthropic);
31711
31707
  function AxAIAnthropic(param) {
31712
- var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
31708
+ var apiKey = param.apiKey, config = param.config, options = param.options;
31713
31709
  _class_call_check$i(this, AxAIAnthropic);
31714
31710
  var _this;
31715
31711
  if (!apiKey || apiKey === "") {
@@ -31731,13 +31727,12 @@ var AxAIAnthropic = /*#__PURE__*/ function(AxBaseAI) {
31731
31727
  supportFor: {
31732
31728
  functions: true,
31733
31729
  streaming: true
31734
- },
31735
- modelMap: modelMap
31730
+ }
31736
31731
  });
31737
31732
  _define_property$i(_assert_this_initialized$9(_this), "config", void 0);
31738
31733
  _define_property$i(_assert_this_initialized$9(_this), "generateChatReq", function(req) {
31739
31734
  var _req_functions, _req_modelConfig, _req_modelConfig1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4, _req_modelConfig5;
31740
- var model = req.model;
31735
+ var model = _this.config.model;
31741
31736
  var apiConfig = {
31742
31737
  name: "/messages"
31743
31738
  };
@@ -32634,7 +32629,7 @@ var AxAIGroq = /*#__PURE__*/ function(AxAIOpenAI) {
32634
32629
  _inherits$8(AxAIGroq, AxAIOpenAI);
32635
32630
  var _super = _create_super$8(AxAIGroq);
32636
32631
  function AxAIGroq(param) {
32637
- var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
32632
+ var apiKey = param.apiKey, config = param.config, options = param.options;
32638
32633
  _class_call_check$g(this, AxAIGroq);
32639
32634
  var _this;
32640
32635
  if (!apiKey || apiKey === "") {
@@ -32649,8 +32644,7 @@ var AxAIGroq = /*#__PURE__*/ function(AxAIOpenAI) {
32649
32644
  config: _config,
32650
32645
  options: _options,
32651
32646
  apiURL: "https://api.groq.com/openai/v1",
32652
- modelInfo: [],
32653
- modelMap: modelMap
32647
+ modelInfo: []
32654
32648
  });
32655
32649
  _define_property$g(_assert_this_initialized$8(_this), "setOptions", function(options) {
32656
32650
  var rateLimiter = _this.newRateLimiter(options);
@@ -32706,23 +32700,19 @@ var AxAIGroq = /*#__PURE__*/ function(AxAIOpenAI) {
32706
32700
  }
32707
32701
  (AxAIOpenAI);
32708
32702
 
32709
- // cspell:ignore mistral, mixtral, codestral, nemo
32710
32703
  var AxAIMistralModel;
32711
32704
  (function(AxAIMistralModel) {
32712
32705
  AxAIMistralModel["Mistral7B"] = "open-mistral-7b";
32713
32706
  AxAIMistralModel["Mistral8x7B"] = "open-mixtral-8x7b";
32714
32707
  AxAIMistralModel["MistralSmall"] = "mistral-small-latest";
32708
+ AxAIMistralModel["MistralMedium"] = "mistral-medium-latest";
32715
32709
  AxAIMistralModel["MistralLarge"] = "mistral-large-latest";
32716
- AxAIMistralModel["Codestral"] = "codestral-latest";
32717
- AxAIMistralModel["OpenCodestralMamba"] = "open-codestral-mamba";
32718
- AxAIMistralModel["OpenMistralNemo"] = "open-mistral-nemo-latest";
32719
32710
  })(AxAIMistralModel || (AxAIMistralModel = {}));
32720
32711
  var AxAIMistralEmbedModels;
32721
32712
  (function(AxAIMistralEmbedModels) {
32722
32713
  AxAIMistralEmbedModels["MistralEmbed"] = "mistral-embed";
32723
32714
  })(AxAIMistralEmbedModels || (AxAIMistralEmbedModels = {}));
32724
32715
 
32725
- // cspell:ignore mistral, mixtral, codestral, nemo
32726
32716
  var axModelInfoMistral = [
32727
32717
  {
32728
32718
  name: AxAIMistralModel.Mistral7B,
@@ -32743,28 +32733,16 @@ var axModelInfoMistral = [
32743
32733
  completionTokenCostPer1M: 6
32744
32734
  },
32745
32735
  {
32746
- name: AxAIMistralModel.MistralLarge,
32747
- currency: "USD",
32748
- promptTokenCostPer1M: 8,
32749
- completionTokenCostPer1M: 24
32750
- },
32751
- {
32752
- name: AxAIMistralModel.Codestral,
32736
+ name: AxAIMistralModel.MistralMedium,
32753
32737
  currency: "USD",
32754
- promptTokenCostPer1M: 1,
32755
- completionTokenCostPer1M: 3
32738
+ promptTokenCostPer1M: 2.7,
32739
+ completionTokenCostPer1M: 8.1
32756
32740
  },
32757
32741
  {
32758
- name: AxAIMistralModel.OpenCodestralMamba,
32759
- currency: "USD",
32760
- promptTokenCostPer1M: 0.25,
32761
- completionTokenCostPer1M: 0.25
32762
- },
32763
- {
32764
- name: AxAIMistralModel.OpenMistralNemo,
32742
+ name: AxAIMistralModel.MistralLarge,
32765
32743
  currency: "USD",
32766
- promptTokenCostPer1M: 0.3,
32767
- completionTokenCostPer1M: 0.3
32744
+ promptTokenCostPer1M: 8,
32745
+ completionTokenCostPer1M: 24
32768
32746
  }
32769
32747
  ];
32770
32748
 
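Each entry above follows the shared model-info shape: per-million-token prompt and completion costs in a given currency. A minimal sketch of turning such an entry into a cost estimate, assuming the token counts are already known:

// Rough request cost from an axModelInfo* entry (costs are per 1M tokens).
const estimateCost = (info, promptTokens, completionTokens) =>
  (promptTokens * info.promptTokenCostPer1M +
    completionTokens * info.completionTokenCostPer1M) / 1e6;

// With the MistralMedium entry above (2.7 / 8.1 USD per 1M tokens):
// estimateCost(info, 1000, 500) === 0.00675 USD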
@@ -32899,7 +32877,7 @@ var AxAIMistral = /*#__PURE__*/ function(AxAIOpenAI) {
32899
32877
  _inherits$7(AxAIMistral, AxAIOpenAI);
32900
32878
  var _super = _create_super$7(AxAIMistral);
32901
32879
  function AxAIMistral(param) {
32902
- var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
32880
+ var apiKey = param.apiKey, config = param.config, options = param.options;
32903
32881
  _class_call_check$f(this, AxAIMistral);
32904
32882
  if (!apiKey || apiKey === "") {
32905
32883
  throw new Error("Mistral API key not set");
@@ -32910,8 +32888,7 @@ var AxAIMistral = /*#__PURE__*/ function(AxAIOpenAI) {
32910
32888
  config: _config,
32911
32889
  options: options,
32912
32890
  apiURL: "https://api.mistral.ai/v1",
32913
- modelInfo: axModelInfoMistral,
32914
- modelMap: modelMap
32891
+ modelInfo: axModelInfoMistral
32915
32892
  });
32916
32893
  _get$2((_assert_this_initialized$7(_this), _get_prototype_of$7(AxAIMistral.prototype)), "setName", _this).call(_this, "Mistral");
32917
32894
  return _this;
@@ -33075,7 +33052,7 @@ var AxAIDeepSeek = /*#__PURE__*/ function(AxAIOpenAI) {
33075
33052
  _inherits$6(AxAIDeepSeek, AxAIOpenAI);
33076
33053
  var _super = _create_super$6(AxAIDeepSeek);
33077
33054
  function AxAIDeepSeek(param) {
33078
- var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
33055
+ var apiKey = param.apiKey, config = param.config, options = param.options;
33079
33056
  _class_call_check$e(this, AxAIDeepSeek);
33080
33057
  if (!apiKey || apiKey === "") {
33081
33058
  throw new Error("DeepSeek API key not set");
@@ -33086,8 +33063,7 @@ var AxAIDeepSeek = /*#__PURE__*/ function(AxAIOpenAI) {
33086
33063
  config: _config,
33087
33064
  options: options,
33088
33065
  apiURL: "https://api.deepseek.com",
33089
- modelInfo: axModelInfoDeepSeek,
33090
- modelMap: modelMap
33066
+ modelInfo: axModelInfoDeepSeek
33091
33067
  });
33092
33068
  _get$1((_assert_this_initialized$6(_this), _get_prototype_of$6(AxAIDeepSeek.prototype)), "setName", _this).call(_this, "DeepSeek");
33093
33069
  return _this;
@@ -33255,15 +33231,14 @@ var axAIOllamaDefaultConfig = function() {
33255
33231
  _inherits$5(AxAIOllama, AxAIOpenAI);
33256
33232
  var _super = _create_super$5(AxAIOllama);
33257
33233
  function AxAIOllama(param) {
33258
- var _param_apiKey = param.apiKey, apiKey = _param_apiKey === void 0 ? "not-set" : _param_apiKey, _param_url = param.url, url = _param_url === void 0 ? "http://localhost:11434" : _param_url, config = param.config, options = param.options, modelMap = param.modelMap;
33234
+ var _param_apiKey = param.apiKey, apiKey = _param_apiKey === void 0 ? "not-set" : _param_apiKey, _param_url = param.url, url = _param_url === void 0 ? "http://localhost:11434" : _param_url, config = param.config, options = param.options;
33259
33235
  _class_call_check$d(this, AxAIOllama);
33260
33236
  var _config = _object_spread$6({}, axAIOllamaDefaultConfig(), config);
33261
33237
  var _this = _super.call(this, {
33262
33238
  apiKey: apiKey,
33263
33239
  options: options,
33264
33240
  config: _config,
33265
- apiURL: new URL("/api", url).href,
33266
- modelMap: modelMap
33241
+ apiURL: new URL("/v1", url).href
33267
33242
  });
33268
33243
  _get((_assert_this_initialized$5(_this), _get_prototype_of$5(AxAIOllama.prototype)), "setName", _this).call(_this, "Ollama");
33269
33244
  return _this;
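The apiURL above resolves to the host's /v1 path (new URL('/v1', 'http://localhost:11434').href === 'http://localhost:11434/v1'), Ollama's OpenAI-compatible endpoint, which matches the request format generated by the AxAIOpenAI base class. A minimal construction sketch; the model name is an assumption, not taken from this diff:

const ollama = new AxAIOllama({
  url: 'http://localhost:11434',   // default shown in the constructor above
  config: { model: 'llama3' },     // hypothetical model available in the local Ollama instance
});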
@@ -33471,6 +33446,18 @@ var AxAI = /*#__PURE__*/ function() {
33471
33446
  }
33472
33447
  }
33473
33448
  _create_class$5(AxAI, [
33449
+ {
33450
+ key: "setModelMap",
33451
+ value: function setModelMap(modelMap) {
33452
+ this.ai.setModelMap(modelMap);
33453
+ }
33454
+ },
33455
+ {
33456
+ key: "setEmbedModelMap",
33457
+ value: function setEmbedModelMap(modelMap) {
33458
+ this.ai.setEmbedModelMap(modelMap);
33459
+ }
33460
+ },
33474
33461
  {
33475
33462
  key: "getName",
33476
33463
  value: function getName() {
@@ -33501,12 +33488,6 @@ var AxAI = /*#__PURE__*/ function() {
33501
33488
  return this.ai.getFeatures();
33502
33489
  }
33503
33490
  },
33504
- {
33505
- key: "getModelMap",
33506
- value: function getModelMap() {
33507
- return this.ai.getModelMap();
33508
- }
33509
- },
33510
33491
  {
33511
33492
  key: "chat",
33512
33493
  value: function chat(req, options) {
@@ -34065,8 +34046,8 @@ var assertRequiredFields = function(sig, values) {
34065
34046
  });
34066
34047
  if (missingFields.length > 0) {
34067
34048
  throw new AxAssertionError({
34068
- message: "Output must include: t: ".concat(missingFields.map(function(f) {
34069
- return "`".concat(f.title, ":`");
34049
+ message: "Missing required fields: ".concat(missingFields.map(function(f) {
34050
+ return f.name;
34070
34051
  }).join(", ")),
34071
34052
  values: values
34072
34053
  });
@@ -38872,7 +38853,7 @@ var AxPromptTemplate = function AxPromptTemplate(sig, fieldTemplates) {
38872
38853
  this.outputFormat = {
38873
38854
  type: "text",
38874
38855
  text: [
38875
- "Use the following output format."
38856
+ "Follow the following format."
38876
38857
  ].concat(_to_consumable_array$2(this.renderOutFields(this.sig.getOutputFields())), [
38877
38858
  "---\n\n"
38878
38859
  ]).join("\n\n")
@@ -40688,7 +40669,9 @@ let DspService = class DspService {
40688
40669
  }
40689
40670
  async modelOpenAI(params, _pinsSettingsList, context) {
40690
40671
  var _context_privates_OPENAI_API_KEY, _context_privates_OPENAI_SERVER;
40691
- const { apiKey = (_context_privates_OPENAI_API_KEY = context.privates.OPENAI_API_KEY) != null ? _context_privates_OPENAI_API_KEY : process.env['OPENAI_API_KEY'], apiURL = (_context_privates_OPENAI_SERVER = context.privates.OPENAI_SERVER) != null ? _context_privates_OPENAI_SERVER : process.env['OPENAI_SERVER'], config, options } = params;
40672
+ const { apiKey = (_context_privates_OPENAI_API_KEY = context.privates.OPENAI_API_KEY) != null ? _context_privates_OPENAI_API_KEY : process.env['OPENAI_API_KEY'], apiURL = (_context_privates_OPENAI_SERVER = context.privates.OPENAI_SERVER) != null ? _context_privates_OPENAI_SERVER : process.env['OPENAI_SERVER'], config = {
40673
+ model: 'gpt-4o-mini'
40674
+ }, options } = params;
40692
40675
  const modelInstance = new AxAIOpenAI({
40693
40676
  apiKey,
40694
40677
  apiURL,