@digipair/skill-dsp 0.21.15 → 0.22.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/index.esm.js CHANGED
@@ -20332,7 +20332,6 @@ about the parse state.
  var _a;
  var depth = action >> 19 /* Action.ReduceDepthShift */ , type = action & 65535 /* Action.ValueMask */ ;
  var parser = this.p.parser;
- if (this.reducePos < this.pos - 25 /* Lookahead.Margin */ ) this.setLookAhead(this.pos);
  var dPrec = parser.dynamicPrecedence(type);
  if (dPrec) this.score += dPrec;
  if (depth == 0) {
@@ -21285,7 +21284,7 @@ function cutAt(tree, pos, side) {
  cursor.moveTo(pos);
  for(;;){
  if (!(side < 0 ? cursor.childBefore(pos) : cursor.childAfter(pos))) for(;;){
- if ((side < 0 ? cursor.to < pos : cursor.from > pos) && !cursor.type.isError) return side < 0 ? Math.max(0, Math.min(cursor.to - 1, pos - 25 /* Lookahead.Margin */ )) : Math.min(tree.length, Math.max(cursor.from + 1, pos + 25 /* Lookahead.Margin */ ));
+ if ((side < 0 ? cursor.to < pos : cursor.from > pos) && !cursor.type.isError) return side < 0 ? Math.max(0, Math.min(cursor.to - 1, pos - 25 /* Safety.Margin */ )) : Math.min(tree.length, Math.max(cursor.from + 1, pos + 25 /* Safety.Margin */ ));
  if (side < 0 ? cursor.prevSibling() : cursor.nextSibling()) break;
  if (!cursor.parent()) return side < 0 ? 0 : tree.length;
  }
@@ -21408,7 +21407,7 @@ var TokenCache = /*#__PURE__*/ function() {
  token.mask = mask;
  token.context = context;
  }
- if (token.lookAhead > token.end + 25 /* Lookahead.Margin */ ) lookAhead = Math.max(token.lookAhead, lookAhead);
+ if (token.lookAhead > token.end + 25 /* Safety.Margin */ ) lookAhead = Math.max(token.lookAhead, lookAhead);
  if (token.value != 0 /* Term.Err */ ) {
  var startIndex = actionIndex;
  if (token.extended > -1) actionIndex = this.addActions(stack, token.extended, token.end, actionIndex);
@@ -23499,14 +23498,14 @@ function indent(str, spaces) {
  var match = parseIdentifier(input, i1, namePart) || namePart && parseAdditionalSymbol(input, i1) || maybeSpace && parseSpaces(input, i1);
  // match is required
  if (!match) {
- return i = i1, tokens = tokens1, nextMatch = nextMatch1, {
+ return i = i1, nextMatch = nextMatch1, tokens = tokens1, {
  v: nextMatch1
  };
  }
  var token = match.token, offset = match.offset;
  i1 += offset;
  if (token === " ") {
- return i = i1, tokens = tokens1, nextMatch = nextMatch1, "continue";
+ return i = i1, nextMatch = nextMatch1, tokens = tokens1, "continue";
  }
  tokens1 = _to_consumable_array$6(tokens1).concat([
  token
@@ -23525,7 +23524,7 @@ function indent(str, spaces) {
  if (contextKeys.some(function(el) {
  return el.startsWith(name);
  })) {
- return i = i1, tokens = tokens1, nextMatch = nextMatch1, "continue";
+ return i = i1, nextMatch = nextMatch1, tokens = tokens1, "continue";
  }
  if (dateTimeIdentifiers.some(function(el) {
  return el === name;
@@ -23544,9 +23543,9 @@ function indent(str, spaces) {
  if (dateTimeIdentifiers.some(function(el) {
  return el.startsWith(name);
  })) {
- return i = i1, tokens = tokens1, nextMatch = nextMatch1, "continue";
+ return i = i1, nextMatch = nextMatch1, tokens = tokens1, "continue";
  }
- return i = i1, tokens = tokens1, nextMatch = nextMatch1, {
+ return i = i1, nextMatch = nextMatch1, tokens = tokens1, {
  v: nextMatch1
  };
  };
@@ -24349,7 +24348,6 @@ var parser = LRParser.deserialize({
  },
  dynamicPrecedences: {
  "31": -1,
- "67": 1,
  "71": -1,
  "73": -1
  },
@@ -27353,6 +27351,7 @@ const applyTemplate = (value, context)=>{
  const path = result.replace(/^EVALUATE:/, '');
  result = evaluate(path, _extends({}, context, {
  getTime: (time)=>new Date(time).getTime(),
+ fromTime: (time)=>new Date(time).toISOString(),
  atob: (value)=>atob(value),
  btoa: (value)=>btoa(value),
  encodeURIComponent: (value)=>encodeURIComponent(value),
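Note: the hunk above adds a `fromTime` helper next to `getTime` in the scope handed to `EVALUATE:` template expressions. A minimal sketch of the expected behavior, assuming only the two helpers shown in the diff (the sample values are illustrative):

// Minimal sketch, mirroring the evaluation scope above.
const helpers = {
  getTime: (time) => new Date(time).getTime(),      // date string -> epoch milliseconds
  fromTime: (time) => new Date(time).toISOString(), // epoch milliseconds (or date string) -> ISO string
};
console.log(helpers.getTime('2024-01-01T00:00:00.000Z')); // 1704067200000
console.log(helpers.fromTime(1704067200000));             // '2024-01-01T00:00:00.000Z'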
@@ -28611,7 +28610,7 @@ var axBaseAIDefaultConfig = function() {
  };
  var AxBaseAI = /*#__PURE__*/ function() {
  function AxBaseAI(param) {
- var name = param.name, apiURL = param.apiURL, headers = param.headers, modelInfo = param.modelInfo, models = param.models, _param_options = param.options, options = _param_options === void 0 ? {} : _param_options, supportFor = param.supportFor, modelMap = param.modelMap;
+ var name = param.name, apiURL = param.apiURL, headers = param.headers, modelInfo = param.modelInfo, models = param.models, _param_options = param.options, options = _param_options === void 0 ? {} : _param_options, supportFor = param.supportFor;
  _class_call_check$p(this, AxBaseAI);
  _define_property$p(this, "generateChatReq", void 0);
  _define_property$p(this, "generateEmbedReq", void 0);
@@ -28623,29 +28622,48 @@ var AxBaseAI = /*#__PURE__*/ function() {
  _define_property$p(this, "fetch", void 0);
  _define_property$p(this, "tracer", void 0);
  _define_property$p(this, "modelMap", void 0);
- _define_property$p(this, "modelInfo", void 0);
+ _define_property$p(this, "embedModelMap", void 0);
  _define_property$p(this, "modelUsage", void 0);
  _define_property$p(this, "embedModelUsage", void 0);
- _define_property$p(this, "models", void 0);
  _define_property$p(this, "apiURL", void 0);
  _define_property$p(this, "name", void 0);
  _define_property$p(this, "headers", void 0);
+ _define_property$p(this, "modelInfo", void 0);
+ _define_property$p(this, "embedModelInfo", void 0);
  _define_property$p(this, "supportFor", void 0);
  this.name = name;
  this.apiURL = apiURL;
  this.headers = headers;
  this.supportFor = supportFor;
  this.tracer = options.tracer;
- this.modelInfo = modelInfo;
- this.modelMap = modelMap;
- var _modelMap_models_model, _models_embedModel, _modelMap_;
- this.models = {
- model: (_modelMap_models_model = modelMap === null || modelMap === void 0 ? void 0 : modelMap[models.model]) !== null && _modelMap_models_model !== void 0 ? _modelMap_models_model : models.model,
- embedModel: (_modelMap_ = modelMap === null || modelMap === void 0 ? void 0 : modelMap[(_models_embedModel = models.embedModel) !== null && _models_embedModel !== void 0 ? _models_embedModel : ""]) !== null && _modelMap_ !== void 0 ? _modelMap_ : models.embedModel
- };
- if (!models.model || typeof models.model !== "string" || models.model === "") {
+ var model = this.getModel(models.model);
+ var embedModel = this.getEmbedModel(models.embedModel);
+ if (typeof model === "string") {
+ var modelName = model.replace(/-0\d+$|-\d{2,}$/, "");
+ var _modelInfo_filter_at;
+ this.modelInfo = (_modelInfo_filter_at = modelInfo.filter(function(v) {
+ return v.name === modelName;
+ }).at(0)) !== null && _modelInfo_filter_at !== void 0 ? _modelInfo_filter_at : {
+ name: model,
+ currency: "usd",
+ promptTokenCostPer1M: 0,
+ completionTokenCostPer1M: 0
+ };
+ } else {
  throw new Error("No model defined");
  }
+ if (typeof embedModel === "string") {
+ var embedModelName = embedModel === null || embedModel === void 0 ? void 0 : embedModel.replace(/-0\d+$|-\d{2,}$/, "");
+ var _modelInfo_filter_at1;
+ this.embedModelInfo = (_modelInfo_filter_at1 = modelInfo.filter(function(v) {
+ return v.name === embedModelName;
+ }).at(0)) !== null && _modelInfo_filter_at1 !== void 0 ? _modelInfo_filter_at1 : {
+ name: embedModel !== null && embedModel !== void 0 ? embedModel : "",
+ currency: "usd",
+ promptTokenCostPer1M: 0,
+ completionTokenCostPer1M: 0
+ };
+ }
  this.setOptions(options);
  }
  _create_class$c(AxBaseAI, [
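Note: the new constructor above resolves model info eagerly by stripping trailing version suffixes from the model name with `/-0\d+$|-\d{2,}$/` before matching against `modelInfo`. A small illustration of that regex (the model names are only examples, not taken from the diff):

// Illustration only; the regex is copied from the constructor above.
const stripVersion = (name) => name.replace(/-0\d+$|-\d{2,}$/, '');
console.log(stripVersion('gpt-4-0613'));         // 'gpt-4'
console.log(stripVersion('gemini-1.5-pro-001')); // 'gemini-1.5-pro'
console.log(stripVersion('gpt-4o'));             // 'gpt-4o' (no trailing version suffix, unchanged)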
@@ -28685,27 +28703,21 @@ var AxBaseAI = /*#__PURE__*/ function() {
  }
  },
  {
- key: "_getModelInfo",
- value: function _getModelInfo(model) {
- var _this_modelMap;
- var _this_modelMap_model;
- var _model = (_this_modelMap_model = (_this_modelMap = this.modelMap) === null || _this_modelMap === void 0 ? void 0 : _this_modelMap[model]) !== null && _this_modelMap_model !== void 0 ? _this_modelMap_model : model;
- var modelName = _model.replace(/-0\d+$|-\d{2,}$/, "");
- var _this_modelInfo_filter_at;
- return (_this_modelInfo_filter_at = this.modelInfo.filter(function(v) {
- return v.name === modelName;
- }).at(0)) !== null && _this_modelInfo_filter_at !== void 0 ? _this_modelInfo_filter_at : {
- name: model,
- currency: "usd",
- promptTokenCostPer1M: 0,
- completionTokenCostPer1M: 0
- };
+ key: "setModelMap",
+ value: function setModelMap(modelMap) {
+ this.modelMap = modelMap;
+ }
+ },
+ {
+ key: "setEmbedModelMap",
+ value: function setEmbedModelMap(embedModelMap) {
+ this.embedModelMap = embedModelMap;
  }
  },
  {
  key: "getModelInfo",
  value: function getModelInfo() {
- return _object_spread_props$8(_object_spread$h({}, this._getModelInfo(this.models.model)), {
+ return _object_spread_props$8(_object_spread$h({}, this.modelInfo), {
  provider: this.name
  });
  }
@@ -28713,17 +28725,9 @@ var AxBaseAI = /*#__PURE__*/ function() {
  {
  key: "getEmbedModelInfo",
  value: function getEmbedModelInfo() {
- if (this.models.embedModel) {
- return _object_spread_props$8(_object_spread$h({}, this._getModelInfo(this.models.embedModel)), {
- provider: this.name
- });
- }
- }
- },
- {
- key: "getModelMap",
- value: function getModelMap() {
- return this.modelMap;
+ return this.embedModelInfo ? _object_spread_props$8(_object_spread$h({}, this.embedModelInfo), {
+ provider: this.name
+ }) : undefined;
  }
  },
  {
@@ -28746,14 +28750,13 @@ var AxBaseAI = /*#__PURE__*/ function() {
  },
  {
  key: "chat",
- value: function chat(req, options) {
+ value: function chat(_req, options) {
  var _this = this;
  return _async_to_generator$7(function() {
- var _this_modelMap, _this_modelMap_req_model, model, _mc_stopSequences, _this_tracer, mc, _obj;
+ var _mc_stopSequences, _this_tracer, mc, _obj;
  return _ts_generator$8(this, function(_state) {
  switch(_state.label){
  case 0:
- model = req.model ? (_this_modelMap_req_model = (_this_modelMap = _this.modelMap) === null || _this_modelMap === void 0 ? void 0 : _this_modelMap[req.model]) !== null && _this_modelMap_req_model !== void 0 ? _this_modelMap_req_model : req.model : _this.models.model;
  if (!_this.tracer) return [
  3,
  2
@@ -28763,7 +28766,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
  4,
  (_this_tracer = _this.tracer) === null || _this_tracer === void 0 ? void 0 : _this_tracer.startActiveSpan("Chat Request", {
  kind: AxSpanKind.SERVER,
- attributes: (_obj = {}, _define_property$p(_obj, axSpanAttributes.LLM_SYSTEM, _this.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MODEL, model), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MAX_TOKENS, mc.maxTokens), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TEMPERATURE, mc.temperature), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TOP_P, mc.topP), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TOP_K, mc.topK), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_FREQUENCY_PENALTY, mc.frequencyPenalty), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_PRESENCE_PENALTY, mc.presencePenalty), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_STOP_SEQUENCES, (_mc_stopSequences = mc.stopSequences) === null || _mc_stopSequences === void 0 ? void 0 : _mc_stopSequences.join(", ")), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_LLM_IS_STREAMING, mc.stream), _obj)
+ attributes: (_obj = {}, _define_property$p(_obj, axSpanAttributes.LLM_SYSTEM, _this.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MODEL, _this.modelInfo.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MAX_TOKENS, mc.maxTokens), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TEMPERATURE, mc.temperature), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TOP_P, mc.topP), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TOP_K, mc.topK), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_FREQUENCY_PENALTY, mc.frequencyPenalty), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_PRESENCE_PENALTY, mc.presencePenalty), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_STOP_SEQUENCES, (_mc_stopSequences = mc.stopSequences) === null || _mc_stopSequences === void 0 ? void 0 : _mc_stopSequences.join(", ")), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_LLM_IS_STREAMING, mc.stream), _obj)
  }, function() {
  var _ref = _async_to_generator$7(function(span) {
  var res;
@@ -28772,7 +28775,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
  case 0:
  return [
  4,
- _this._chat(model, req, options, span)
+ _this._chat(_req, options, span)
  ];
  case 1:
  res = _state.sent();
@@ -28797,7 +28800,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
  case 2:
  return [
  4,
- _this._chat(model, req, options)
+ _this._chat(_req, options)
  ];
  case 3:
  return [
@@ -28811,10 +28814,10 @@ var AxBaseAI = /*#__PURE__*/ function() {
  },
  {
  key: "_chat",
- value: function _chat(model, chatReq, options, span) {
+ value: function _chat(_req, options, span) {
  var _this = this;
  return _async_to_generator$7(function() {
- var _chatReq_modelConfig, reqFn, _options_stream, stream, functions, req, fn, rv, _tmp, respFn, wrappedRespFn, doneCb, st, res;
+ var _req_modelConfig, reqFn, _options_stream, stream, functions, req, fn, rv, _tmp, respFn, wrappedRespFn, doneCb, st, res;
  return _ts_generator$8(this, function(_state) {
  switch(_state.label){
  case 0:
@@ -28822,14 +28825,11 @@ var AxBaseAI = /*#__PURE__*/ function() {
  throw new Error("generateChatReq not implemented");
  }
  reqFn = _this.generateChatReq;
- stream = (_options_stream = options === null || options === void 0 ? void 0 : options.stream) !== null && _options_stream !== void 0 ? _options_stream : (_chatReq_modelConfig = chatReq.modelConfig) === null || _chatReq_modelConfig === void 0 ? void 0 : _chatReq_modelConfig.stream;
- if (chatReq.functions && chatReq.functions.length > 0) {
- functions = chatReq.functions;
- }
- req = _object_spread_props$8(_object_spread$h({}, chatReq), {
- model: model,
+ stream = (_options_stream = options === null || options === void 0 ? void 0 : options.stream) !== null && _options_stream !== void 0 ? _options_stream : (_req_modelConfig = _req.modelConfig) === null || _req_modelConfig === void 0 ? void 0 : _req_modelConfig.stream;
+ functions = _req.functions && _req.functions.length > 0 ? _req.functions : undefined;
+ req = _object_spread_props$8(_object_spread$h({}, _req), {
  functions: functions,
- modelConfig: _object_spread_props$8(_object_spread$h({}, chatReq.modelConfig), {
+ modelConfig: _object_spread_props$8(_object_spread$h({}, _req.modelConfig), {
  stream: stream
  })
  });
@@ -28966,14 +28966,10 @@ var AxBaseAI = /*#__PURE__*/ function() {
  value: function embed(req, options) {
  var _this = this;
  return _async_to_generator$7(function() {
- var _this_modelMap, _this_modelMap_req_embedModel, embedModel, _this_tracer, _req_embedModel, _obj;
+ var _this_tracer, _obj;
  return _ts_generator$8(this, function(_state) {
  switch(_state.label){
  case 0:
- embedModel = req.embedModel ? (_this_modelMap_req_embedModel = (_this_modelMap = _this.modelMap) === null || _this_modelMap === void 0 ? void 0 : _this_modelMap[req.embedModel]) !== null && _this_modelMap_req_embedModel !== void 0 ? _this_modelMap_req_embedModel : req.embedModel : _this.models.embedModel;
- if (!embedModel) {
- throw new Error("No embed model defined");
- }
  if (!_this.tracer) return [
  3,
  2
@@ -28982,7 +28978,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
  4,
  (_this_tracer = _this.tracer) === null || _this_tracer === void 0 ? void 0 : _this_tracer.startActiveSpan("Embed Request", {
  kind: AxSpanKind.SERVER,
- attributes: (_obj = {}, _define_property$p(_obj, axSpanAttributes.LLM_SYSTEM, _this.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MODEL, (_req_embedModel = req.embedModel) !== null && _req_embedModel !== void 0 ? _req_embedModel : _this.models.embedModel), _obj)
+ attributes: (_obj = {}, _define_property$p(_obj, axSpanAttributes.LLM_SYSTEM, _this.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MODEL, _this.modelInfo.name), _obj)
  }, function() {
  var _ref = _async_to_generator$7(function(span) {
  var res;
@@ -28991,7 +28987,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
  case 0:
  return [
  4,
- _this._embed(embedModel, req, options, span)
+ _this._embed(req, options, span)
  ];
  case 1:
  res = _state.sent();
@@ -29016,7 +29012,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
  case 2:
  return [
  2,
- _this._embed(embedModel, req, options)
+ _this._embed(req, options)
  ];
  }
  });
@@ -29025,10 +29021,10 @@ var AxBaseAI = /*#__PURE__*/ function() {
  },
  {
  key: "_embed",
- value: function _embed(embedModel, embedReq, options, span) {
+ value: function _embed(req, options, span) {
  var _this = this;
  return _async_to_generator$7(function() {
- var req, fn, resValue, _tmp, res, _res_modelUsage_completionTokens, _obj;
+ var fn, resValue, _tmp, res, _res_modelUsage_completionTokens, _obj;
  return _ts_generator$8(this, function(_state) {
  switch(_state.label){
  case 0:
@@ -29038,9 +29034,6 @@ var AxBaseAI = /*#__PURE__*/ function() {
  if (!_this.generateEmbedResp) {
  throw new Error("generateEmbedResp not implemented");
  }
- req = _object_spread_props$8(_object_spread$h({}, embedReq), {
- embedModel: embedModel
- });
  fn = function() {
  var _ref = _async_to_generator$7(function() {
  var _this_generateEmbedReq, apiConfig, reqValue, res;
@@ -29122,6 +29115,22 @@ var AxBaseAI = /*#__PURE__*/ function() {
  var headers = arguments.length > 0 && arguments[0] !== void 0 ? arguments[0] : {};
  return _object_spread$h({}, headers, this.headers);
  }
+ },
+ {
+ key: "getEmbedModel",
+ value: function getEmbedModel(name) {
+ var _this_embedModelMap;
+ var _this_embedModelMap_name;
+ return name ? (_this_embedModelMap_name = (_this_embedModelMap = this.embedModelMap) === null || _this_embedModelMap === void 0 ? void 0 : _this_embedModelMap[name]) !== null && _this_embedModelMap_name !== void 0 ? _this_embedModelMap_name : name : undefined;
+ }
+ },
+ {
+ key: "getModel",
+ value: function getModel(name) {
+ var _this_modelMap;
+ var _this_modelMap_name;
+ return (_this_modelMap_name = (_this_modelMap = this.modelMap) === null || _this_modelMap === void 0 ? void 0 : _this_modelMap[name]) !== null && _this_modelMap_name !== void 0 ? _this_modelMap_name : name;
+ }
  }
  ]);
  return AxBaseAI;
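Note: the `getModel`/`getEmbedModel` methods added above resolve a requested model name through an optional map (populated later via `setModelMap`/`setEmbedModelMap`) and fall back to the name itself. A rough equivalent of that lookup, with an illustrative alias map not taken from the diff:

// Rough equivalent of getModel(); the map contents are only an example.
const modelMap = { smart: 'gpt-4o', fast: 'gpt-3.5-turbo' };
const getModel = (name) => modelMap?.[name] ?? name;
console.log(getModel('smart'));  // 'gpt-4o'
console.log(getModel('gpt-4o')); // 'gpt-4o' (unmapped names pass through unchanged)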
@@ -29285,7 +29294,6 @@ var AxAIOpenAIModel;
  (function(AxAIOpenAIModel) {
  AxAIOpenAIModel["GPT4"] = "gpt-4";
  AxAIOpenAIModel["GPT4O"] = "gpt-4o";
- AxAIOpenAIModel["GPT4OMini"] = "gpt-4o-mini";
  AxAIOpenAIModel["GPT4Turbo"] = "gpt-4-turbo";
  AxAIOpenAIModel["GPT35Turbo"] = "gpt-3.5-turbo";
  AxAIOpenAIModel["GPT35TurboInstruct"] = "gpt-3.5-turbo-instruct";
@@ -29316,12 +29324,6 @@ var AxAIOpenAIEmbedModel;
  promptTokenCostPer1M: 5,
  completionTokenCostPer1M: 15
  },
- {
- name: AxAIOpenAIModel.GPT4OMini,
- currency: "usd",
- promptTokenCostPer1M: 0.15,
- completionTokenCostPer1M: 0.6
- },
  {
  name: AxAIOpenAIModel.GPT4Turbo,
  currency: "usd",
@@ -29477,7 +29479,7 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
  _inherits$f(AxAIOpenAI, AxBaseAI);
  var _super = _create_super$f(AxAIOpenAI);
  function AxAIOpenAI(param) {
- var apiKey = param.apiKey, config = param.config, options = param.options, apiURL = param.apiURL, _param_modelInfo = param.modelInfo, modelInfo = _param_modelInfo === void 0 ? axModelInfoOpenAI : _param_modelInfo, modelMap = param.modelMap;
+ var apiKey = param.apiKey, config = param.config, options = param.options, apiURL = param.apiURL, _param_modelInfo = param.modelInfo, modelInfo = _param_modelInfo === void 0 ? axModelInfoOpenAI : _param_modelInfo;
  _class_call_check$o(this, AxAIOpenAI);
  var _this;
  if (!apiKey || apiKey === "") {
@@ -29499,16 +29501,14 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
  supportFor: {
  functions: true,
  streaming: true
- },
- modelMap: modelMap
+ }
  });
  _define_property$o(_assert_this_initialized$f(_this), "config", void 0);
  _define_property$o(_assert_this_initialized$f(_this), "streamingUsage", void 0);
- _define_property$o(_assert_this_initialized$f(_this), "dimensions", void 0);
  _define_property$o(_assert_this_initialized$f(_this), "generateChatReq", function(req, // eslint-disable-next-line @typescript-eslint/no-unused-vars
  _config) {
  var _req_functions, _req_modelConfig, _req_modelConfig1, _this_config, _this_config1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4, _req_modelConfig5, _req_modelConfig6, _req_modelConfig7;
- var model = req.model;
+ var model = _this.config.model;
  if (!req.chatPrompt || req.chatPrompt.length === 0) {
  throw new Error("Chat prompt is empty");
  }
@@ -29561,7 +29561,7 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
  ];
  });
  _define_property$o(_assert_this_initialized$f(_this), "generateEmbedReq", function(req) {
- var model = req.embedModel;
+ var model = _this.config.embedModel;
  if (!model) {
  throw new Error("Embed model not set");
  }
@@ -29573,8 +29573,7 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
  };
  var reqValue = {
  model: model,
- input: req.texts,
- dimensions: _this.dimensions
+ input: req.texts
  };
  return [
  apiConfig,
@@ -29683,7 +29682,6 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
  _this.config = _config;
  var _options_streamingUsage;
  _this.streamingUsage = (_options_streamingUsage = options === null || options === void 0 ? void 0 : options.streamingUsage) !== null && _options_streamingUsage !== void 0 ? _options_streamingUsage : true;
- _this.dimensions = config === null || config === void 0 ? void 0 : config.dimensions;
  return _this;
  }
  _create_class$b(AxAIOpenAI, [
@@ -29917,7 +29915,7 @@ var AxAIAzureOpenAI = /*#__PURE__*/ function(AxAIOpenAI) {
  _inherits$e(AxAIAzureOpenAI, AxAIOpenAI);
  var _super = _create_super$e(AxAIAzureOpenAI);
  function AxAIAzureOpenAI(param) {
- var apiKey = param.apiKey, resourceName = param.resourceName, deploymentName = param.deploymentName, _param_version = param.version, version = _param_version === void 0 ? "api-version=2024-02-15-preview" : _param_version, config = param.config, options = param.options, modelMap = param.modelMap;
+ var apiKey = param.apiKey, resourceName = param.resourceName, deploymentName = param.deploymentName, _param_version = param.version, version = _param_version === void 0 ? "api-version=2024-02-15-preview" : _param_version, config = param.config, options = param.options;
  _class_call_check$n(this, AxAIAzureOpenAI);
  if (!apiKey || apiKey === "") {
  throw new Error("Azure OpenAPI API key not set");
@@ -29932,8 +29930,7 @@ var AxAIAzureOpenAI = /*#__PURE__*/ function(AxAIOpenAI) {
  var _this = _super.call(this, {
  apiKey: apiKey,
  config: _config,
- options: options,
- modelMap: modelMap
+ options: options
  });
  var host = resourceName.includes("://") ? resourceName : "https://".concat(resourceName, ".openai.azure.com/");
  _get$5((_assert_this_initialized$e(_this), _get_prototype_of$e(AxAIAzureOpenAI.prototype)), "setName", _this).call(_this, "Azure OpenAI");
@@ -30079,7 +30076,7 @@ var AxAIHuggingFace = /*#__PURE__*/ function(AxBaseAI) {
  _inherits$d(AxAIHuggingFace, AxBaseAI);
  var _super = _create_super$d(AxAIHuggingFace);
  function AxAIHuggingFace(param) {
- var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
+ var apiKey = param.apiKey, config = param.config, options = param.options;
  _class_call_check$m(this, AxAIHuggingFace);
  var _this;
  if (!apiKey || apiKey === "") {
@@ -30100,14 +30097,13 @@ var AxAIHuggingFace = /*#__PURE__*/ function(AxBaseAI) {
  supportFor: {
  functions: false,
  streaming: false
- },
- modelMap: modelMap
+ }
  });
  _define_property$m(_assert_this_initialized$d(_this), "config", void 0);
  _define_property$m(_assert_this_initialized$d(_this), "generateChatReq", function(req, // eslint-disable-next-line @typescript-eslint/no-unused-vars
  _config) {
  var _req_chatPrompt, _req_modelConfig, _req_modelConfig1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4;
- var model = req.model;
+ var model = _this.config.model;
  var functionsList = req.functions ? "Functions:\n".concat(JSON.stringify(req.functions, null, 2), "\n") : "";
  var prompt = (_req_chatPrompt = req.chatPrompt) === null || _req_chatPrompt === void 0 ? void 0 : _req_chatPrompt.map(function(msg) {
  switch(msg.role){
@@ -30329,7 +30325,7 @@ var AxAITogether = /*#__PURE__*/ function(AxAIOpenAI) {
  _inherits$c(AxAITogether, AxAIOpenAI);
  var _super = _create_super$c(AxAITogether);
  function AxAITogether(param) {
- var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
+ var apiKey = param.apiKey, config = param.config, options = param.options;
  _class_call_check$l(this, AxAITogether);
  if (!apiKey || apiKey === "") {
  throw new Error("Together API key not set");
@@ -30340,8 +30336,7 @@ var AxAITogether = /*#__PURE__*/ function(AxAIOpenAI) {
  config: _config,
  options: options,
  apiURL: "https://api.together.xyz/v1",
- modelInfo: axModelInfoTogether,
- modelMap: modelMap
+ modelInfo: axModelInfoTogether
  });
  _get$4((_assert_this_initialized$c(_this), _get_prototype_of$c(AxAITogether.prototype)), "setName", _this).call(_this, "Together");
  return _this;
@@ -30615,7 +30610,7 @@ var AxAICohere = /*#__PURE__*/ function(AxBaseAI) {
  _inherits$b(AxAICohere, AxBaseAI);
  var _super = _create_super$b(AxAICohere);
  function AxAICohere(param) {
- var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
+ var apiKey = param.apiKey, config = param.config, options = param.options;
  _class_call_check$k(this, AxAICohere);
  var _this;
  if (!apiKey || apiKey === "") {
@@ -30636,14 +30631,13 @@ var AxAICohere = /*#__PURE__*/ function(AxBaseAI) {
  functions: true,
  streaming: true
  },
- options: options,
- modelMap: modelMap
+ options: options
  });
  _define_property$k(_assert_this_initialized$b(_this), "config", void 0);
  _define_property$k(_assert_this_initialized$b(_this), "generateChatReq", function(req, // eslint-disable-next-line @typescript-eslint/no-unused-vars
  _config) {
  var _req_functions, _req_modelConfig, _req_modelConfig1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4, _req_modelConfig5, _req_modelConfig6;
- var model = req.model;
+ var model = _this.config.model;
  var lastChatMsg = req.chatPrompt.at(-1);
  var restOfChat = req.chatPrompt.slice(0, -1);
  var message;
@@ -30738,7 +30732,7 @@ var AxAICohere = /*#__PURE__*/ function(AxBaseAI) {
  ];
  });
  _define_property$k(_assert_this_initialized$b(_this), "generateEmbedReq", function(req) {
- var model = req.embedModel;
+ var model = _this.config.embedModel;
  if (!model) {
  throw new Error("Embed model not set");
  }
@@ -31137,7 +31131,7 @@ var safetySettings = [
  _inherits$a(AxAIGoogleGemini, AxBaseAI);
  var _super = _create_super$a(AxAIGoogleGemini);
  function AxAIGoogleGemini(param) {
- var apiKey = param.apiKey, projectId = param.projectId, region = param.region, config = param.config, options = param.options, modelMap = param.modelMap;
+ var apiKey = param.apiKey, projectId = param.projectId, region = param.region, config = param.config, options = param.options;
  _class_call_check$j(this, AxAIGoogleGemini);
  var _this;
  if (!apiKey || apiKey === "") {
@@ -31161,15 +31155,14 @@ var safetySettings = [
  supportFor: {
  functions: true,
  streaming: true
- },
- modelMap: modelMap
+ }
  });
  _define_property$j(_assert_this_initialized$a(_this), "options", void 0);
  _define_property$j(_assert_this_initialized$a(_this), "config", void 0);
  _define_property$j(_assert_this_initialized$a(_this), "apiKey", void 0);
  _define_property$j(_assert_this_initialized$a(_this), "generateChatReq", function(req) {
  var _req_modelConfig, _this_options, _req_modelConfig1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4, _req_modelConfig5;
- var model = req.model;
+ var model = _this.config.model;
  var _req_modelConfig_stream;
  var stream = (_req_modelConfig_stream = (_req_modelConfig = req.modelConfig) === null || _req_modelConfig === void 0 ? void 0 : _req_modelConfig.stream) !== null && _req_modelConfig_stream !== void 0 ? _req_modelConfig_stream : _this.config.stream;
  if (!req.chatPrompt || req.chatPrompt.length === 0) {
@@ -31349,7 +31342,7 @@ var safetySettings = [
  ];
  });
  _define_property$j(_assert_this_initialized$a(_this), "generateEmbedReq", function(req) {
- var model = req.embedModel;
+ var model = _this.config.embedModel;
  if (!model) {
  throw new Error("Embed model not set");
  }
@@ -31713,7 +31706,7 @@ var AxAIAnthropic = /*#__PURE__*/ function(AxBaseAI) {
  _inherits$9(AxAIAnthropic, AxBaseAI);
  var _super = _create_super$9(AxAIAnthropic);
  function AxAIAnthropic(param) {
- var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
+ var apiKey = param.apiKey, config = param.config, options = param.options;
  _class_call_check$i(this, AxAIAnthropic);
  var _this;
  if (!apiKey || apiKey === "") {
@@ -31735,13 +31728,12 @@ var AxAIAnthropic = /*#__PURE__*/ function(AxBaseAI) {
  supportFor: {
  functions: true,
  streaming: true
- },
- modelMap: modelMap
+ }
  });
  _define_property$i(_assert_this_initialized$9(_this), "config", void 0);
  _define_property$i(_assert_this_initialized$9(_this), "generateChatReq", function(req) {
  var _req_functions, _req_modelConfig, _req_modelConfig1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4, _req_modelConfig5;
- var model = req.model;
+ var model = _this.config.model;
  var apiConfig = {
  name: "/messages"
  };
@@ -32638,7 +32630,7 @@ var AxAIGroq = /*#__PURE__*/ function(AxAIOpenAI) {
  _inherits$8(AxAIGroq, AxAIOpenAI);
  var _super = _create_super$8(AxAIGroq);
  function AxAIGroq(param) {
- var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
+ var apiKey = param.apiKey, config = param.config, options = param.options;
  _class_call_check$g(this, AxAIGroq);
  var _this;
  if (!apiKey || apiKey === "") {
@@ -32653,8 +32645,7 @@ var AxAIGroq = /*#__PURE__*/ function(AxAIOpenAI) {
  config: _config,
  options: _options,
  apiURL: "https://api.groq.com/openai/v1",
- modelInfo: [],
- modelMap: modelMap
+ modelInfo: []
  });
  _define_property$g(_assert_this_initialized$8(_this), "setOptions", function(options) {
  var rateLimiter = _this.newRateLimiter(options);
@@ -32710,23 +32701,19 @@ var AxAIGroq = /*#__PURE__*/ function(AxAIOpenAI) {
  }
  (AxAIOpenAI);

- // cspell:ignore mistral, mixtral, codestral, nemo
  var AxAIMistralModel;
  (function(AxAIMistralModel) {
  AxAIMistralModel["Mistral7B"] = "open-mistral-7b";
  AxAIMistralModel["Mistral8x7B"] = "open-mixtral-8x7b";
  AxAIMistralModel["MistralSmall"] = "mistral-small-latest";
+ AxAIMistralModel["MistralMedium"] = "mistral-medium-latest";
  AxAIMistralModel["MistralLarge"] = "mistral-large-latest";
- AxAIMistralModel["Codestral"] = "codestral-latest";
- AxAIMistralModel["OpenCodestralMamba"] = "open-codestral-mamba";
- AxAIMistralModel["OpenMistralNemo"] = "open-mistral-nemo-latest";
  })(AxAIMistralModel || (AxAIMistralModel = {}));
  var AxAIMistralEmbedModels;
  (function(AxAIMistralEmbedModels) {
  AxAIMistralEmbedModels["MistralEmbed"] = "mistral-embed";
  })(AxAIMistralEmbedModels || (AxAIMistralEmbedModels = {}));

- // cspell:ignore mistral, mixtral, codestral, nemo
  var axModelInfoMistral = [
  {
  name: AxAIMistralModel.Mistral7B,
@@ -32747,28 +32734,16 @@ var axModelInfoMistral = [
  completionTokenCostPer1M: 6
  },
  {
- name: AxAIMistralModel.MistralLarge,
- currency: "USD",
- promptTokenCostPer1M: 8,
- completionTokenCostPer1M: 24
- },
- {
- name: AxAIMistralModel.Codestral,
+ name: AxAIMistralModel.MistralMedium,
  currency: "USD",
- promptTokenCostPer1M: 1,
- completionTokenCostPer1M: 3
+ promptTokenCostPer1M: 2.7,
+ completionTokenCostPer1M: 8.1
  },
  {
- name: AxAIMistralModel.OpenCodestralMamba,
- currency: "USD",
- promptTokenCostPer1M: 0.25,
- completionTokenCostPer1M: 0.25
- },
- {
- name: AxAIMistralModel.OpenMistralNemo,
+ name: AxAIMistralModel.MistralLarge,
  currency: "USD",
- promptTokenCostPer1M: 0.3,
- completionTokenCostPer1M: 0.3
+ promptTokenCostPer1M: 8,
+ completionTokenCostPer1M: 24
  }
  ];

@@ -32903,7 +32878,7 @@ var AxAIMistral = /*#__PURE__*/ function(AxAIOpenAI) {
  _inherits$7(AxAIMistral, AxAIOpenAI);
  var _super = _create_super$7(AxAIMistral);
  function AxAIMistral(param) {
- var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
+ var apiKey = param.apiKey, config = param.config, options = param.options;
  _class_call_check$f(this, AxAIMistral);
  if (!apiKey || apiKey === "") {
  throw new Error("Mistral API key not set");
@@ -32914,8 +32889,7 @@ var AxAIMistral = /*#__PURE__*/ function(AxAIOpenAI) {
  config: _config,
  options: options,
  apiURL: "https://api.mistral.ai/v1",
- modelInfo: axModelInfoMistral,
- modelMap: modelMap
+ modelInfo: axModelInfoMistral
  });
  _get$2((_assert_this_initialized$7(_this), _get_prototype_of$7(AxAIMistral.prototype)), "setName", _this).call(_this, "Mistral");
  return _this;
@@ -33079,7 +33053,7 @@ var AxAIDeepSeek = /*#__PURE__*/ function(AxAIOpenAI) {
  _inherits$6(AxAIDeepSeek, AxAIOpenAI);
  var _super = _create_super$6(AxAIDeepSeek);
  function AxAIDeepSeek(param) {
- var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
+ var apiKey = param.apiKey, config = param.config, options = param.options;
  _class_call_check$e(this, AxAIDeepSeek);
  if (!apiKey || apiKey === "") {
  throw new Error("DeepSeek API key not set");
@@ -33090,8 +33064,7 @@ var AxAIDeepSeek = /*#__PURE__*/ function(AxAIOpenAI) {
  config: _config,
  options: options,
  apiURL: "https://api.deepseek.com",
- modelInfo: axModelInfoDeepSeek,
- modelMap: modelMap
+ modelInfo: axModelInfoDeepSeek
  });
  _get$1((_assert_this_initialized$6(_this), _get_prototype_of$6(AxAIDeepSeek.prototype)), "setName", _this).call(_this, "DeepSeek");
  return _this;
@@ -33259,15 +33232,14 @@ var axAIOllamaDefaultConfig = function() {
  _inherits$5(AxAIOllama, AxAIOpenAI);
  var _super = _create_super$5(AxAIOllama);
  function AxAIOllama(param) {
- var _param_apiKey = param.apiKey, apiKey = _param_apiKey === void 0 ? "not-set" : _param_apiKey, _param_url = param.url, url = _param_url === void 0 ? "http://localhost:11434" : _param_url, config = param.config, options = param.options, modelMap = param.modelMap;
+ var _param_apiKey = param.apiKey, apiKey = _param_apiKey === void 0 ? "not-set" : _param_apiKey, _param_url = param.url, url = _param_url === void 0 ? "http://localhost:11434" : _param_url, config = param.config, options = param.options;
  _class_call_check$d(this, AxAIOllama);
  var _config = _object_spread$6({}, axAIOllamaDefaultConfig(), config);
  var _this = _super.call(this, {
  apiKey: apiKey,
  options: options,
  config: _config,
- apiURL: new URL("/api", url).href,
- modelMap: modelMap
+ apiURL: new URL("/v1", url).href
  });
  _get((_assert_this_initialized$5(_this), _get_prototype_of$5(AxAIOllama.prototype)), "setName", _this).call(_this, "Ollama");
  return _this;
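Note: the Ollama adapter above now builds its base URL under `/v1` instead of `/api`, so the same `url` option resolves to a different endpoint. A quick check of the URL construction used in the constructor, with the default host shown in the diff:

// URL resolution as used in the constructor above.
console.log(new URL('/v1', 'http://localhost:11434').href); // 'http://localhost:11434/v1'
// Previously: new URL('/api', url).href -> 'http://localhost:11434/api'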
@@ -33475,6 +33447,18 @@ var AxAI = /*#__PURE__*/ function() {
  }
  }
  _create_class$5(AxAI, [
+ {
+ key: "setModelMap",
+ value: function setModelMap(modelMap) {
+ this.ai.setModelMap(modelMap);
+ }
+ },
+ {
+ key: "setEmbedModelMap",
+ value: function setEmbedModelMap(modelMap) {
+ this.ai.setEmbedModelMap(modelMap);
+ }
+ },
  {
  key: "getName",
  value: function getName() {
@@ -33505,12 +33489,6 @@ var AxAI = /*#__PURE__*/ function() {
  return this.ai.getFeatures();
  }
  },
- {
- key: "getModelMap",
- value: function getModelMap() {
- return this.ai.getModelMap();
- }
- },
  {
  key: "chat",
  value: function chat(req, options) {
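Note: taken together with the AxBaseAI hunks above, model aliasing moves from a `modelMap` constructor option to `setModelMap`/`setEmbedModelMap` setters, and the old `getModelMap` accessor is removed. A hedged usage sketch; the constructor arguments and alias names below are illustrative assumptions, not taken from the diff:

// Sketch only: assumes an AxAI-style wrapper as modified above.
const ai = new AxAI({ name: 'openai', apiKey: process.env.OPENAI_API_KEY, config: { model: 'gpt-4o' } });
// 0.21.x style: new AxAI({ ..., modelMap: { smart: 'gpt-4o' } })
// 0.22.x style: set the maps after construction instead.
ai.setModelMap({ smart: 'gpt-4o' });
ai.setEmbedModelMap({ default: 'text-embedding-3-small' });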
@@ -34069,8 +34047,8 @@ var assertRequiredFields = function(sig, values) {
  });
  if (missingFields.length > 0) {
  throw new AxAssertionError({
- message: "Output must include: t: ".concat(missingFields.map(function(f) {
- return "`".concat(f.title, ":`");
+ message: "Missing required fields: ".concat(missingFields.map(function(f) {
+ return f.name;
  }).join(", ")),
  values: values
  });
@@ -38876,7 +38854,7 @@ var AxPromptTemplate = function AxPromptTemplate(sig, fieldTemplates) {
  this.outputFormat = {
  type: "text",
  text: [
- "Use the following output format."
+ "Follow the following format."
  ].concat(_to_consumable_array$2(this.renderOutFields(this.sig.getOutputFields())), [
  "---\n\n"
  ]).join("\n\n")
@@ -40692,7 +40670,9 @@ let DspService = class DspService {
  }
  async modelOpenAI(params, _pinsSettingsList, context) {
  var _context_privates_OPENAI_API_KEY, _context_privates_OPENAI_SERVER;
- const { apiKey = (_context_privates_OPENAI_API_KEY = context.privates.OPENAI_API_KEY) != null ? _context_privates_OPENAI_API_KEY : process.env['OPENAI_API_KEY'], apiURL = (_context_privates_OPENAI_SERVER = context.privates.OPENAI_SERVER) != null ? _context_privates_OPENAI_SERVER : process.env['OPENAI_SERVER'], config, options } = params;
+ const { apiKey = (_context_privates_OPENAI_API_KEY = context.privates.OPENAI_API_KEY) != null ? _context_privates_OPENAI_API_KEY : process.env['OPENAI_API_KEY'], apiURL = (_context_privates_OPENAI_SERVER = context.privates.OPENAI_SERVER) != null ? _context_privates_OPENAI_SERVER : process.env['OPENAI_SERVER'], config = {
+ model: 'gpt-4o-mini'
+ }, options } = params;
  const modelInstance = new AxAIOpenAI({
  apiKey,
  apiURL,
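Note: `modelOpenAI` now falls back to a default `config` of `{ model: 'gpt-4o-mini' }` when the caller provides none. A hedged sketch of the effect; the `params` value below is illustrative and only mirrors the destructuring shown above:

// Sketch of the new default, mirroring the destructuring above.
const params = { options: {} }; // no config supplied by the caller
const { config = { model: 'gpt-4o-mini' } } = params;
console.log(config.model); // 'gpt-4o-mini'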