@digipair/skill-dsp 0.21.2 → 0.21.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.cjs.js +157 -132
- package/index.esm.js +162 -137
- package/libs/skill-dsp/src/lib/skill-dsp.d.ts +1 -1
- package/package.json +1 -1
package/index.esm.js
CHANGED
@@ -20332,6 +20332,7 @@ about the parse state.
  var _a;
  var depth = action >> 19 /* Action.ReduceDepthShift */ , type = action & 65535 /* Action.ValueMask */ ;
  var parser = this.p.parser;
+ if (this.reducePos < this.pos - 25 /* Lookahead.Margin */ ) this.setLookAhead(this.pos);
  var dPrec = parser.dynamicPrecedence(type);
  if (dPrec) this.score += dPrec;
  if (depth == 0) {
@@ -21284,7 +21285,7 @@ function cutAt(tree, pos, side) {
  cursor.moveTo(pos);
  for(;;){
  if (!(side < 0 ? cursor.childBefore(pos) : cursor.childAfter(pos))) for(;;){
- if ((side < 0 ? cursor.to < pos : cursor.from > pos) && !cursor.type.isError) return side < 0 ? Math.max(0, Math.min(cursor.to - 1, pos - 25 /*
+ if ((side < 0 ? cursor.to < pos : cursor.from > pos) && !cursor.type.isError) return side < 0 ? Math.max(0, Math.min(cursor.to - 1, pos - 25 /* Lookahead.Margin */ )) : Math.min(tree.length, Math.max(cursor.from + 1, pos + 25 /* Lookahead.Margin */ ));
  if (side < 0 ? cursor.prevSibling() : cursor.nextSibling()) break;
  if (!cursor.parent()) return side < 0 ? 0 : tree.length;
  }
@@ -21407,7 +21408,7 @@ var TokenCache = /*#__PURE__*/ function() {
  token.mask = mask;
  token.context = context;
  }
- if (token.lookAhead > token.end + 25 /*
+ if (token.lookAhead > token.end + 25 /* Lookahead.Margin */ ) lookAhead = Math.max(token.lookAhead, lookAhead);
  if (token.value != 0 /* Term.Err */ ) {
  var startIndex = actionIndex;
  if (token.extended > -1) actionIndex = this.addActions(stack, token.extended, token.end, actionIndex);
@@ -23498,14 +23499,14 @@ function indent(str, spaces) {
  var match = parseIdentifier(input, i1, namePart) || namePart && parseAdditionalSymbol(input, i1) || maybeSpace && parseSpaces(input, i1);
  // match is required
  if (!match) {
- return i = i1,
+ return i = i1, nextMatch = nextMatch1, tokens = tokens1, {
  v: nextMatch1
  };
  }
  var token = match.token, offset = match.offset;
  i1 += offset;
  if (token === " ") {
- return i = i1,
+ return i = i1, nextMatch = nextMatch1, tokens = tokens1, "continue";
  }
  tokens1 = _to_consumable_array$6(tokens1).concat([
  token
@@ -23524,7 +23525,7 @@ function indent(str, spaces) {
  if (contextKeys.some(function(el) {
  return el.startsWith(name);
  })) {
- return i = i1,
+ return i = i1, nextMatch = nextMatch1, tokens = tokens1, "continue";
  }
  if (dateTimeIdentifiers.some(function(el) {
  return el === name;
@@ -23543,9 +23544,9 @@ function indent(str, spaces) {
  if (dateTimeIdentifiers.some(function(el) {
  return el.startsWith(name);
  })) {
- return i = i1,
+ return i = i1, nextMatch = nextMatch1, tokens = tokens1, "continue";
  }
- return i = i1,
+ return i = i1, nextMatch = nextMatch1, tokens = tokens1, {
  v: nextMatch1
  };
  };
@@ -24348,6 +24349,7 @@ var parser = LRParser.deserialize({
  },
  dynamicPrecedences: {
  "31": -1,
+ "67": 1,
  "71": -1,
  "73": -1
  },
@@ -27349,7 +27351,9 @@ const applyTemplate = (value, context)=>{
  result = template(context);
  if (result.startsWith('EVALUATE:')) {
  const path = result.replace(/^EVALUATE:/, '');
- result = evaluate(path, context
+ result = evaluate(path, _extends({}, context, {
+ getTime: (time)=>new Date(time).getTime()
+ }));
  }
  } else if (typeof value === 'object' && Array.isArray(value)) {
  result = value.map((item)=>isPinsSettings(item) ? item : applyTemplate(item, context));
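The hunk above adds a `getTime` helper to the context used for `EVALUATE:` template expressions. A minimal sketch of what the helper itself does (the `evaluate` engine and the rest of the context are digipair internals and are not reproduced here):

```ts
// Sketch of the helper merged into the EVALUATE context; not the package's own code.
// It converts a date string, number, or Date into epoch milliseconds, so template
// expressions can compare or sort timestamps.
const getTime = (time: string | number | Date): number => new Date(time).getTime();

getTime('2024-07-18T00:00:00Z'); // 1721260800000
```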
@@ -28603,7 +28607,7 @@ var axBaseAIDefaultConfig = function() {
  };
  var AxBaseAI = /*#__PURE__*/ function() {
  function AxBaseAI(param) {
- var name = param.name, apiURL = param.apiURL, headers = param.headers, modelInfo = param.modelInfo, models = param.models, _param_options = param.options, options = _param_options === void 0 ? {} : _param_options, supportFor = param.supportFor;
+ var name = param.name, apiURL = param.apiURL, headers = param.headers, modelInfo = param.modelInfo, models = param.models, _param_options = param.options, options = _param_options === void 0 ? {} : _param_options, supportFor = param.supportFor, modelMap = param.modelMap;
  _class_call_check$p(this, AxBaseAI);
  _define_property$p(this, "generateChatReq", void 0);
  _define_property$p(this, "generateEmbedReq", void 0);
@@ -28615,48 +28619,29 @@ var AxBaseAI = /*#__PURE__*/ function() {
  _define_property$p(this, "fetch", void 0);
  _define_property$p(this, "tracer", void 0);
  _define_property$p(this, "modelMap", void 0);
- _define_property$p(this, "
+ _define_property$p(this, "modelInfo", void 0);
  _define_property$p(this, "modelUsage", void 0);
  _define_property$p(this, "embedModelUsage", void 0);
+ _define_property$p(this, "models", void 0);
  _define_property$p(this, "apiURL", void 0);
  _define_property$p(this, "name", void 0);
  _define_property$p(this, "headers", void 0);
- _define_property$p(this, "modelInfo", void 0);
- _define_property$p(this, "embedModelInfo", void 0);
  _define_property$p(this, "supportFor", void 0);
  this.name = name;
  this.apiURL = apiURL;
  this.headers = headers;
  this.supportFor = supportFor;
  this.tracer = options.tracer;
-
-
-
-
-
-
-
-
- name: model,
- currency: "usd",
- promptTokenCostPer1M: 0,
- completionTokenCostPer1M: 0
- };
- } else {
+ this.modelInfo = modelInfo;
+ this.modelMap = modelMap;
+ var _modelMap_models_model, _models_embedModel, _modelMap_;
+ this.models = {
+ model: (_modelMap_models_model = modelMap === null || modelMap === void 0 ? void 0 : modelMap[models.model]) !== null && _modelMap_models_model !== void 0 ? _modelMap_models_model : models.model,
+ embedModel: (_modelMap_ = modelMap === null || modelMap === void 0 ? void 0 : modelMap[(_models_embedModel = models.embedModel) !== null && _models_embedModel !== void 0 ? _models_embedModel : ""]) !== null && _modelMap_ !== void 0 ? _modelMap_ : models.embedModel
+ };
+ if (!models.model || typeof models.model !== "string" || models.model === "") {
  throw new Error("No model defined");
  }
- if (typeof embedModel === "string") {
- var embedModelName = embedModel === null || embedModel === void 0 ? void 0 : embedModel.replace(/-0\d+$|-\d{2,}$/, "");
- var _modelInfo_filter_at1;
- this.embedModelInfo = (_modelInfo_filter_at1 = modelInfo.filter(function(v) {
- return v.name === embedModelName;
- }).at(0)) !== null && _modelInfo_filter_at1 !== void 0 ? _modelInfo_filter_at1 : {
- name: embedModel !== null && embedModel !== void 0 ? embedModel : "",
- currency: "usd",
- promptTokenCostPer1M: 0,
- completionTokenCostPer1M: 0
- };
- }
  this.setOptions(options);
  }
  _create_class$c(AxBaseAI, [
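In plain terms, the rewritten `AxBaseAI` constructor now stores `modelInfo` and `modelMap` and resolves the default chat and embed models through the alias map up front. A de-transpiled sketch equivalent to the `+` lines above (not the shipped source):

```ts
type ModelMap = Record<string, string>;

// Sketch of the new default-model resolution in the AxBaseAI constructor:
// aliases in modelMap are expanded once, falling back to the raw names.
function resolveDefaultModels(
  models: { model: string; embedModel?: string },
  modelMap?: ModelMap
): { model: string; embedModel?: string } {
  return {
    model: modelMap?.[models.model] ?? models.model,
    embedModel: modelMap?.[models.embedModel ?? ''] ?? models.embedModel,
  };
}

resolveDefaultModels({ model: 'fast' }, { fast: 'gpt-4o-mini' });
// => { model: 'gpt-4o-mini', embedModel: undefined }
```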
@@ -28696,21 +28681,27 @@ var AxBaseAI = /*#__PURE__*/ function() {
  }
  },
  {
- key: "
- value: function
-
-
-
-
-
-
-
+ key: "_getModelInfo",
+ value: function _getModelInfo(model) {
+ var _this_modelMap;
+ var _this_modelMap_model;
+ var _model = (_this_modelMap_model = (_this_modelMap = this.modelMap) === null || _this_modelMap === void 0 ? void 0 : _this_modelMap[model]) !== null && _this_modelMap_model !== void 0 ? _this_modelMap_model : model;
+ var modelName = _model.replace(/-0\d+$|-\d{2,}$/, "");
+ var _this_modelInfo_filter_at;
+ return (_this_modelInfo_filter_at = this.modelInfo.filter(function(v) {
+ return v.name === modelName;
+ }).at(0)) !== null && _this_modelInfo_filter_at !== void 0 ? _this_modelInfo_filter_at : {
+ name: model,
+ currency: "usd",
+ promptTokenCostPer1M: 0,
+ completionTokenCostPer1M: 0
+ };
  }
  },
  {
  key: "getModelInfo",
  value: function getModelInfo() {
- return _object_spread_props$8(_object_spread$h({}, this.
+ return _object_spread_props$8(_object_spread$h({}, this._getModelInfo(this.models.model)), {
  provider: this.name
  });
  }
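The new private `_getModelInfo` helper expands an alias through `modelMap`, strips a trailing revision suffix, then looks the name up in `modelInfo`, falling back to a zero-cost entry. The suffix-stripping regex in isolation, with illustrative inputs:

```ts
// Same regex as in _getModelInfo above: drops "-0613"-style or long numeric
// date suffixes so the lookup matches the base model name in modelInfo.
const stripRevision = (model: string): string => model.replace(/-0\d+$|-\d{2,}$/, '');

stripRevision('gpt-4-0613');                 // 'gpt-4'
stripRevision('claude-3-5-sonnet-20240620'); // 'claude-3-5-sonnet'
stripRevision('gpt-4o-mini');                // unchanged: 'gpt-4o-mini'
```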
@@ -28718,9 +28709,17 @@ var AxBaseAI = /*#__PURE__*/ function() {
  {
  key: "getEmbedModelInfo",
  value: function getEmbedModelInfo() {
-
-
-
+ if (this.models.embedModel) {
+ return _object_spread_props$8(_object_spread$h({}, this._getModelInfo(this.models.embedModel)), {
+ provider: this.name
+ });
+ }
+ }
+ },
+ {
+ key: "getModelMap",
+ value: function getModelMap() {
+ return this.modelMap;
  }
  },
  {
@@ -28743,13 +28742,14 @@ var AxBaseAI = /*#__PURE__*/ function() {
  },
  {
  key: "chat",
- value: function chat(
+ value: function chat(req, options) {
  var _this = this;
  return _async_to_generator$7(function() {
- var _mc_stopSequences, _this_tracer, mc, _obj;
+ var _this_modelMap, _this_modelMap_req_model, model, _mc_stopSequences, _this_tracer, mc, _obj;
  return _ts_generator$8(this, function(_state) {
  switch(_state.label){
  case 0:
+ model = req.model ? (_this_modelMap_req_model = (_this_modelMap = _this.modelMap) === null || _this_modelMap === void 0 ? void 0 : _this_modelMap[req.model]) !== null && _this_modelMap_req_model !== void 0 ? _this_modelMap_req_model : req.model : _this.models.model;
  if (!_this.tracer) return [
  3,
  2
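`chat()` now resolves the effective model per request before tracing and before calling `_chat`. The precedence, written out as a readable sketch of the transpiled `+` line above:

```ts
// Sketch of the per-request resolution added in chat(): an explicit req.model wins,
// aliases are expanded through modelMap, otherwise the constructor default is used.
function resolveChatModel(
  reqModel: string | undefined,
  modelMap: Record<string, string> | undefined,
  defaultModel: string
): string {
  return reqModel ? modelMap?.[reqModel] ?? reqModel : defaultModel;
}

resolveChatModel('fast', { fast: 'gpt-4o-mini' }, 'gpt-4o');     // 'gpt-4o-mini'
resolveChatModel(undefined, { fast: 'gpt-4o-mini' }, 'gpt-4o');  // 'gpt-4o'
```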
@@ -28759,7 +28759,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
  4,
  (_this_tracer = _this.tracer) === null || _this_tracer === void 0 ? void 0 : _this_tracer.startActiveSpan("Chat Request", {
  kind: AxSpanKind.SERVER,
- attributes: (_obj = {}, _define_property$p(_obj, axSpanAttributes.LLM_SYSTEM, _this.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MODEL,
+ attributes: (_obj = {}, _define_property$p(_obj, axSpanAttributes.LLM_SYSTEM, _this.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MODEL, model), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MAX_TOKENS, mc.maxTokens), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TEMPERATURE, mc.temperature), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TOP_P, mc.topP), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TOP_K, mc.topK), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_FREQUENCY_PENALTY, mc.frequencyPenalty), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_PRESENCE_PENALTY, mc.presencePenalty), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_STOP_SEQUENCES, (_mc_stopSequences = mc.stopSequences) === null || _mc_stopSequences === void 0 ? void 0 : _mc_stopSequences.join(", ")), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_LLM_IS_STREAMING, mc.stream), _obj)
  }, function() {
  var _ref = _async_to_generator$7(function(span) {
  var res;
@@ -28768,7 +28768,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
  case 0:
  return [
  4,
- _this._chat(
+ _this._chat(model, req, options, span)
  ];
  case 1:
  res = _state.sent();
@@ -28793,7 +28793,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
  case 2:
  return [
  4,
- _this._chat(
+ _this._chat(model, req, options)
  ];
  case 3:
  return [
@@ -28807,10 +28807,10 @@ var AxBaseAI = /*#__PURE__*/ function() {
  },
  {
  key: "_chat",
- value: function _chat(
+ value: function _chat(model, chatReq, options, span) {
  var _this = this;
  return _async_to_generator$7(function() {
- var
+ var _chatReq_modelConfig, reqFn, _options_stream, stream, functions, req, fn, rv, _tmp, respFn, wrappedRespFn, doneCb, st, res;
  return _ts_generator$8(this, function(_state) {
  switch(_state.label){
  case 0:
@@ -28818,11 +28818,14 @@ var AxBaseAI = /*#__PURE__*/ function() {
  throw new Error("generateChatReq not implemented");
  }
  reqFn = _this.generateChatReq;
- stream = (_options_stream = options === null || options === void 0 ? void 0 : options.stream) !== null && _options_stream !== void 0 ? _options_stream : (
-
-
+ stream = (_options_stream = options === null || options === void 0 ? void 0 : options.stream) !== null && _options_stream !== void 0 ? _options_stream : (_chatReq_modelConfig = chatReq.modelConfig) === null || _chatReq_modelConfig === void 0 ? void 0 : _chatReq_modelConfig.stream;
+ if (chatReq.functions && chatReq.functions.length > 0) {
+ functions = chatReq.functions;
+ }
+ req = _object_spread_props$8(_object_spread$h({}, chatReq), {
+ model: model,
  functions: functions,
- modelConfig: _object_spread_props$8(_object_spread$h({},
+ modelConfig: _object_spread_props$8(_object_spread$h({}, chatReq.modelConfig), {
  stream: stream
  })
  });
@@ -28959,10 +28962,14 @@ var AxBaseAI = /*#__PURE__*/ function() {
  value: function embed(req, options) {
  var _this = this;
  return _async_to_generator$7(function() {
- var _this_tracer, _obj;
+ var _this_modelMap, _this_modelMap_req_embedModel, embedModel, _this_tracer, _req_embedModel, _obj;
  return _ts_generator$8(this, function(_state) {
  switch(_state.label){
  case 0:
+ embedModel = req.embedModel ? (_this_modelMap_req_embedModel = (_this_modelMap = _this.modelMap) === null || _this_modelMap === void 0 ? void 0 : _this_modelMap[req.embedModel]) !== null && _this_modelMap_req_embedModel !== void 0 ? _this_modelMap_req_embedModel : req.embedModel : _this.models.embedModel;
+ if (!embedModel) {
+ throw new Error("No embed model defined");
+ }
  if (!_this.tracer) return [
  3,
  2
@@ -28971,7 +28978,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
  4,
  (_this_tracer = _this.tracer) === null || _this_tracer === void 0 ? void 0 : _this_tracer.startActiveSpan("Embed Request", {
  kind: AxSpanKind.SERVER,
- attributes: (_obj = {}, _define_property$p(_obj, axSpanAttributes.LLM_SYSTEM, _this.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MODEL, _this.
+ attributes: (_obj = {}, _define_property$p(_obj, axSpanAttributes.LLM_SYSTEM, _this.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MODEL, (_req_embedModel = req.embedModel) !== null && _req_embedModel !== void 0 ? _req_embedModel : _this.models.embedModel), _obj)
  }, function() {
  var _ref = _async_to_generator$7(function(span) {
  var res;
@@ -28980,7 +28987,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
  case 0:
  return [
  4,
- _this._embed(req, options, span)
+ _this._embed(embedModel, req, options, span)
  ];
  case 1:
  res = _state.sent();
@@ -29005,7 +29012,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
  case 2:
  return [
  2,
- _this._embed(req, options)
+ _this._embed(embedModel, req, options)
  ];
  }
  });
@@ -29014,10 +29021,10 @@ var AxBaseAI = /*#__PURE__*/ function() {
  },
  {
  key: "_embed",
- value: function _embed(
+ value: function _embed(embedModel, embedReq, options, span) {
  var _this = this;
  return _async_to_generator$7(function() {
- var fn, resValue, _tmp, res, _res_modelUsage_completionTokens, _obj;
+ var req, fn, resValue, _tmp, res, _res_modelUsage_completionTokens, _obj;
  return _ts_generator$8(this, function(_state) {
  switch(_state.label){
  case 0:
@@ -29027,6 +29034,9 @@ var AxBaseAI = /*#__PURE__*/ function() {
  if (!_this.generateEmbedResp) {
  throw new Error("generateEmbedResp not implemented");
  }
+ req = _object_spread_props$8(_object_spread$h({}, embedReq), {
+ embedModel: embedModel
+ });
  fn = function() {
  var _ref = _async_to_generator$7(function() {
  var _this_generateEmbedReq, apiConfig, reqValue, res;
@@ -29108,22 +29118,6 @@ var AxBaseAI = /*#__PURE__*/ function() {
  var headers = arguments.length > 0 && arguments[0] !== void 0 ? arguments[0] : {};
  return _object_spread$h({}, headers, this.headers);
  }
- },
- {
- key: "getEmbedModel",
- value: function getEmbedModel(name) {
- var _this_embedModelMap;
- var _this_embedModelMap_name;
- return name ? (_this_embedModelMap_name = (_this_embedModelMap = this.embedModelMap) === null || _this_embedModelMap === void 0 ? void 0 : _this_embedModelMap[name]) !== null && _this_embedModelMap_name !== void 0 ? _this_embedModelMap_name : name : undefined;
- }
- },
- {
- key: "getModel",
- value: function getModel(name) {
- var _this_modelMap;
- var _this_modelMap_name;
- return (_this_modelMap_name = (_this_modelMap = this.modelMap) === null || _this_modelMap === void 0 ? void 0 : _this_modelMap[name]) !== null && _this_modelMap_name !== void 0 ? _this_modelMap_name : name;
- }
  }
  ]);
  return AxBaseAI;
@@ -29287,6 +29281,7 @@ var AxAIOpenAIModel;
  (function(AxAIOpenAIModel) {
  AxAIOpenAIModel["GPT4"] = "gpt-4";
  AxAIOpenAIModel["GPT4O"] = "gpt-4o";
+ AxAIOpenAIModel["GPT4OMini"] = "gpt-4o-mini";
  AxAIOpenAIModel["GPT4Turbo"] = "gpt-4-turbo";
  AxAIOpenAIModel["GPT35Turbo"] = "gpt-3.5-turbo";
  AxAIOpenAIModel["GPT35TurboInstruct"] = "gpt-3.5-turbo-instruct";
@@ -29317,6 +29312,12 @@ var AxAIOpenAIEmbedModel;
  promptTokenCostPer1M: 5,
  completionTokenCostPer1M: 15
  },
+ {
+ name: AxAIOpenAIModel.GPT4OMini,
+ currency: "usd",
+ promptTokenCostPer1M: 0.15,
+ completionTokenCostPer1M: 0.6
+ },
  {
  name: AxAIOpenAIModel.GPT4Turbo,
  currency: "usd",
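The new pricing entry for `gpt-4o-mini` uses the same per-1M-token unit as the other entries. A quick sanity check of how such an entry translates into dollars (illustrative token counts, not from the package):

```ts
// Illustrative cost estimate using the gpt-4o-mini entry above (USD per 1M tokens).
const info = { promptTokenCostPer1M: 0.15, completionTokenCostPer1M: 0.6 };
const promptTokens = 200_000;
const completionTokens = 50_000;
const usd =
  (promptTokens / 1_000_000) * info.promptTokenCostPer1M +
  (completionTokens / 1_000_000) * info.completionTokenCostPer1M;
// usd === 0.06  (0.03 for prompts + 0.03 for completions)
```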
@@ -29472,7 +29473,7 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
  _inherits$f(AxAIOpenAI, AxBaseAI);
  var _super = _create_super$f(AxAIOpenAI);
  function AxAIOpenAI(param) {
- var apiKey = param.apiKey, config = param.config, options = param.options, apiURL = param.apiURL, _param_modelInfo = param.modelInfo, modelInfo = _param_modelInfo === void 0 ? axModelInfoOpenAI : _param_modelInfo;
+ var apiKey = param.apiKey, config = param.config, options = param.options, apiURL = param.apiURL, _param_modelInfo = param.modelInfo, modelInfo = _param_modelInfo === void 0 ? axModelInfoOpenAI : _param_modelInfo, modelMap = param.modelMap;
  _class_call_check$o(this, AxAIOpenAI);
  var _this;
  if (!apiKey || apiKey === "") {
@@ -29494,14 +29495,16 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
  supportFor: {
  functions: true,
  streaming: true
- }
+ },
+ modelMap: modelMap
  });
  _define_property$o(_assert_this_initialized$f(_this), "config", void 0);
  _define_property$o(_assert_this_initialized$f(_this), "streamingUsage", void 0);
+ _define_property$o(_assert_this_initialized$f(_this), "dimensions", void 0);
  _define_property$o(_assert_this_initialized$f(_this), "generateChatReq", function(req, // eslint-disable-next-line @typescript-eslint/no-unused-vars
  _config) {
  var _req_functions, _req_modelConfig, _req_modelConfig1, _this_config, _this_config1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4, _req_modelConfig5, _req_modelConfig6, _req_modelConfig7;
- var model =
+ var model = req.model;
  if (!req.chatPrompt || req.chatPrompt.length === 0) {
  throw new Error("Chat prompt is empty");
  }
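The OpenAI provider now accepts `modelMap` and forwards it to `AxBaseAI`, so callers can address models by their own aliases. A hypothetical usage sketch; only `apiKey` and `modelMap` are taken from the diff, the surrounding setup is assumed:

```ts
// Hypothetical usage sketch (not from the package): aliases defined once,
// expanded to concrete model names before each request.
const ai = new AxAIOpenAI({
  apiKey: process.env.OPENAI_APIKEY ?? '',
  modelMap: {
    fast: 'gpt-4o-mini', // the new AxAIOpenAIModel.GPT4OMini value
    smart: 'gpt-4o',
  },
});

// A chat request sent with { model: 'fast' } is rewritten to 'gpt-4o-mini'
// before the HTTP call, and the same map feeds the pricing lookup.
```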
@@ -29554,7 +29557,7 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
  ];
  });
  _define_property$o(_assert_this_initialized$f(_this), "generateEmbedReq", function(req) {
- var model =
+ var model = req.embedModel;
  if (!model) {
  throw new Error("Embed model not set");
  }
@@ -29566,7 +29569,8 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
  };
  var reqValue = {
  model: model,
- input: req.texts
+ input: req.texts,
+ dimensions: _this.dimensions
  };
  return [
  apiConfig,
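Embedding requests now carry a `dimensions` field, populated from `config.dimensions` later in this constructor (see the `_this.dimensions = config?.dimensions` hunk below). A hypothetical configuration sketch; the `dimensions` key comes from the diff, the other config keys are assumptions:

```ts
// Hypothetical sketch: asking OpenAI's text-embedding-3 models for shorter vectors.
// An undefined `dimensions` value is dropped when the request body is JSON-serialized.
const ai = new AxAIOpenAI({
  apiKey: process.env.OPENAI_APIKEY ?? '',
  config: {
    embedModel: 'text-embedding-3-small', // assumed config key
    dimensions: 256,                      // forwarded as-is on the embed request
  },
});
```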
@@ -29675,6 +29679,7 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
  _this.config = _config;
  var _options_streamingUsage;
  _this.streamingUsage = (_options_streamingUsage = options === null || options === void 0 ? void 0 : options.streamingUsage) !== null && _options_streamingUsage !== void 0 ? _options_streamingUsage : true;
+ _this.dimensions = config === null || config === void 0 ? void 0 : config.dimensions;
  return _this;
  }
  _create_class$b(AxAIOpenAI, [
@@ -29908,7 +29913,7 @@ var AxAIAzureOpenAI = /*#__PURE__*/ function(AxAIOpenAI) {
  _inherits$e(AxAIAzureOpenAI, AxAIOpenAI);
  var _super = _create_super$e(AxAIAzureOpenAI);
  function AxAIAzureOpenAI(param) {
- var apiKey = param.apiKey, resourceName = param.resourceName, deploymentName = param.deploymentName, _param_version = param.version, version = _param_version === void 0 ? "api-version=2024-02-15-preview" : _param_version, config = param.config, options = param.options;
+ var apiKey = param.apiKey, resourceName = param.resourceName, deploymentName = param.deploymentName, _param_version = param.version, version = _param_version === void 0 ? "api-version=2024-02-15-preview" : _param_version, config = param.config, options = param.options, modelMap = param.modelMap;
  _class_call_check$n(this, AxAIAzureOpenAI);
  if (!apiKey || apiKey === "") {
  throw new Error("Azure OpenAPI API key not set");
@@ -29923,7 +29928,8 @@ var AxAIAzureOpenAI = /*#__PURE__*/ function(AxAIOpenAI) {
  var _this = _super.call(this, {
  apiKey: apiKey,
  config: _config,
- options: options
+ options: options,
+ modelMap: modelMap
  });
  var host = resourceName.includes("://") ? resourceName : "https://".concat(resourceName, ".openai.azure.com/");
  _get$5((_assert_this_initialized$e(_this), _get_prototype_of$e(AxAIAzureOpenAI.prototype)), "setName", _this).call(_this, "Azure OpenAI");
@@ -30069,7 +30075,7 @@ var AxAIHuggingFace = /*#__PURE__*/ function(AxBaseAI) {
  _inherits$d(AxAIHuggingFace, AxBaseAI);
  var _super = _create_super$d(AxAIHuggingFace);
  function AxAIHuggingFace(param) {
- var apiKey = param.apiKey, config = param.config, options = param.options;
+ var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
  _class_call_check$m(this, AxAIHuggingFace);
  var _this;
  if (!apiKey || apiKey === "") {
@@ -30090,13 +30096,14 @@ var AxAIHuggingFace = /*#__PURE__*/ function(AxBaseAI) {
  supportFor: {
  functions: false,
  streaming: false
- }
+ },
+ modelMap: modelMap
  });
  _define_property$m(_assert_this_initialized$d(_this), "config", void 0);
  _define_property$m(_assert_this_initialized$d(_this), "generateChatReq", function(req, // eslint-disable-next-line @typescript-eslint/no-unused-vars
  _config) {
  var _req_chatPrompt, _req_modelConfig, _req_modelConfig1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4;
- var model =
+ var model = req.model;
  var functionsList = req.functions ? "Functions:\n".concat(JSON.stringify(req.functions, null, 2), "\n") : "";
  var prompt = (_req_chatPrompt = req.chatPrompt) === null || _req_chatPrompt === void 0 ? void 0 : _req_chatPrompt.map(function(msg) {
  switch(msg.role){
@@ -30318,7 +30325,7 @@ var AxAITogether = /*#__PURE__*/ function(AxAIOpenAI) {
  _inherits$c(AxAITogether, AxAIOpenAI);
  var _super = _create_super$c(AxAITogether);
  function AxAITogether(param) {
- var apiKey = param.apiKey, config = param.config, options = param.options;
+ var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
  _class_call_check$l(this, AxAITogether);
  if (!apiKey || apiKey === "") {
  throw new Error("Together API key not set");
@@ -30329,7 +30336,8 @@ var AxAITogether = /*#__PURE__*/ function(AxAIOpenAI) {
  config: _config,
  options: options,
  apiURL: "https://api.together.xyz/v1",
- modelInfo: axModelInfoTogether
+ modelInfo: axModelInfoTogether,
+ modelMap: modelMap
  });
  _get$4((_assert_this_initialized$c(_this), _get_prototype_of$c(AxAITogether.prototype)), "setName", _this).call(_this, "Together");
  return _this;
@@ -30603,7 +30611,7 @@ var AxAICohere = /*#__PURE__*/ function(AxBaseAI) {
  _inherits$b(AxAICohere, AxBaseAI);
  var _super = _create_super$b(AxAICohere);
  function AxAICohere(param) {
- var apiKey = param.apiKey, config = param.config, options = param.options;
+ var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
  _class_call_check$k(this, AxAICohere);
  var _this;
  if (!apiKey || apiKey === "") {
@@ -30624,13 +30632,14 @@ var AxAICohere = /*#__PURE__*/ function(AxBaseAI) {
  functions: true,
  streaming: true
  },
- options: options
+ options: options,
+ modelMap: modelMap
  });
  _define_property$k(_assert_this_initialized$b(_this), "config", void 0);
  _define_property$k(_assert_this_initialized$b(_this), "generateChatReq", function(req, // eslint-disable-next-line @typescript-eslint/no-unused-vars
  _config) {
  var _req_functions, _req_modelConfig, _req_modelConfig1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4, _req_modelConfig5, _req_modelConfig6;
- var model =
+ var model = req.model;
  var lastChatMsg = req.chatPrompt.at(-1);
  var restOfChat = req.chatPrompt.slice(0, -1);
  var message;
@@ -30725,7 +30734,7 @@ var AxAICohere = /*#__PURE__*/ function(AxBaseAI) {
  ];
  });
  _define_property$k(_assert_this_initialized$b(_this), "generateEmbedReq", function(req) {
- var model =
+ var model = req.embedModel;
  if (!model) {
  throw new Error("Embed model not set");
  }
@@ -31124,7 +31133,7 @@ var safetySettings = [
  _inherits$a(AxAIGoogleGemini, AxBaseAI);
  var _super = _create_super$a(AxAIGoogleGemini);
  function AxAIGoogleGemini(param) {
- var apiKey = param.apiKey, projectId = param.projectId, region = param.region, config = param.config, options = param.options;
+ var apiKey = param.apiKey, projectId = param.projectId, region = param.region, config = param.config, options = param.options, modelMap = param.modelMap;
  _class_call_check$j(this, AxAIGoogleGemini);
  var _this;
  if (!apiKey || apiKey === "") {
@@ -31148,14 +31157,15 @@ var safetySettings = [
  supportFor: {
  functions: true,
  streaming: true
- }
+ },
+ modelMap: modelMap
  });
  _define_property$j(_assert_this_initialized$a(_this), "options", void 0);
  _define_property$j(_assert_this_initialized$a(_this), "config", void 0);
  _define_property$j(_assert_this_initialized$a(_this), "apiKey", void 0);
  _define_property$j(_assert_this_initialized$a(_this), "generateChatReq", function(req) {
  var _req_modelConfig, _this_options, _req_modelConfig1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4, _req_modelConfig5;
- var model =
+ var model = req.model;
  var _req_modelConfig_stream;
  var stream = (_req_modelConfig_stream = (_req_modelConfig = req.modelConfig) === null || _req_modelConfig === void 0 ? void 0 : _req_modelConfig.stream) !== null && _req_modelConfig_stream !== void 0 ? _req_modelConfig_stream : _this.config.stream;
  if (!req.chatPrompt || req.chatPrompt.length === 0) {
@@ -31335,7 +31345,7 @@ var safetySettings = [
  ];
  });
  _define_property$j(_assert_this_initialized$a(_this), "generateEmbedReq", function(req) {
- var model =
+ var model = req.embedModel;
  if (!model) {
  throw new Error("Embed model not set");
  }
@@ -31699,7 +31709,7 @@ var AxAIAnthropic = /*#__PURE__*/ function(AxBaseAI) {
  _inherits$9(AxAIAnthropic, AxBaseAI);
  var _super = _create_super$9(AxAIAnthropic);
  function AxAIAnthropic(param) {
- var apiKey = param.apiKey, config = param.config, options = param.options;
+ var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
  _class_call_check$i(this, AxAIAnthropic);
  var _this;
  if (!apiKey || apiKey === "") {
@@ -31721,12 +31731,13 @@ var AxAIAnthropic = /*#__PURE__*/ function(AxBaseAI) {
  supportFor: {
  functions: true,
  streaming: true
- }
+ },
+ modelMap: modelMap
  });
  _define_property$i(_assert_this_initialized$9(_this), "config", void 0);
  _define_property$i(_assert_this_initialized$9(_this), "generateChatReq", function(req) {
  var _req_functions, _req_modelConfig, _req_modelConfig1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4, _req_modelConfig5;
- var model =
+ var model = req.model;
  var apiConfig = {
  name: "/messages"
  };
@@ -32623,7 +32634,7 @@ var AxAIGroq = /*#__PURE__*/ function(AxAIOpenAI) {
  _inherits$8(AxAIGroq, AxAIOpenAI);
  var _super = _create_super$8(AxAIGroq);
  function AxAIGroq(param) {
- var apiKey = param.apiKey, config = param.config, options = param.options;
+ var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
  _class_call_check$g(this, AxAIGroq);
  var _this;
  if (!apiKey || apiKey === "") {
@@ -32638,7 +32649,8 @@ var AxAIGroq = /*#__PURE__*/ function(AxAIOpenAI) {
  config: _config,
  options: _options,
  apiURL: "https://api.groq.com/openai/v1",
- modelInfo: []
+ modelInfo: [],
+ modelMap: modelMap
  });
  _define_property$g(_assert_this_initialized$8(_this), "setOptions", function(options) {
  var rateLimiter = _this.newRateLimiter(options);
@@ -32694,19 +32706,23 @@ var AxAIGroq = /*#__PURE__*/ function(AxAIOpenAI) {
  }
  (AxAIOpenAI);

+ // cspell:ignore mistral, mixtral, codestral, nemo
  var AxAIMistralModel;
  (function(AxAIMistralModel) {
  AxAIMistralModel["Mistral7B"] = "open-mistral-7b";
  AxAIMistralModel["Mistral8x7B"] = "open-mixtral-8x7b";
  AxAIMistralModel["MistralSmall"] = "mistral-small-latest";
- AxAIMistralModel["MistralMedium"] = "mistral-medium-latest";
  AxAIMistralModel["MistralLarge"] = "mistral-large-latest";
+ AxAIMistralModel["Codestral"] = "codestral-latest";
+ AxAIMistralModel["OpenCodestralMamba"] = "open-codestral-mamba";
+ AxAIMistralModel["OpenMistralNemo"] = "open-mistral-nemo-latest";
  })(AxAIMistralModel || (AxAIMistralModel = {}));
  var AxAIMistralEmbedModels;
  (function(AxAIMistralEmbedModels) {
  AxAIMistralEmbedModels["MistralEmbed"] = "mistral-embed";
  })(AxAIMistralEmbedModels || (AxAIMistralEmbedModels = {}));

+ // cspell:ignore mistral, mixtral, codestral, nemo
  var axModelInfoMistral = [
  {
  name: AxAIMistralModel.Mistral7B,
@@ -32726,17 +32742,29 @@ var axModelInfoMistral = [
  promptTokenCostPer1M: 2,
  completionTokenCostPer1M: 6
  },
- {
- name: AxAIMistralModel.MistralMedium,
- currency: "USD",
- promptTokenCostPer1M: 2.7,
- completionTokenCostPer1M: 8.1
- },
  {
  name: AxAIMistralModel.MistralLarge,
  currency: "USD",
  promptTokenCostPer1M: 8,
  completionTokenCostPer1M: 24
+ },
+ {
+ name: AxAIMistralModel.Codestral,
+ currency: "USD",
+ promptTokenCostPer1M: 1,
+ completionTokenCostPer1M: 3
+ },
+ {
+ name: AxAIMistralModel.OpenCodestralMamba,
+ currency: "USD",
+ promptTokenCostPer1M: 0.25,
+ completionTokenCostPer1M: 0.25
+ },
+ {
+ name: AxAIMistralModel.OpenMistralNemo,
+ currency: "USD",
+ promptTokenCostPer1M: 0.3,
+ completionTokenCostPer1M: 0.3
  }
  ];

@@ -32871,7 +32899,7 @@ var AxAIMistral = /*#__PURE__*/ function(AxAIOpenAI) {
  _inherits$7(AxAIMistral, AxAIOpenAI);
  var _super = _create_super$7(AxAIMistral);
  function AxAIMistral(param) {
- var apiKey = param.apiKey, config = param.config, options = param.options;
+ var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
  _class_call_check$f(this, AxAIMistral);
  if (!apiKey || apiKey === "") {
  throw new Error("Mistral API key not set");
@@ -32882,7 +32910,8 @@ var AxAIMistral = /*#__PURE__*/ function(AxAIOpenAI) {
  config: _config,
  options: options,
  apiURL: "https://api.mistral.ai/v1",
- modelInfo: axModelInfoMistral
+ modelInfo: axModelInfoMistral,
+ modelMap: modelMap
  });
  _get$2((_assert_this_initialized$7(_this), _get_prototype_of$7(AxAIMistral.prototype)), "setName", _this).call(_this, "Mistral");
  return _this;
@@ -33046,7 +33075,7 @@ var AxAIDeepSeek = /*#__PURE__*/ function(AxAIOpenAI) {
  _inherits$6(AxAIDeepSeek, AxAIOpenAI);
  var _super = _create_super$6(AxAIDeepSeek);
  function AxAIDeepSeek(param) {
- var apiKey = param.apiKey, config = param.config, options = param.options;
+ var apiKey = param.apiKey, config = param.config, options = param.options, modelMap = param.modelMap;
  _class_call_check$e(this, AxAIDeepSeek);
  if (!apiKey || apiKey === "") {
  throw new Error("DeepSeek API key not set");
@@ -33057,7 +33086,8 @@ var AxAIDeepSeek = /*#__PURE__*/ function(AxAIOpenAI) {
  config: _config,
  options: options,
  apiURL: "https://api.deepseek.com",
- modelInfo: axModelInfoDeepSeek
+ modelInfo: axModelInfoDeepSeek,
+ modelMap: modelMap
  });
  _get$1((_assert_this_initialized$6(_this), _get_prototype_of$6(AxAIDeepSeek.prototype)), "setName", _this).call(_this, "DeepSeek");
  return _this;
@@ -33225,14 +33255,15 @@ var axAIOllamaDefaultConfig = function() {
  _inherits$5(AxAIOllama, AxAIOpenAI);
  var _super = _create_super$5(AxAIOllama);
  function AxAIOllama(param) {
- var _param_apiKey = param.apiKey, apiKey = _param_apiKey === void 0 ? "not-set" : _param_apiKey, _param_url = param.url, url = _param_url === void 0 ? "http://localhost:11434" : _param_url, config = param.config, options = param.options;
+ var _param_apiKey = param.apiKey, apiKey = _param_apiKey === void 0 ? "not-set" : _param_apiKey, _param_url = param.url, url = _param_url === void 0 ? "http://localhost:11434" : _param_url, config = param.config, options = param.options, modelMap = param.modelMap;
  _class_call_check$d(this, AxAIOllama);
  var _config = _object_spread$6({}, axAIOllamaDefaultConfig(), config);
  var _this = _super.call(this, {
  apiKey: apiKey,
  options: options,
  config: _config,
- apiURL: new URL("/
+ apiURL: new URL("/api", url).href,
+ modelMap: modelMap
  });
  _get((_assert_this_initialized$5(_this), _get_prototype_of$5(AxAIOllama.prototype)), "setName", _this).call(_this, "Ollama");
  return _this;
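The Ollama base URL is now built with the WHATWG `URL` constructor instead of string concatenation. Behaviour of that expression for reference (standard `URL` semantics, not package code):

```ts
// new URL(path, base) with an absolute path replaces the base URL's path entirely.
new URL('/api', 'http://localhost:11434').href;              // 'http://localhost:11434/api'
new URL('/api', 'https://ollama.example.com:8080/v1/').href; // 'https://ollama.example.com:8080/api'
```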
@@ -33440,18 +33471,6 @@ var AxAI = /*#__PURE__*/ function() {
  }
  }
  _create_class$5(AxAI, [
- {
- key: "setModelMap",
- value: function setModelMap(modelMap) {
- this.ai.setModelMap(modelMap);
- }
- },
- {
- key: "setEmbedModelMap",
- value: function setEmbedModelMap(modelMap) {
- this.ai.setEmbedModelMap(modelMap);
- }
- },
  {
  key: "getName",
  value: function getName() {
@@ -33482,6 +33501,12 @@ var AxAI = /*#__PURE__*/ function() {
  return this.ai.getFeatures();
  }
  },
+ {
+ key: "getModelMap",
+ value: function getModelMap() {
+ return this.ai.getModelMap();
+ }
+ },
  {
  key: "chat",
  value: function chat(req, options) {
@@ -34040,8 +34065,8 @@ var assertRequiredFields = function(sig, values) {
  });
  if (missingFields.length > 0) {
  throw new AxAssertionError({
- message: "
- return f.
+ message: "Output must include: t: ".concat(missingFields.map(function(f) {
+ return "`".concat(f.title, ":`");
  }).join(", ")),
  values: values
  });
@@ -38847,7 +38872,7 @@ var AxPromptTemplate = function AxPromptTemplate(sig, fieldTemplates) {
  this.outputFormat = {
  type: "text",
  text: [
- "
+ "Use the following output format."
  ].concat(_to_consumable_array$2(this.renderOutFields(this.sig.getOutputFields())), [
  "---\n\n"
  ].join("\n\n")