@digipair/skill-dsp 0.21.15 → 0.22.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.cjs.js +134 -155
- package/index.esm.js +139 -160
- package/libs/skill-dsp/src/lib/skill-dsp.d.ts +1 -1
- package/package.json +1 -1
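Most of the delta is in the Ax LLM client bundled into `index.esm.js`: the provider constructors (`AxAIOpenAI`, `AxAIAzureOpenAI`, `AxAIMistral`, `AxAIOllama`, ...) drop their `modelMap` (and, for OpenAI, `dimensions`) options, `AxBaseAI` now resolves `modelInfo`/`embedModelInfo` once at construction time via new private `getModel`/`getEmbedModel` helpers, the `getModelMap` accessor is removed in favour of `setModelMap`/`setEmbedModelMap` setters, and `DspService.modelOpenAI` gains a default config of `{ model: 'gpt-4o-mini' }`. Below is a minimal usage sketch of the setter-based flow, assuming the bundled `AxAIOpenAI` class and its 0.22.x setters are reachable from consumer code; the diff does not show the package's public exports, so treat the names and call shape as illustrative only.

```ts
// Sketch only - the identifiers below come from the bundled diff, but how (or
// whether) @digipair/skill-dsp re-exports them is not shown in this diff.
const ai = new AxAIOpenAI({
  apiKey: 'YOUR_OPENAI_API_KEY',          // placeholder
  config: { model: 'gpt-4o-mini' },       // same default DspService.modelOpenAI now applies
});

// 0.22.x replaces the removed `modelMap` constructor option and `getModelMap`
// accessor with post-construction setters; their downstream effect on request
// handling is not visible in this diff.
ai.setModelMap({ fast: 'gpt-4o-mini' });
// Hypothetical embed alias; the target model name is a placeholder.
ai.setEmbedModelMap({ default: 'text-embedding-3-small' });
```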
package/index.esm.js  CHANGED
@@ -20332,7 +20332,6 @@ about the parse state.
 var _a;
 var depth = action >> 19 /* Action.ReduceDepthShift */ , type = action & 65535 /* Action.ValueMask */ ;
 var parser = this.p.parser;
-if (this.reducePos < this.pos - 25 /* Lookahead.Margin */ ) this.setLookAhead(this.pos);
 var dPrec = parser.dynamicPrecedence(type);
 if (dPrec) this.score += dPrec;
 if (depth == 0) {
@@ -21285,7 +21284,7 @@ function cutAt(tree, pos, side) {
 cursor.moveTo(pos);
 for(;;){
 if (!(side < 0 ? cursor.childBefore(pos) : cursor.childAfter(pos))) for(;;){
-if ((side < 0 ? cursor.to < pos : cursor.from > pos) && !cursor.type.isError) return side < 0 ? Math.max(0, Math.min(cursor.to - 1, pos - 25 /*
+if ((side < 0 ? cursor.to < pos : cursor.from > pos) && !cursor.type.isError) return side < 0 ? Math.max(0, Math.min(cursor.to - 1, pos - 25 /* Safety.Margin */ )) : Math.min(tree.length, Math.max(cursor.from + 1, pos + 25 /* Safety.Margin */ ));
 if (side < 0 ? cursor.prevSibling() : cursor.nextSibling()) break;
 if (!cursor.parent()) return side < 0 ? 0 : tree.length;
 }
@@ -21408,7 +21407,7 @@ var TokenCache = /*#__PURE__*/ function() {
 token.mask = mask;
 token.context = context;
 }
-if (token.lookAhead > token.end + 25 /*
+if (token.lookAhead > token.end + 25 /* Safety.Margin */ ) lookAhead = Math.max(token.lookAhead, lookAhead);
 if (token.value != 0 /* Term.Err */ ) {
 var startIndex = actionIndex;
 if (token.extended > -1) actionIndex = this.addActions(stack, token.extended, token.end, actionIndex);
@@ -23499,14 +23498,14 @@ function indent(str, spaces) {
 var match = parseIdentifier(input, i1, namePart) || namePart && parseAdditionalSymbol(input, i1) || maybeSpace && parseSpaces(input, i1);
 // match is required
 if (!match) {
-return
+return tokens = tokens1, nextMatch = nextMatch1, i = i1, {
 v: nextMatch1
 };
 }
 var token = match.token, offset = match.offset;
 i1 += offset;
 if (token === " ") {
-return
+return tokens = tokens1, nextMatch = nextMatch1, i = i1, "continue";
 }
 tokens1 = _to_consumable_array$6(tokens1).concat([
 token
@@ -23525,7 +23524,7 @@ function indent(str, spaces) {
 if (contextKeys.some(function(el) {
 return el.startsWith(name);
 })) {
-return
+return tokens = tokens1, nextMatch = nextMatch1, i = i1, "continue";
 }
 if (dateTimeIdentifiers.some(function(el) {
 return el === name;
@@ -23544,9 +23543,9 @@ function indent(str, spaces) {
 if (dateTimeIdentifiers.some(function(el) {
 return el.startsWith(name);
 })) {
-return
+return tokens = tokens1, nextMatch = nextMatch1, i = i1, "continue";
 }
-return
+return tokens = tokens1, nextMatch = nextMatch1, i = i1, {
 v: nextMatch1
 };
 };
@@ -24349,7 +24348,6 @@ var parser = LRParser.deserialize({
 },
 dynamicPrecedences: {
 "31": -1,
-"67": 1,
 "71": -1,
 "73": -1
 },
@@ -28611,7 +28609,7 @@ var axBaseAIDefaultConfig = function() {
 };
 var AxBaseAI = /*#__PURE__*/ function() {
 function AxBaseAI(param) {
-var name = param.name, apiURL = param.apiURL, headers = param.headers, modelInfo = param.modelInfo, models = param.models, _param_options = param.options, options = _param_options === void 0 ? {} : _param_options, supportFor = param.supportFor
+var name = param.name, apiURL = param.apiURL, headers = param.headers, modelInfo = param.modelInfo, models = param.models, _param_options = param.options, options = _param_options === void 0 ? {} : _param_options, supportFor = param.supportFor;
 _class_call_check$p(this, AxBaseAI);
 _define_property$p(this, "generateChatReq", void 0);
 _define_property$p(this, "generateEmbedReq", void 0);
@@ -28623,29 +28621,48 @@ var AxBaseAI = /*#__PURE__*/ function() {
 _define_property$p(this, "fetch", void 0);
 _define_property$p(this, "tracer", void 0);
 _define_property$p(this, "modelMap", void 0);
-_define_property$p(this, "
+_define_property$p(this, "embedModelMap", void 0);
 _define_property$p(this, "modelUsage", void 0);
 _define_property$p(this, "embedModelUsage", void 0);
-_define_property$p(this, "models", void 0);
 _define_property$p(this, "apiURL", void 0);
 _define_property$p(this, "name", void 0);
 _define_property$p(this, "headers", void 0);
+_define_property$p(this, "modelInfo", void 0);
+_define_property$p(this, "embedModelInfo", void 0);
 _define_property$p(this, "supportFor", void 0);
 this.name = name;
 this.apiURL = apiURL;
 this.headers = headers;
 this.supportFor = supportFor;
 this.tracer = options.tracer;
-
-
-
-
-
-
-
-
+var model = this.getModel(models.model);
+var embedModel = this.getEmbedModel(models.embedModel);
+if (typeof model === "string") {
+var modelName = model.replace(/-0\d+$|-\d{2,}$/, "");
+var _modelInfo_filter_at;
+this.modelInfo = (_modelInfo_filter_at = modelInfo.filter(function(v) {
+return v.name === modelName;
+}).at(0)) !== null && _modelInfo_filter_at !== void 0 ? _modelInfo_filter_at : {
+name: model,
+currency: "usd",
+promptTokenCostPer1M: 0,
+completionTokenCostPer1M: 0
+};
+} else {
 throw new Error("No model defined");
 }
+if (typeof embedModel === "string") {
+var embedModelName = embedModel === null || embedModel === void 0 ? void 0 : embedModel.replace(/-0\d+$|-\d{2,}$/, "");
+var _modelInfo_filter_at1;
+this.embedModelInfo = (_modelInfo_filter_at1 = modelInfo.filter(function(v) {
+return v.name === embedModelName;
+}).at(0)) !== null && _modelInfo_filter_at1 !== void 0 ? _modelInfo_filter_at1 : {
+name: embedModel !== null && embedModel !== void 0 ? embedModel : "",
+currency: "usd",
+promptTokenCostPer1M: 0,
+completionTokenCostPer1M: 0
+};
+}
 this.setOptions(options);
 }
 _create_class$c(AxBaseAI, [
@@ -28685,27 +28702,21 @@ var AxBaseAI = /*#__PURE__*/ function() {
 }
 },
 {
-key: "
-value: function
-
-
-
-
-
-
-
-}).at(0)) !== null && _this_modelInfo_filter_at !== void 0 ? _this_modelInfo_filter_at : {
-name: model,
-currency: "usd",
-promptTokenCostPer1M: 0,
-completionTokenCostPer1M: 0
-};
+key: "setModelMap",
+value: function setModelMap(modelMap) {
+this.modelMap = modelMap;
+}
+},
+{
+key: "setEmbedModelMap",
+value: function setEmbedModelMap(embedModelMap) {
+this.embedModelMap = embedModelMap;
 }
 },
 {
 key: "getModelInfo",
 value: function getModelInfo() {
-return _object_spread_props$8(_object_spread$h({}, this.
+return _object_spread_props$8(_object_spread$h({}, this.modelInfo), {
 provider: this.name
 });
 }
@@ -28713,17 +28724,9 @@ var AxBaseAI = /*#__PURE__*/ function() {
 {
 key: "getEmbedModelInfo",
 value: function getEmbedModelInfo() {
-
-
-
-});
-}
-}
-},
-{
-key: "getModelMap",
-value: function getModelMap() {
-return this.modelMap;
+return this.embedModelInfo ? _object_spread_props$8(_object_spread$h({}, this.embedModelInfo), {
+provider: this.name
+}) : undefined;
 }
 },
 {
@@ -28746,14 +28749,13 @@ var AxBaseAI = /*#__PURE__*/ function() {
 },
 {
 key: "chat",
-value: function chat(
+value: function chat(_req, options) {
 var _this = this;
 return _async_to_generator$7(function() {
-var
+var _mc_stopSequences, _this_tracer, mc, _obj;
 return _ts_generator$8(this, function(_state) {
 switch(_state.label){
 case 0:
-model = req.model ? (_this_modelMap_req_model = (_this_modelMap = _this.modelMap) === null || _this_modelMap === void 0 ? void 0 : _this_modelMap[req.model]) !== null && _this_modelMap_req_model !== void 0 ? _this_modelMap_req_model : req.model : _this.models.model;
 if (!_this.tracer) return [
 3,
 2
@@ -28763,7 +28765,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
 4,
 (_this_tracer = _this.tracer) === null || _this_tracer === void 0 ? void 0 : _this_tracer.startActiveSpan("Chat Request", {
 kind: AxSpanKind.SERVER,
-attributes: (_obj = {}, _define_property$p(_obj, axSpanAttributes.LLM_SYSTEM, _this.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MODEL,
+attributes: (_obj = {}, _define_property$p(_obj, axSpanAttributes.LLM_SYSTEM, _this.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MODEL, _this.modelInfo.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MAX_TOKENS, mc.maxTokens), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TEMPERATURE, mc.temperature), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TOP_P, mc.topP), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_TOP_K, mc.topK), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_FREQUENCY_PENALTY, mc.frequencyPenalty), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_PRESENCE_PENALTY, mc.presencePenalty), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_STOP_SEQUENCES, (_mc_stopSequences = mc.stopSequences) === null || _mc_stopSequences === void 0 ? void 0 : _mc_stopSequences.join(", ")), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_LLM_IS_STREAMING, mc.stream), _obj)
 }, function() {
 var _ref = _async_to_generator$7(function(span) {
 var res;
@@ -28772,7 +28774,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
 case 0:
 return [
 4,
-_this._chat(
+_this._chat(_req, options, span)
 ];
 case 1:
 res = _state.sent();
@@ -28797,7 +28799,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
 case 2:
 return [
 4,
-_this._chat(
+_this._chat(_req, options)
 ];
 case 3:
 return [
@@ -28811,10 +28813,10 @@ var AxBaseAI = /*#__PURE__*/ function() {
 },
 {
 key: "_chat",
-value: function _chat(
+value: function _chat(_req, options, span) {
 var _this = this;
 return _async_to_generator$7(function() {
-var
+var _req_modelConfig, reqFn, _options_stream, stream, functions, req, fn, rv, _tmp, respFn, wrappedRespFn, doneCb, st, res;
 return _ts_generator$8(this, function(_state) {
 switch(_state.label){
 case 0:
@@ -28822,14 +28824,11 @@ var AxBaseAI = /*#__PURE__*/ function() {
 throw new Error("generateChatReq not implemented");
 }
 reqFn = _this.generateChatReq;
-stream = (_options_stream = options === null || options === void 0 ? void 0 : options.stream) !== null && _options_stream !== void 0 ? _options_stream : (
-
-
-}
-req = _object_spread_props$8(_object_spread$h({}, chatReq), {
-model: model,
+stream = (_options_stream = options === null || options === void 0 ? void 0 : options.stream) !== null && _options_stream !== void 0 ? _options_stream : (_req_modelConfig = _req.modelConfig) === null || _req_modelConfig === void 0 ? void 0 : _req_modelConfig.stream;
+functions = _req.functions && _req.functions.length > 0 ? _req.functions : undefined;
+req = _object_spread_props$8(_object_spread$h({}, _req), {
 functions: functions,
-modelConfig: _object_spread_props$8(_object_spread$h({},
+modelConfig: _object_spread_props$8(_object_spread$h({}, _req.modelConfig), {
 stream: stream
 })
 });
@@ -28966,14 +28965,10 @@ var AxBaseAI = /*#__PURE__*/ function() {
 value: function embed(req, options) {
 var _this = this;
 return _async_to_generator$7(function() {
-var
+var _this_tracer, _obj;
 return _ts_generator$8(this, function(_state) {
 switch(_state.label){
 case 0:
-embedModel = req.embedModel ? (_this_modelMap_req_embedModel = (_this_modelMap = _this.modelMap) === null || _this_modelMap === void 0 ? void 0 : _this_modelMap[req.embedModel]) !== null && _this_modelMap_req_embedModel !== void 0 ? _this_modelMap_req_embedModel : req.embedModel : _this.models.embedModel;
-if (!embedModel) {
-throw new Error("No embed model defined");
-}
 if (!_this.tracer) return [
 3,
 2
@@ -28982,7 +28977,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
 4,
 (_this_tracer = _this.tracer) === null || _this_tracer === void 0 ? void 0 : _this_tracer.startActiveSpan("Embed Request", {
 kind: AxSpanKind.SERVER,
-attributes: (_obj = {}, _define_property$p(_obj, axSpanAttributes.LLM_SYSTEM, _this.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MODEL,
+attributes: (_obj = {}, _define_property$p(_obj, axSpanAttributes.LLM_SYSTEM, _this.name), _define_property$p(_obj, axSpanAttributes.LLM_REQUEST_MODEL, _this.modelInfo.name), _obj)
 }, function() {
 var _ref = _async_to_generator$7(function(span) {
 var res;
@@ -28991,7 +28986,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
 case 0:
 return [
 4,
-_this._embed(
+_this._embed(req, options, span)
 ];
 case 1:
 res = _state.sent();
@@ -29016,7 +29011,7 @@ var AxBaseAI = /*#__PURE__*/ function() {
 case 2:
 return [
 2,
-_this._embed(
+_this._embed(req, options)
 ];
 }
 });
@@ -29025,10 +29020,10 @@ var AxBaseAI = /*#__PURE__*/ function() {
 },
 {
 key: "_embed",
-value: function _embed(
+value: function _embed(req, options, span) {
 var _this = this;
 return _async_to_generator$7(function() {
-var
+var fn, resValue, _tmp, res, _res_modelUsage_completionTokens, _obj;
 return _ts_generator$8(this, function(_state) {
 switch(_state.label){
 case 0:
@@ -29038,9 +29033,6 @@ var AxBaseAI = /*#__PURE__*/ function() {
 if (!_this.generateEmbedResp) {
 throw new Error("generateEmbedResp not implemented");
 }
-req = _object_spread_props$8(_object_spread$h({}, embedReq), {
-embedModel: embedModel
-});
 fn = function() {
 var _ref = _async_to_generator$7(function() {
 var _this_generateEmbedReq, apiConfig, reqValue, res;
@@ -29122,6 +29114,22 @@ var AxBaseAI = /*#__PURE__*/ function() {
 var headers = arguments.length > 0 && arguments[0] !== void 0 ? arguments[0] : {};
 return _object_spread$h({}, headers, this.headers);
 }
+},
+{
+key: "getEmbedModel",
+value: function getEmbedModel(name) {
+var _this_embedModelMap;
+var _this_embedModelMap_name;
+return name ? (_this_embedModelMap_name = (_this_embedModelMap = this.embedModelMap) === null || _this_embedModelMap === void 0 ? void 0 : _this_embedModelMap[name]) !== null && _this_embedModelMap_name !== void 0 ? _this_embedModelMap_name : name : undefined;
+}
+},
+{
+key: "getModel",
+value: function getModel(name) {
+var _this_modelMap;
+var _this_modelMap_name;
+return (_this_modelMap_name = (_this_modelMap = this.modelMap) === null || _this_modelMap === void 0 ? void 0 : _this_modelMap[name]) !== null && _this_modelMap_name !== void 0 ? _this_modelMap_name : name;
+}
 }
 ]);
 return AxBaseAI;
@@ -29285,7 +29293,6 @@ var AxAIOpenAIModel;
 (function(AxAIOpenAIModel) {
 AxAIOpenAIModel["GPT4"] = "gpt-4";
 AxAIOpenAIModel["GPT4O"] = "gpt-4o";
-AxAIOpenAIModel["GPT4OMini"] = "gpt-4o-mini";
 AxAIOpenAIModel["GPT4Turbo"] = "gpt-4-turbo";
 AxAIOpenAIModel["GPT35Turbo"] = "gpt-3.5-turbo";
 AxAIOpenAIModel["GPT35TurboInstruct"] = "gpt-3.5-turbo-instruct";
@@ -29316,12 +29323,6 @@ var AxAIOpenAIEmbedModel;
 promptTokenCostPer1M: 5,
 completionTokenCostPer1M: 15
 },
-{
-name: AxAIOpenAIModel.GPT4OMini,
-currency: "usd",
-promptTokenCostPer1M: 0.15,
-completionTokenCostPer1M: 0.6
-},
 {
 name: AxAIOpenAIModel.GPT4Turbo,
 currency: "usd",
@@ -29477,7 +29478,7 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
 _inherits$f(AxAIOpenAI, AxBaseAI);
 var _super = _create_super$f(AxAIOpenAI);
 function AxAIOpenAI(param) {
-var apiKey = param.apiKey, config = param.config, options = param.options, apiURL = param.apiURL, _param_modelInfo = param.modelInfo, modelInfo = _param_modelInfo === void 0 ? axModelInfoOpenAI : _param_modelInfo
+var apiKey = param.apiKey, config = param.config, options = param.options, apiURL = param.apiURL, _param_modelInfo = param.modelInfo, modelInfo = _param_modelInfo === void 0 ? axModelInfoOpenAI : _param_modelInfo;
 _class_call_check$o(this, AxAIOpenAI);
 var _this;
 if (!apiKey || apiKey === "") {
@@ -29499,16 +29500,14 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
 supportFor: {
 functions: true,
 streaming: true
-}
-modelMap: modelMap
+}
 });
 _define_property$o(_assert_this_initialized$f(_this), "config", void 0);
 _define_property$o(_assert_this_initialized$f(_this), "streamingUsage", void 0);
-_define_property$o(_assert_this_initialized$f(_this), "dimensions", void 0);
 _define_property$o(_assert_this_initialized$f(_this), "generateChatReq", function(req, // eslint-disable-next-line @typescript-eslint/no-unused-vars
 _config) {
 var _req_functions, _req_modelConfig, _req_modelConfig1, _this_config, _this_config1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4, _req_modelConfig5, _req_modelConfig6, _req_modelConfig7;
-var model =
+var model = _this.config.model;
 if (!req.chatPrompt || req.chatPrompt.length === 0) {
 throw new Error("Chat prompt is empty");
 }
@@ -29561,7 +29560,7 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
 ];
 });
 _define_property$o(_assert_this_initialized$f(_this), "generateEmbedReq", function(req) {
-var model =
+var model = _this.config.embedModel;
 if (!model) {
 throw new Error("Embed model not set");
 }
@@ -29573,8 +29572,7 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
 };
 var reqValue = {
 model: model,
-input: req.texts
-dimensions: _this.dimensions
+input: req.texts
 };
 return [
 apiConfig,
@@ -29683,7 +29681,6 @@ var AxAIOpenAI = /*#__PURE__*/ function(AxBaseAI) {
 _this.config = _config;
 var _options_streamingUsage;
 _this.streamingUsage = (_options_streamingUsage = options === null || options === void 0 ? void 0 : options.streamingUsage) !== null && _options_streamingUsage !== void 0 ? _options_streamingUsage : true;
-_this.dimensions = config === null || config === void 0 ? void 0 : config.dimensions;
 return _this;
 }
 _create_class$b(AxAIOpenAI, [
@@ -29917,7 +29914,7 @@ var AxAIAzureOpenAI = /*#__PURE__*/ function(AxAIOpenAI) {
 _inherits$e(AxAIAzureOpenAI, AxAIOpenAI);
 var _super = _create_super$e(AxAIAzureOpenAI);
 function AxAIAzureOpenAI(param) {
-var apiKey = param.apiKey, resourceName = param.resourceName, deploymentName = param.deploymentName, _param_version = param.version, version = _param_version === void 0 ? "api-version=2024-02-15-preview" : _param_version, config = param.config, options = param.options
+var apiKey = param.apiKey, resourceName = param.resourceName, deploymentName = param.deploymentName, _param_version = param.version, version = _param_version === void 0 ? "api-version=2024-02-15-preview" : _param_version, config = param.config, options = param.options;
 _class_call_check$n(this, AxAIAzureOpenAI);
 if (!apiKey || apiKey === "") {
 throw new Error("Azure OpenAPI API key not set");
@@ -29932,8 +29929,7 @@ var AxAIAzureOpenAI = /*#__PURE__*/ function(AxAIOpenAI) {
 var _this = _super.call(this, {
 apiKey: apiKey,
 config: _config,
-options: options
-modelMap: modelMap
+options: options
 });
 var host = resourceName.includes("://") ? resourceName : "https://".concat(resourceName, ".openai.azure.com/");
 _get$5((_assert_this_initialized$e(_this), _get_prototype_of$e(AxAIAzureOpenAI.prototype)), "setName", _this).call(_this, "Azure OpenAI");
@@ -30079,7 +30075,7 @@ var AxAIHuggingFace = /*#__PURE__*/ function(AxBaseAI) {
 _inherits$d(AxAIHuggingFace, AxBaseAI);
 var _super = _create_super$d(AxAIHuggingFace);
 function AxAIHuggingFace(param) {
-var apiKey = param.apiKey, config = param.config, options = param.options
+var apiKey = param.apiKey, config = param.config, options = param.options;
 _class_call_check$m(this, AxAIHuggingFace);
 var _this;
 if (!apiKey || apiKey === "") {
@@ -30100,14 +30096,13 @@ var AxAIHuggingFace = /*#__PURE__*/ function(AxBaseAI) {
 supportFor: {
 functions: false,
 streaming: false
-}
-modelMap: modelMap
+}
 });
 _define_property$m(_assert_this_initialized$d(_this), "config", void 0);
 _define_property$m(_assert_this_initialized$d(_this), "generateChatReq", function(req, // eslint-disable-next-line @typescript-eslint/no-unused-vars
 _config) {
 var _req_chatPrompt, _req_modelConfig, _req_modelConfig1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4;
-var model =
+var model = _this.config.model;
 var functionsList = req.functions ? "Functions:\n".concat(JSON.stringify(req.functions, null, 2), "\n") : "";
 var prompt = (_req_chatPrompt = req.chatPrompt) === null || _req_chatPrompt === void 0 ? void 0 : _req_chatPrompt.map(function(msg) {
 switch(msg.role){
@@ -30329,7 +30324,7 @@ var AxAITogether = /*#__PURE__*/ function(AxAIOpenAI) {
 _inherits$c(AxAITogether, AxAIOpenAI);
 var _super = _create_super$c(AxAITogether);
 function AxAITogether(param) {
-var apiKey = param.apiKey, config = param.config, options = param.options
+var apiKey = param.apiKey, config = param.config, options = param.options;
 _class_call_check$l(this, AxAITogether);
 if (!apiKey || apiKey === "") {
 throw new Error("Together API key not set");
@@ -30340,8 +30335,7 @@ var AxAITogether = /*#__PURE__*/ function(AxAIOpenAI) {
 config: _config,
 options: options,
 apiURL: "https://api.together.xyz/v1",
-modelInfo: axModelInfoTogether
-modelMap: modelMap
+modelInfo: axModelInfoTogether
 });
 _get$4((_assert_this_initialized$c(_this), _get_prototype_of$c(AxAITogether.prototype)), "setName", _this).call(_this, "Together");
 return _this;
@@ -30615,7 +30609,7 @@ var AxAICohere = /*#__PURE__*/ function(AxBaseAI) {
 _inherits$b(AxAICohere, AxBaseAI);
 var _super = _create_super$b(AxAICohere);
 function AxAICohere(param) {
-var apiKey = param.apiKey, config = param.config, options = param.options
+var apiKey = param.apiKey, config = param.config, options = param.options;
 _class_call_check$k(this, AxAICohere);
 var _this;
 if (!apiKey || apiKey === "") {
@@ -30636,14 +30630,13 @@ var AxAICohere = /*#__PURE__*/ function(AxBaseAI) {
 functions: true,
 streaming: true
 },
-options: options
-modelMap: modelMap
+options: options
 });
 _define_property$k(_assert_this_initialized$b(_this), "config", void 0);
 _define_property$k(_assert_this_initialized$b(_this), "generateChatReq", function(req, // eslint-disable-next-line @typescript-eslint/no-unused-vars
 _config) {
 var _req_functions, _req_modelConfig, _req_modelConfig1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4, _req_modelConfig5, _req_modelConfig6;
-var model =
+var model = _this.config.model;
 var lastChatMsg = req.chatPrompt.at(-1);
 var restOfChat = req.chatPrompt.slice(0, -1);
 var message;
@@ -30738,7 +30731,7 @@ var AxAICohere = /*#__PURE__*/ function(AxBaseAI) {
 ];
 });
 _define_property$k(_assert_this_initialized$b(_this), "generateEmbedReq", function(req) {
-var model =
+var model = _this.config.embedModel;
 if (!model) {
 throw new Error("Embed model not set");
 }
@@ -31137,7 +31130,7 @@ var safetySettings = [
 _inherits$a(AxAIGoogleGemini, AxBaseAI);
 var _super = _create_super$a(AxAIGoogleGemini);
 function AxAIGoogleGemini(param) {
-var apiKey = param.apiKey, projectId = param.projectId, region = param.region, config = param.config, options = param.options
+var apiKey = param.apiKey, projectId = param.projectId, region = param.region, config = param.config, options = param.options;
 _class_call_check$j(this, AxAIGoogleGemini);
 var _this;
 if (!apiKey || apiKey === "") {
@@ -31161,15 +31154,14 @@ var safetySettings = [
 supportFor: {
 functions: true,
 streaming: true
-}
-modelMap: modelMap
+}
 });
 _define_property$j(_assert_this_initialized$a(_this), "options", void 0);
 _define_property$j(_assert_this_initialized$a(_this), "config", void 0);
 _define_property$j(_assert_this_initialized$a(_this), "apiKey", void 0);
 _define_property$j(_assert_this_initialized$a(_this), "generateChatReq", function(req) {
 var _req_modelConfig, _this_options, _req_modelConfig1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4, _req_modelConfig5;
-var model =
+var model = _this.config.model;
 var _req_modelConfig_stream;
 var stream = (_req_modelConfig_stream = (_req_modelConfig = req.modelConfig) === null || _req_modelConfig === void 0 ? void 0 : _req_modelConfig.stream) !== null && _req_modelConfig_stream !== void 0 ? _req_modelConfig_stream : _this.config.stream;
 if (!req.chatPrompt || req.chatPrompt.length === 0) {
@@ -31349,7 +31341,7 @@ var safetySettings = [
 ];
 });
 _define_property$j(_assert_this_initialized$a(_this), "generateEmbedReq", function(req) {
-var model =
+var model = _this.config.embedModel;
 if (!model) {
 throw new Error("Embed model not set");
 }
@@ -31713,7 +31705,7 @@ var AxAIAnthropic = /*#__PURE__*/ function(AxBaseAI) {
 _inherits$9(AxAIAnthropic, AxBaseAI);
 var _super = _create_super$9(AxAIAnthropic);
 function AxAIAnthropic(param) {
-var apiKey = param.apiKey, config = param.config, options = param.options
+var apiKey = param.apiKey, config = param.config, options = param.options;
 _class_call_check$i(this, AxAIAnthropic);
 var _this;
 if (!apiKey || apiKey === "") {
@@ -31735,13 +31727,12 @@ var AxAIAnthropic = /*#__PURE__*/ function(AxBaseAI) {
 supportFor: {
 functions: true,
 streaming: true
-}
-modelMap: modelMap
+}
 });
 _define_property$i(_assert_this_initialized$9(_this), "config", void 0);
 _define_property$i(_assert_this_initialized$9(_this), "generateChatReq", function(req) {
 var _req_functions, _req_modelConfig, _req_modelConfig1, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4, _req_modelConfig5;
-var model =
+var model = _this.config.model;
 var apiConfig = {
 name: "/messages"
 };
@@ -32638,7 +32629,7 @@ var AxAIGroq = /*#__PURE__*/ function(AxAIOpenAI) {
 _inherits$8(AxAIGroq, AxAIOpenAI);
 var _super = _create_super$8(AxAIGroq);
 function AxAIGroq(param) {
-var apiKey = param.apiKey, config = param.config, options = param.options
+var apiKey = param.apiKey, config = param.config, options = param.options;
 _class_call_check$g(this, AxAIGroq);
 var _this;
 if (!apiKey || apiKey === "") {
@@ -32653,8 +32644,7 @@ var AxAIGroq = /*#__PURE__*/ function(AxAIOpenAI) {
 config: _config,
 options: _options,
 apiURL: "https://api.groq.com/openai/v1",
-modelInfo: []
-modelMap: modelMap
+modelInfo: []
 });
 _define_property$g(_assert_this_initialized$8(_this), "setOptions", function(options) {
 var rateLimiter = _this.newRateLimiter(options);
@@ -32710,23 +32700,19 @@ var AxAIGroq = /*#__PURE__*/ function(AxAIOpenAI) {
 }
 (AxAIOpenAI);
 
-// cspell:ignore mistral, mixtral, codestral, nemo
 var AxAIMistralModel;
 (function(AxAIMistralModel) {
 AxAIMistralModel["Mistral7B"] = "open-mistral-7b";
 AxAIMistralModel["Mistral8x7B"] = "open-mixtral-8x7b";
 AxAIMistralModel["MistralSmall"] = "mistral-small-latest";
+AxAIMistralModel["MistralMedium"] = "mistral-medium-latest";
 AxAIMistralModel["MistralLarge"] = "mistral-large-latest";
-AxAIMistralModel["Codestral"] = "codestral-latest";
-AxAIMistralModel["OpenCodestralMamba"] = "open-codestral-mamba";
-AxAIMistralModel["OpenMistralNemo"] = "open-mistral-nemo-latest";
 })(AxAIMistralModel || (AxAIMistralModel = {}));
 var AxAIMistralEmbedModels;
 (function(AxAIMistralEmbedModels) {
 AxAIMistralEmbedModels["MistralEmbed"] = "mistral-embed";
 })(AxAIMistralEmbedModels || (AxAIMistralEmbedModels = {}));
 
-// cspell:ignore mistral, mixtral, codestral, nemo
 var axModelInfoMistral = [
 {
 name: AxAIMistralModel.Mistral7B,
@@ -32747,28 +32733,16 @@ var axModelInfoMistral = [
 completionTokenCostPer1M: 6
 },
 {
-name: AxAIMistralModel.
-currency: "USD",
-promptTokenCostPer1M: 8,
-completionTokenCostPer1M: 24
-},
-{
-name: AxAIMistralModel.Codestral,
+name: AxAIMistralModel.MistralMedium,
 currency: "USD",
-promptTokenCostPer1M:
-completionTokenCostPer1M:
+promptTokenCostPer1M: 2.7,
+completionTokenCostPer1M: 8.1
 },
 {
-name: AxAIMistralModel.
-currency: "USD",
-promptTokenCostPer1M: 0.25,
-completionTokenCostPer1M: 0.25
-},
-{
-name: AxAIMistralModel.OpenMistralNemo,
+name: AxAIMistralModel.MistralLarge,
 currency: "USD",
-promptTokenCostPer1M:
-completionTokenCostPer1M:
+promptTokenCostPer1M: 8,
+completionTokenCostPer1M: 24
 }
 ];
 
@@ -32903,7 +32877,7 @@ var AxAIMistral = /*#__PURE__*/ function(AxAIOpenAI) {
 _inherits$7(AxAIMistral, AxAIOpenAI);
 var _super = _create_super$7(AxAIMistral);
 function AxAIMistral(param) {
-var apiKey = param.apiKey, config = param.config, options = param.options
+var apiKey = param.apiKey, config = param.config, options = param.options;
 _class_call_check$f(this, AxAIMistral);
 if (!apiKey || apiKey === "") {
 throw new Error("Mistral API key not set");
@@ -32914,8 +32888,7 @@ var AxAIMistral = /*#__PURE__*/ function(AxAIOpenAI) {
 config: _config,
 options: options,
 apiURL: "https://api.mistral.ai/v1",
-modelInfo: axModelInfoMistral
-modelMap: modelMap
+modelInfo: axModelInfoMistral
 });
 _get$2((_assert_this_initialized$7(_this), _get_prototype_of$7(AxAIMistral.prototype)), "setName", _this).call(_this, "Mistral");
 return _this;
@@ -33079,7 +33052,7 @@ var AxAIDeepSeek = /*#__PURE__*/ function(AxAIOpenAI) {
 _inherits$6(AxAIDeepSeek, AxAIOpenAI);
 var _super = _create_super$6(AxAIDeepSeek);
 function AxAIDeepSeek(param) {
-var apiKey = param.apiKey, config = param.config, options = param.options
+var apiKey = param.apiKey, config = param.config, options = param.options;
 _class_call_check$e(this, AxAIDeepSeek);
 if (!apiKey || apiKey === "") {
 throw new Error("DeepSeek API key not set");
@@ -33090,8 +33063,7 @@ var AxAIDeepSeek = /*#__PURE__*/ function(AxAIOpenAI) {
 config: _config,
 options: options,
 apiURL: "https://api.deepseek.com",
-modelInfo: axModelInfoDeepSeek
-modelMap: modelMap
+modelInfo: axModelInfoDeepSeek
 });
 _get$1((_assert_this_initialized$6(_this), _get_prototype_of$6(AxAIDeepSeek.prototype)), "setName", _this).call(_this, "DeepSeek");
 return _this;
@@ -33259,15 +33231,14 @@ var axAIOllamaDefaultConfig = function() {
 _inherits$5(AxAIOllama, AxAIOpenAI);
 var _super = _create_super$5(AxAIOllama);
 function AxAIOllama(param) {
-var _param_apiKey = param.apiKey, apiKey = _param_apiKey === void 0 ? "not-set" : _param_apiKey, _param_url = param.url, url = _param_url === void 0 ? "http://localhost:11434" : _param_url, config = param.config, options = param.options
+var _param_apiKey = param.apiKey, apiKey = _param_apiKey === void 0 ? "not-set" : _param_apiKey, _param_url = param.url, url = _param_url === void 0 ? "http://localhost:11434" : _param_url, config = param.config, options = param.options;
 _class_call_check$d(this, AxAIOllama);
 var _config = _object_spread$6({}, axAIOllamaDefaultConfig(), config);
 var _this = _super.call(this, {
 apiKey: apiKey,
 options: options,
 config: _config,
-apiURL: new URL("/
-modelMap: modelMap
+apiURL: new URL("/v1", url).href
 });
 _get((_assert_this_initialized$5(_this), _get_prototype_of$5(AxAIOllama.prototype)), "setName", _this).call(_this, "Ollama");
 return _this;
@@ -33475,6 +33446,18 @@ var AxAI = /*#__PURE__*/ function() {
 }
 }
 _create_class$5(AxAI, [
+{
+key: "setModelMap",
+value: function setModelMap(modelMap) {
+this.ai.setModelMap(modelMap);
+}
+},
+{
+key: "setEmbedModelMap",
+value: function setEmbedModelMap(modelMap) {
+this.ai.setEmbedModelMap(modelMap);
+}
+},
 {
 key: "getName",
 value: function getName() {
@@ -33505,12 +33488,6 @@ var AxAI = /*#__PURE__*/ function() {
 return this.ai.getFeatures();
 }
 },
-{
-key: "getModelMap",
-value: function getModelMap() {
-return this.ai.getModelMap();
-}
-},
 {
 key: "chat",
 value: function chat(req, options) {
@@ -34069,8 +34046,8 @@ var assertRequiredFields = function(sig, values) {
 });
 if (missingFields.length > 0) {
 throw new AxAssertionError({
-message: "
-return
+message: "Missing required fields: ".concat(missingFields.map(function(f) {
+return f.name;
 }).join(", ")),
 values: values
 });
@@ -38876,7 +38853,7 @@ var AxPromptTemplate = function AxPromptTemplate(sig, fieldTemplates) {
 this.outputFormat = {
 type: "text",
 text: [
-"
+"Follow the following format."
 ].concat(_to_consumable_array$2(this.renderOutFields(this.sig.getOutputFields())), [
 "---\n\n"
 ]).join("\n\n")
@@ -40692,7 +40669,9 @@ let DspService = class DspService {
 }
 async modelOpenAI(params, _pinsSettingsList, context) {
 var _context_privates_OPENAI_API_KEY, _context_privates_OPENAI_SERVER;
-const { apiKey = (_context_privates_OPENAI_API_KEY = context.privates.OPENAI_API_KEY) != null ? _context_privates_OPENAI_API_KEY : process.env['OPENAI_API_KEY'], apiURL = (_context_privates_OPENAI_SERVER = context.privates.OPENAI_SERVER) != null ? _context_privates_OPENAI_SERVER : process.env['OPENAI_SERVER'], config
+const { apiKey = (_context_privates_OPENAI_API_KEY = context.privates.OPENAI_API_KEY) != null ? _context_privates_OPENAI_API_KEY : process.env['OPENAI_API_KEY'], apiURL = (_context_privates_OPENAI_SERVER = context.privates.OPENAI_SERVER) != null ? _context_privates_OPENAI_SERVER : process.env['OPENAI_SERVER'], config = {
+model: 'gpt-4o-mini'
+}, options } = params;
 const modelInstance = new AxAIOpenAI({
 apiKey,
 apiURL,