@digipair/skill-dsp 0.88.2 → 0.88.3-0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3):
  1. package/index.cjs.js +20 -207
  2. package/index.esm.js +25 -212
  3. package/package.json +2 -4
package/index.cjs.js CHANGED
@@ -120292,21 +120292,22 @@ var axModelInfoMistral = [
120292
120292
  ];
120293
120293
  // ai/mistral/api.ts
120294
120294
  var axAIMistralDefaultConfig = function() {
120295
- return structuredClone(_object_spread({
120295
+ return structuredClone(_object_spread_props(_object_spread({
120296
120296
  model: "mistral-small-latest" /* MistralSmall */
120297
- }, axBaseAIDefaultConfig()));
120297
+ }, axBaseAIDefaultConfig()), {
120298
+ topP: 1
120299
+ }));
120298
120300
  };
120299
120301
  var AxAIMistral = /*#__PURE__*/ function(AxAIOpenAIBase) {
120300
120302
  _inherits(AxAIMistral, AxAIOpenAIBase);
120301
120303
  var _super = _create_super(AxAIMistral);
120302
- function AxAIMistral(config) {
120304
+ function AxAIMistral(param) {
120305
+ var apiKey = param.apiKey, config = param.config, options = param.options, models = param.models, modelInfo = param.modelInfo;
120303
120306
  _class_call_check(this, AxAIMistral);
120304
- var _this;
120305
- var apiKey = config.apiKey, options = config.options, models = config.models, modelInfo = config.modelInfo;
120306
120307
  if (!apiKey || apiKey === "") {
120307
120308
  throw new Error("Mistral API key not set");
120308
120309
  }
120309
- var _config = _object_spread({}, axAIMistralDefaultConfig(), config.config);
120310
+ var _config = _object_spread({}, axAIMistralDefaultConfig(), config);
120310
120311
  modelInfo = _to_consumable_array(axModelInfoMistral).concat(_to_consumable_array(modelInfo !== null && modelInfo !== void 0 ? modelInfo : []));
120311
120312
  var supportFor = {
120312
120313
  functions: true,
@@ -120314,217 +120315,29 @@ var AxAIMistral = /*#__PURE__*/ function(AxAIOpenAIBase) {
120314
120315
  hasThinkingBudget: false,
120315
120316
  hasShowThoughts: false
120316
120317
  };
120317
- _this = _super.call(this, {
120318
+ var chatReqUpdater = function(req) {
120319
+ var max_completion_tokens = req.max_completion_tokens, result = _object_without_properties(req, [
120320
+ "max_completion_tokens"
120321
+ ]);
120322
+ return _object_spread_props(_object_spread({}, result), {
120323
+ max_tokens: max_completion_tokens
120324
+ });
120325
+ };
120326
+ var _this = _super.call(this, {
120318
120327
  apiKey: apiKey,
120319
120328
  config: _config,
120320
120329
  options: options,
120321
120330
  apiURL: "https://api.mistral.ai/v1",
120322
120331
  modelInfo: modelInfo,
120323
120332
  models: models,
120324
- supportFor: supportFor
120333
+ supportFor: supportFor,
120334
+ chatReqUpdater: chatReqUpdater
120325
120335
  });
120326
- _define_property(_assert_this_initialized(_this), "config", void 0);
120327
- _define_property(_assert_this_initialized(_this), "streamingUsage", void 0);
120328
- _define_property(_assert_this_initialized(_this), "chatReqUpdater", void 0);
120329
120336
  _get((_assert_this_initialized(_this), _get_prototype_of(AxAIMistral.prototype)), "setName", _this).call(_this, "Mistral");
120330
- _this.config = config;
120331
120337
  return _this;
120332
120338
  }
120333
- _create_class(AxAIMistral, [
120334
- {
120335
- key: "createChatReq",
120336
- value: function createChatReq(req, config) {
120337
- var _req_functions, _req_modelConfig, _req_modelConfig1, _this_config, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4, _req_modelConfig5, _req_modelConfig6, _req_modelConfig7;
120338
- var model = req.model;
120339
- if (!req.chatPrompt || req.chatPrompt.length === 0) {
120340
- throw new Error("Chat prompt is empty");
120341
- }
120342
- var apiConfig = {
120343
- name: "/chat/completions"
120344
- };
120345
- var tools = (_req_functions = req.functions) === null || _req_functions === void 0 ? void 0 : _req_functions.map(function(v) {
120346
- return {
120347
- type: "function",
120348
- function: {
120349
- name: v.name,
120350
- description: v.description,
120351
- parameters: v.parameters
120352
- }
120353
- };
120354
- });
120355
- var toolsChoice = !req.functionCall && req.functions && req.functions.length > 0 ? "auto" : req.functionCall;
120356
- var messages = createMessages3(req);
120357
- var _req_modelConfig_frequencyPenalty;
120358
- var frequencyPenalty = (_req_modelConfig_frequencyPenalty = (_req_modelConfig = req.modelConfig) === null || _req_modelConfig === void 0 ? void 0 : _req_modelConfig.frequencyPenalty) !== null && _req_modelConfig_frequencyPenalty !== void 0 ? _req_modelConfig_frequencyPenalty : this.config.frequencyPenalty;
120359
- var _req_modelConfig_stream;
120360
- var stream = (_req_modelConfig_stream = (_req_modelConfig1 = req.modelConfig) === null || _req_modelConfig1 === void 0 ? void 0 : _req_modelConfig1.stream) !== null && _req_modelConfig_stream !== void 0 ? _req_modelConfig_stream : this.config.stream;
120361
- var store = this.config.store;
120362
- var _req_modelConfig_maxTokens, _req_modelConfig_temperature, _req_modelConfig_topP, _ref, _req_modelConfig_n, _req_modelConfig_stopSequences, _req_modelConfig_presencePenalty;
120363
- var reqValue = _object_spread({
120364
- model: model,
120365
- messages: messages,
120366
- response_format: ((_this_config = this.config) === null || _this_config === void 0 ? void 0 : _this_config.responseFormat) ? {
120367
- type: this.config.responseFormat
120368
- } : void 0,
120369
- tools: tools,
120370
- tool_choice: toolsChoice,
120371
- max_tokens: (_req_modelConfig_maxTokens = (_req_modelConfig2 = req.modelConfig) === null || _req_modelConfig2 === void 0 ? void 0 : _req_modelConfig2.maxTokens) !== null && _req_modelConfig_maxTokens !== void 0 ? _req_modelConfig_maxTokens : this.config.maxTokens,
120372
- temperature: (_req_modelConfig_temperature = (_req_modelConfig3 = req.modelConfig) === null || _req_modelConfig3 === void 0 ? void 0 : _req_modelConfig3.temperature) !== null && _req_modelConfig_temperature !== void 0 ? _req_modelConfig_temperature : this.config.temperature,
120373
- top_p: (_ref = (_req_modelConfig_topP = (_req_modelConfig4 = req.modelConfig) === null || _req_modelConfig4 === void 0 ? void 0 : _req_modelConfig4.topP) !== null && _req_modelConfig_topP !== void 0 ? _req_modelConfig_topP : this.config.topP) !== null && _ref !== void 0 ? _ref : 1,
120374
- n: (_req_modelConfig_n = (_req_modelConfig5 = req.modelConfig) === null || _req_modelConfig5 === void 0 ? void 0 : _req_modelConfig5.n) !== null && _req_modelConfig_n !== void 0 ? _req_modelConfig_n : this.config.n,
120375
- stop: (_req_modelConfig_stopSequences = (_req_modelConfig6 = req.modelConfig) === null || _req_modelConfig6 === void 0 ? void 0 : _req_modelConfig6.stopSequences) !== null && _req_modelConfig_stopSequences !== void 0 ? _req_modelConfig_stopSequences : this.config.stop,
120376
- presence_penalty: (_req_modelConfig_presencePenalty = (_req_modelConfig7 = req.modelConfig) === null || _req_modelConfig7 === void 0 ? void 0 : _req_modelConfig7.presencePenalty) !== null && _req_modelConfig_presencePenalty !== void 0 ? _req_modelConfig_presencePenalty : this.config.presencePenalty,
120377
- logit_bias: this.config.logitBias
120378
- }, frequencyPenalty ? {
120379
- frequency_penalty: frequencyPenalty
120380
- } : {}, stream && this.streamingUsage ? {
120381
- stream: true
120382
- } : {}, store ? {
120383
- store: store
120384
- } : {}, this.config.serviceTier ? {
120385
- service_tier: this.config.serviceTier
120386
- } : {}, this.config.user ? {
120387
- user: this.config.user
120388
- } : {});
120389
- if (this.config.reasoningEffort) {
120390
- reqValue.reasoning_effort = this.config.reasoningEffort;
120391
- }
120392
- if (this.config.webSearchOptions) {
120393
- reqValue.web_search_options = _object_spread({}, this.config.webSearchOptions.searchContextSize && {
120394
- search_context_size: this.config.webSearchOptions.searchContextSize
120395
- }, this.config.webSearchOptions.userLocation && {
120396
- user_location: {
120397
- approximate: _object_spread({
120398
- type: "approximate"
120399
- }, this.config.webSearchOptions.userLocation.approximate.city && {
120400
- city: this.config.webSearchOptions.userLocation.approximate.city
120401
- }, this.config.webSearchOptions.userLocation.approximate.country && {
120402
- country: this.config.webSearchOptions.userLocation.approximate.country
120403
- }, this.config.webSearchOptions.userLocation.approximate.region && {
120404
- region: this.config.webSearchOptions.userLocation.approximate.region
120405
- }, this.config.webSearchOptions.userLocation.approximate.timezone && {
120406
- timezone: this.config.webSearchOptions.userLocation.approximate.timezone
120407
- })
120408
- }
120409
- });
120410
- }
120411
- if (config.thinkingTokenBudget) {
120412
- switch(config.thinkingTokenBudget){
120413
- case "none":
120414
- reqValue.reasoning_effort = void 0;
120415
- break;
120416
- case "minimal":
120417
- reqValue.reasoning_effort = "low";
120418
- break;
120419
- case "low":
120420
- reqValue.reasoning_effort = "medium";
120421
- break;
120422
- case "medium":
120423
- reqValue.reasoning_effort = "high";
120424
- break;
120425
- case "high":
120426
- reqValue.reasoning_effort = "high";
120427
- break;
120428
- case "highest":
120429
- reqValue.reasoning_effort = "high";
120430
- break;
120431
- }
120432
- }
120433
- if (this.chatReqUpdater) {
120434
- reqValue = this.chatReqUpdater(reqValue);
120435
- }
120436
- return [
120437
- apiConfig,
120438
- reqValue
120439
- ];
120440
- }
120441
- }
120442
- ]);
120443
120339
  return AxAIMistral;
120444
120340
  }(AxAIOpenAIBase);
120445
- function createMessages3(req) {
120446
- return req.chatPrompt.map(function(msg) {
120447
- switch(msg.role){
120448
- case "system":
120449
- return {
120450
- role: "system",
120451
- content: msg.content
120452
- };
120453
- case "user":
120454
- if (Array.isArray(msg.content)) {
120455
- return {
120456
- role: "user",
120457
- name: msg.name,
120458
- content: msg.content.map(function(c) {
120459
- switch(c.type){
120460
- case "text":
120461
- return {
120462
- type: "text",
120463
- text: c.text
120464
- };
120465
- case "image":
120466
- {
120467
- var url = "data:".concat(c.mimeType, ";base64,") + c.image;
120468
- var _c_details;
120469
- return {
120470
- type: "image_url",
120471
- image_url: {
120472
- url: url,
120473
- details: (_c_details = c.details) !== null && _c_details !== void 0 ? _c_details : "auto"
120474
- }
120475
- };
120476
- }
120477
- case "audio":
120478
- {
120479
- var data = c.data;
120480
- var _c_format;
120481
- return {
120482
- type: "input_audio",
120483
- input_audio: {
120484
- data: data,
120485
- format: (_c_format = c.format) !== null && _c_format !== void 0 ? _c_format : "wav"
120486
- }
120487
- };
120488
- }
120489
- default:
120490
- throw new Error("Invalid content type");
120491
- }
120492
- })
120493
- };
120494
- }
120495
- return {
120496
- role: "user",
120497
- content: msg.content,
120498
- name: msg.name
120499
- };
120500
- case "assistant":
120501
- var _msg_functionCalls;
120502
- return {
120503
- role: "assistant",
120504
- content: msg.content,
120505
- name: msg.name,
120506
- tool_calls: (_msg_functionCalls = msg.functionCalls) === null || _msg_functionCalls === void 0 ? void 0 : _msg_functionCalls.map(function(v) {
120507
- return {
120508
- id: v.id,
120509
- type: "function",
120510
- function: {
120511
- name: v.function.name,
120512
- arguments: typeof v.function.params === "object" ? JSON.stringify(v.function.params) : v.function.params
120513
- }
120514
- };
120515
- })
120516
- };
120517
- case "function":
120518
- return {
120519
- role: "tool",
120520
- content: msg.result,
120521
- tool_call_id: msg.functionId
120522
- };
120523
- default:
120524
- throw new Error("Invalid role");
120525
- }
120526
- });
120527
- }
120528
120341
  // ai/ollama/api.ts
120529
120342
  var axAIOllamaDefaultConfig = function() {
120530
120343
  return structuredClone(_object_spread_props(_object_spread({}, axBaseAIDefaultConfig()), {
@@ -121583,7 +121396,7 @@ var AxAIRekaImpl = /*#__PURE__*/ function() {
121583
121396
  var apiConfig = {
121584
121397
  name: "/chat/completions"
121585
121398
  };
121586
- var messages = createMessages4(req);
121399
+ var messages = createMessages3(req);
121587
121400
  var _req_modelConfig_frequencyPenalty;
121588
121401
  var frequencyPenalty = (_req_modelConfig_frequencyPenalty = (_req_modelConfig = req.modelConfig) === null || _req_modelConfig === void 0 ? void 0 : _req_modelConfig.frequencyPenalty) !== null && _req_modelConfig_frequencyPenalty !== void 0 ? _req_modelConfig_frequencyPenalty : _this.config.frequencyPenalty;
121589
121402
  var _req_modelConfig_stream;
@@ -121697,7 +121510,7 @@ var mapFinishReason3 = function(finishReason) {
121697
121510
  return "length";
121698
121511
  }
121699
121512
  };
121700
- function createMessages4(req) {
121513
+ function createMessages3(req) {
121701
121514
  return req.chatPrompt.map(function(msg) {
121702
121515
  switch(msg.role){
121703
121516
  case "system":
package/index.esm.js CHANGED
@@ -23945,14 +23945,14 @@ function indent(str, spaces) {
23945
23945
  var match = parseIdentifier(input, i1, namePart) || namePart && parseAdditionalSymbol(input, i1) || maybeSpace && parseSpaces(input, i1);
23946
23946
  // match is required
23947
23947
  if (!match) {
23948
- return nextMatch = nextMatch1, i = i1, tokens = tokens1, {
23948
+ return i = i1, tokens = tokens1, nextMatch = nextMatch1, {
23949
23949
  v: nextMatch1
23950
23950
  };
23951
23951
  }
23952
23952
  var token = match.token, offset = match.offset;
23953
23953
  i1 += offset;
23954
23954
  if (token === " ") {
23955
- return nextMatch = nextMatch1, i = i1, tokens = tokens1, "continue";
23955
+ return i = i1, tokens = tokens1, nextMatch = nextMatch1, "continue";
23956
23956
  }
23957
23957
  tokens1 = _to_consumable_array$5(tokens1).concat([
23958
23958
  token
@@ -23971,7 +23971,7 @@ function indent(str, spaces) {
23971
23971
  if (contextKeys.some(function(el) {
23972
23972
  return el.startsWith(name);
23973
23973
  })) {
23974
- return nextMatch = nextMatch1, i = i1, tokens = tokens1, "continue";
23974
+ return i = i1, tokens = tokens1, nextMatch = nextMatch1, "continue";
23975
23975
  }
23976
23976
  if (dateTimeIdentifiers.some(function(el) {
23977
23977
  return el === name;
@@ -23990,9 +23990,9 @@ function indent(str, spaces) {
23990
23990
  if (dateTimeIdentifiers.some(function(el) {
23991
23991
  return el.startsWith(name);
23992
23992
  })) {
23993
- return nextMatch = nextMatch1, i = i1, tokens = tokens1, "continue";
23993
+ return i = i1, tokens = tokens1, nextMatch = nextMatch1, "continue";
23994
23994
  }
23995
- return nextMatch = nextMatch1, i = i1, tokens = tokens1, {
23995
+ return i = i1, tokens = tokens1, nextMatch = nextMatch1, {
23996
23996
  v: nextMatch1
23997
23997
  };
23998
23998
  };
@@ -148231,21 +148231,22 @@ var axModelInfoMistral = [
148231
148231
  ];
148232
148232
  // ai/mistral/api.ts
148233
148233
  var axAIMistralDefaultConfig = function() {
148234
- return structuredClone(_object_spread({
148234
+ return structuredClone(_object_spread_props(_object_spread({
148235
148235
  model: "mistral-small-latest" /* MistralSmall */
148236
- }, axBaseAIDefaultConfig()));
148236
+ }, axBaseAIDefaultConfig()), {
148237
+ topP: 1
148238
+ }));
148237
148239
  };
148238
148240
  var AxAIMistral = /*#__PURE__*/ function(AxAIOpenAIBase) {
148239
148241
  _inherits(AxAIMistral, AxAIOpenAIBase);
148240
148242
  var _super = _create_super(AxAIMistral);
148241
- function AxAIMistral(config) {
148243
+ function AxAIMistral(param) {
148244
+ var apiKey = param.apiKey, config = param.config, options = param.options, models = param.models, modelInfo = param.modelInfo;
148242
148245
  _class_call_check(this, AxAIMistral);
148243
- var _this;
148244
- var apiKey = config.apiKey, options = config.options, models = config.models, modelInfo = config.modelInfo;
148245
148246
  if (!apiKey || apiKey === "") {
148246
148247
  throw new Error("Mistral API key not set");
148247
148248
  }
148248
- var _config = _object_spread({}, axAIMistralDefaultConfig(), config.config);
148249
+ var _config = _object_spread({}, axAIMistralDefaultConfig(), config);
148249
148250
  modelInfo = _to_consumable_array(axModelInfoMistral).concat(_to_consumable_array(modelInfo !== null && modelInfo !== void 0 ? modelInfo : []));
148250
148251
  var supportFor = {
148251
148252
  functions: true,
@@ -148253,217 +148254,29 @@ var AxAIMistral = /*#__PURE__*/ function(AxAIOpenAIBase) {
148253
148254
  hasThinkingBudget: false,
148254
148255
  hasShowThoughts: false
148255
148256
  };
148256
- _this = _super.call(this, {
148257
+ var chatReqUpdater = function(req) {
148258
+ var max_completion_tokens = req.max_completion_tokens, result = _object_without_properties(req, [
148259
+ "max_completion_tokens"
148260
+ ]);
148261
+ return _object_spread_props(_object_spread({}, result), {
148262
+ max_tokens: max_completion_tokens
148263
+ });
148264
+ };
148265
+ var _this = _super.call(this, {
148257
148266
  apiKey: apiKey,
148258
148267
  config: _config,
148259
148268
  options: options,
148260
148269
  apiURL: "https://api.mistral.ai/v1",
148261
148270
  modelInfo: modelInfo,
148262
148271
  models: models,
148263
- supportFor: supportFor
148272
+ supportFor: supportFor,
148273
+ chatReqUpdater: chatReqUpdater
148264
148274
  });
148265
- _define_property(_assert_this_initialized(_this), "config", void 0);
148266
- _define_property(_assert_this_initialized(_this), "streamingUsage", void 0);
148267
- _define_property(_assert_this_initialized(_this), "chatReqUpdater", void 0);
148268
148275
  _get((_assert_this_initialized(_this), _get_prototype_of(AxAIMistral.prototype)), "setName", _this).call(_this, "Mistral");
148269
- _this.config = config;
148270
148276
  return _this;
148271
148277
  }
148272
- _create_class(AxAIMistral, [
148273
- {
148274
- key: "createChatReq",
148275
- value: function createChatReq(req, config) {
148276
- var _req_functions, _req_modelConfig, _req_modelConfig1, _this_config, _req_modelConfig2, _req_modelConfig3, _req_modelConfig4, _req_modelConfig5, _req_modelConfig6, _req_modelConfig7;
148277
- var model = req.model;
148278
- if (!req.chatPrompt || req.chatPrompt.length === 0) {
148279
- throw new Error("Chat prompt is empty");
148280
- }
148281
- var apiConfig = {
148282
- name: "/chat/completions"
148283
- };
148284
- var tools = (_req_functions = req.functions) === null || _req_functions === void 0 ? void 0 : _req_functions.map(function(v) {
148285
- return {
148286
- type: "function",
148287
- function: {
148288
- name: v.name,
148289
- description: v.description,
148290
- parameters: v.parameters
148291
- }
148292
- };
148293
- });
148294
- var toolsChoice = !req.functionCall && req.functions && req.functions.length > 0 ? "auto" : req.functionCall;
148295
- var messages = createMessages3(req);
148296
- var _req_modelConfig_frequencyPenalty;
148297
- var frequencyPenalty = (_req_modelConfig_frequencyPenalty = (_req_modelConfig = req.modelConfig) === null || _req_modelConfig === void 0 ? void 0 : _req_modelConfig.frequencyPenalty) !== null && _req_modelConfig_frequencyPenalty !== void 0 ? _req_modelConfig_frequencyPenalty : this.config.frequencyPenalty;
148298
- var _req_modelConfig_stream;
148299
- var stream = (_req_modelConfig_stream = (_req_modelConfig1 = req.modelConfig) === null || _req_modelConfig1 === void 0 ? void 0 : _req_modelConfig1.stream) !== null && _req_modelConfig_stream !== void 0 ? _req_modelConfig_stream : this.config.stream;
148300
- var store = this.config.store;
148301
- var _req_modelConfig_maxTokens, _req_modelConfig_temperature, _req_modelConfig_topP, _ref, _req_modelConfig_n, _req_modelConfig_stopSequences, _req_modelConfig_presencePenalty;
148302
- var reqValue = _object_spread({
148303
- model: model,
148304
- messages: messages,
148305
- response_format: ((_this_config = this.config) === null || _this_config === void 0 ? void 0 : _this_config.responseFormat) ? {
148306
- type: this.config.responseFormat
148307
- } : void 0,
148308
- tools: tools,
148309
- tool_choice: toolsChoice,
148310
- max_tokens: (_req_modelConfig_maxTokens = (_req_modelConfig2 = req.modelConfig) === null || _req_modelConfig2 === void 0 ? void 0 : _req_modelConfig2.maxTokens) !== null && _req_modelConfig_maxTokens !== void 0 ? _req_modelConfig_maxTokens : this.config.maxTokens,
148311
- temperature: (_req_modelConfig_temperature = (_req_modelConfig3 = req.modelConfig) === null || _req_modelConfig3 === void 0 ? void 0 : _req_modelConfig3.temperature) !== null && _req_modelConfig_temperature !== void 0 ? _req_modelConfig_temperature : this.config.temperature,
148312
- top_p: (_ref = (_req_modelConfig_topP = (_req_modelConfig4 = req.modelConfig) === null || _req_modelConfig4 === void 0 ? void 0 : _req_modelConfig4.topP) !== null && _req_modelConfig_topP !== void 0 ? _req_modelConfig_topP : this.config.topP) !== null && _ref !== void 0 ? _ref : 1,
148313
- n: (_req_modelConfig_n = (_req_modelConfig5 = req.modelConfig) === null || _req_modelConfig5 === void 0 ? void 0 : _req_modelConfig5.n) !== null && _req_modelConfig_n !== void 0 ? _req_modelConfig_n : this.config.n,
148314
- stop: (_req_modelConfig_stopSequences = (_req_modelConfig6 = req.modelConfig) === null || _req_modelConfig6 === void 0 ? void 0 : _req_modelConfig6.stopSequences) !== null && _req_modelConfig_stopSequences !== void 0 ? _req_modelConfig_stopSequences : this.config.stop,
148315
- presence_penalty: (_req_modelConfig_presencePenalty = (_req_modelConfig7 = req.modelConfig) === null || _req_modelConfig7 === void 0 ? void 0 : _req_modelConfig7.presencePenalty) !== null && _req_modelConfig_presencePenalty !== void 0 ? _req_modelConfig_presencePenalty : this.config.presencePenalty,
148316
- logit_bias: this.config.logitBias
148317
- }, frequencyPenalty ? {
148318
- frequency_penalty: frequencyPenalty
148319
- } : {}, stream && this.streamingUsage ? {
148320
- stream: true
148321
- } : {}, store ? {
148322
- store: store
148323
- } : {}, this.config.serviceTier ? {
148324
- service_tier: this.config.serviceTier
148325
- } : {}, this.config.user ? {
148326
- user: this.config.user
148327
- } : {});
148328
- if (this.config.reasoningEffort) {
148329
- reqValue.reasoning_effort = this.config.reasoningEffort;
148330
- }
148331
- if (this.config.webSearchOptions) {
148332
- reqValue.web_search_options = _object_spread({}, this.config.webSearchOptions.searchContextSize && {
148333
- search_context_size: this.config.webSearchOptions.searchContextSize
148334
- }, this.config.webSearchOptions.userLocation && {
148335
- user_location: {
148336
- approximate: _object_spread({
148337
- type: "approximate"
148338
- }, this.config.webSearchOptions.userLocation.approximate.city && {
148339
- city: this.config.webSearchOptions.userLocation.approximate.city
148340
- }, this.config.webSearchOptions.userLocation.approximate.country && {
148341
- country: this.config.webSearchOptions.userLocation.approximate.country
148342
- }, this.config.webSearchOptions.userLocation.approximate.region && {
148343
- region: this.config.webSearchOptions.userLocation.approximate.region
148344
- }, this.config.webSearchOptions.userLocation.approximate.timezone && {
148345
- timezone: this.config.webSearchOptions.userLocation.approximate.timezone
148346
- })
148347
- }
148348
- });
148349
- }
148350
- if (config.thinkingTokenBudget) {
148351
- switch(config.thinkingTokenBudget){
148352
- case "none":
148353
- reqValue.reasoning_effort = void 0;
148354
- break;
148355
- case "minimal":
148356
- reqValue.reasoning_effort = "low";
148357
- break;
148358
- case "low":
148359
- reqValue.reasoning_effort = "medium";
148360
- break;
148361
- case "medium":
148362
- reqValue.reasoning_effort = "high";
148363
- break;
148364
- case "high":
148365
- reqValue.reasoning_effort = "high";
148366
- break;
148367
- case "highest":
148368
- reqValue.reasoning_effort = "high";
148369
- break;
148370
- }
148371
- }
148372
- if (this.chatReqUpdater) {
148373
- reqValue = this.chatReqUpdater(reqValue);
148374
- }
148375
- return [
148376
- apiConfig,
148377
- reqValue
148378
- ];
148379
- }
148380
- }
148381
- ]);
148382
148278
  return AxAIMistral;
148383
148279
  }(AxAIOpenAIBase);
148384
- function createMessages3(req) {
148385
- return req.chatPrompt.map(function(msg) {
148386
- switch(msg.role){
148387
- case "system":
148388
- return {
148389
- role: "system",
148390
- content: msg.content
148391
- };
148392
- case "user":
148393
- if (Array.isArray(msg.content)) {
148394
- return {
148395
- role: "user",
148396
- name: msg.name,
148397
- content: msg.content.map(function(c) {
148398
- switch(c.type){
148399
- case "text":
148400
- return {
148401
- type: "text",
148402
- text: c.text
148403
- };
148404
- case "image":
148405
- {
148406
- var url = "data:".concat(c.mimeType, ";base64,") + c.image;
148407
- var _c_details;
148408
- return {
148409
- type: "image_url",
148410
- image_url: {
148411
- url: url,
148412
- details: (_c_details = c.details) !== null && _c_details !== void 0 ? _c_details : "auto"
148413
- }
148414
- };
148415
- }
148416
- case "audio":
148417
- {
148418
- var data = c.data;
148419
- var _c_format;
148420
- return {
148421
- type: "input_audio",
148422
- input_audio: {
148423
- data: data,
148424
- format: (_c_format = c.format) !== null && _c_format !== void 0 ? _c_format : "wav"
148425
- }
148426
- };
148427
- }
148428
- default:
148429
- throw new Error("Invalid content type");
148430
- }
148431
- })
148432
- };
148433
- }
148434
- return {
148435
- role: "user",
148436
- content: msg.content,
148437
- name: msg.name
148438
- };
148439
- case "assistant":
148440
- var _msg_functionCalls;
148441
- return {
148442
- role: "assistant",
148443
- content: msg.content,
148444
- name: msg.name,
148445
- tool_calls: (_msg_functionCalls = msg.functionCalls) === null || _msg_functionCalls === void 0 ? void 0 : _msg_functionCalls.map(function(v) {
148446
- return {
148447
- id: v.id,
148448
- type: "function",
148449
- function: {
148450
- name: v.function.name,
148451
- arguments: typeof v.function.params === "object" ? JSON.stringify(v.function.params) : v.function.params
148452
- }
148453
- };
148454
- })
148455
- };
148456
- case "function":
148457
- return {
148458
- role: "tool",
148459
- content: msg.result,
148460
- tool_call_id: msg.functionId
148461
- };
148462
- default:
148463
- throw new Error("Invalid role");
148464
- }
148465
- });
148466
- }
148467
148280
  // ai/ollama/api.ts
148468
148281
  var axAIOllamaDefaultConfig = function() {
148469
148282
  return structuredClone(_object_spread_props(_object_spread({}, axBaseAIDefaultConfig()), {
@@ -149522,7 +149335,7 @@ var AxAIRekaImpl = /*#__PURE__*/ function() {
149522
149335
  var apiConfig = {
149523
149336
  name: "/chat/completions"
149524
149337
  };
149525
- var messages = createMessages4(req);
149338
+ var messages = createMessages3(req);
149526
149339
  var _req_modelConfig_frequencyPenalty;
149527
149340
  var frequencyPenalty = (_req_modelConfig_frequencyPenalty = (_req_modelConfig = req.modelConfig) === null || _req_modelConfig === void 0 ? void 0 : _req_modelConfig.frequencyPenalty) !== null && _req_modelConfig_frequencyPenalty !== void 0 ? _req_modelConfig_frequencyPenalty : _this.config.frequencyPenalty;
149528
149341
  var _req_modelConfig_stream;
@@ -149636,7 +149449,7 @@ var mapFinishReason3 = function(finishReason) {
149636
149449
  return "length";
149637
149450
  }
149638
149451
  };
149639
- function createMessages4(req) {
149452
+ function createMessages3(req) {
149640
149453
  return req.chatPrompt.map(function(msg) {
149641
149454
  switch(msg.role){
149642
149455
  case "system":
package/package.json CHANGED
@@ -1,14 +1,12 @@
1
1
  {
2
2
  "name": "@digipair/skill-dsp",
3
- "version": "0.88.2",
3
+ "version": "0.88.3-0",
4
4
  "keywords": [
5
5
  "digipair",
6
6
  "service",
7
7
  "tool"
8
8
  ],
9
- "dependencies": {
10
- "@digipair/ax": "11.0.64-1"
11
- },
9
+ "dependencies": {},
12
10
  "main": "./index.cjs.js",
13
11
  "module": "./index.esm.js"
14
12
  }