@digipair/skill-dsp 0.11.1 → 0.11.2
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- package/index.cjs.js +17 -15
- package/index.esm.js +17 -15
- package/package.json +1 -1
package/index.cjs.js
CHANGED
@@ -23510,14 +23510,14 @@ function indent(str, spaces) {
 var match = parseIdentifier(input, i1, namePart) || namePart && parseAdditionalSymbol(input, i1) || maybeSpace && parseSpaces(input, i1);
 // match is required
 if (!match) {
-return nextMatch = nextMatch1,
+return nextMatch = nextMatch1, i = i1, tokens = tokens1, {
 v: nextMatch1
 };
 }
 var token = match.token, offset = match.offset;
 i1 += offset;
 if (token === " ") {
-return nextMatch = nextMatch1,
+return nextMatch = nextMatch1, i = i1, tokens = tokens1, "continue";
 }
 tokens1 = _to_consumable_array$1(tokens1).concat([
 token
@@ -23536,7 +23536,7 @@ function indent(str, spaces) {
 if (contextKeys.some(function(el) {
 return el.startsWith(name);
 })) {
-return nextMatch = nextMatch1,
+return nextMatch = nextMatch1, i = i1, tokens = tokens1, "continue";
 }
 if (dateTimeIdentifiers.some(function(el) {
 return el === name;
@@ -23555,9 +23555,9 @@ function indent(str, spaces) {
 if (dateTimeIdentifiers.some(function(el) {
 return el.startsWith(name);
 })) {
-return nextMatch = nextMatch1,
+return nextMatch = nextMatch1, i = i1, tokens = tokens1, "continue";
 }
-return nextMatch = nextMatch1,
+return nextMatch = nextMatch1, i = i1, tokens = tokens1, {
 v: nextMatch1
 };
 };
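Note: the three hunks above make the same fix inside a down-leveled loop in the bundled parser. The transpiler has hoisted the loop body into a helper that returns "continue" to skip an iteration, or an { v: ... } envelope to return a value from the enclosing function, and the loop-local copies (i1, tokens1) have to be written back to the outer i and tokens before each early return. The added `i = i1, tokens = tokens1` assignments appear to restore exactly that write-back. A minimal, self-contained sketch of the pattern (invented names, not the package's actual code):

    // tokenize letters, skip spaces, stop at ';' -- shaped like the transpiled loop above
    function tokenize(input) {
        var i = 0, tokens = [];
        var _loop = function() {
            var i1 = i, tokens1 = tokens;
            var ch = input[i1];
            i1 += 1;
            if (ch === " ") {
                // keep the outer cursor in sync before skipping this iteration
                return i = i1, tokens = tokens1, "continue";
            }
            if (ch === ";") {
                // early exit: hand the result back through the { v: ... } envelope
                return i = i1, tokens = tokens1, { v: tokens1 };
            }
            tokens1 = tokens1.concat([ch]);
            i = i1;
            tokens = tokens1;
        };
        while (i < input.length) {
            var _ret = _loop();
            if (_ret === "continue") continue;
            if (typeof _ret === "object") return _ret.v;
        }
        return tokens;
    }
    // tokenize("ab c;d") -> ["a", "b", "c"]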
@@ -27461,16 +27461,18 @@ const preparePinsSettings = async (settings, context)=>{
 
 let DspService = class DspService {
 async model(params, _pinsSettingsList, _context) {
-const {
+const { AxAI } = await eval(`import('@ax-llm/ax')`);
 const { name, options } = params;
-const modelInstance =
+const modelInstance = new AxAI(_extends({
+name
+}, options));
 return modelInstance;
 }
 async modelOpenAI(params, _pinsSettingsList, context) {
-const {
+const { AxAIOpenAI } = await eval(`import('@ax-llm/ax')`);
 var _context_privates_OPENAI_API_KEY, _context_privates_OPENAI_SERVER;
 const { apiKey = (_context_privates_OPENAI_API_KEY = context.privates.OPENAI_API_KEY) != null ? _context_privates_OPENAI_API_KEY : process.env['OPENAI_API_KEY'], apiURL = (_context_privates_OPENAI_SERVER = context.privates.OPENAI_SERVER) != null ? _context_privates_OPENAI_SERVER : process.env['OPENAI_SERVER'], config, options } = params;
-const modelInstance = new
+const modelInstance = new AxAIOpenAI({
 apiKey,
 apiURL,
 config,
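The two DspService methods above, like the rest of the class below, now resolve '@ax-llm/ax' through await eval(`import('@ax-llm/ax')`) in place of the truncated expression shown on the removed lines. Wrapping the dynamic import in eval is a common way to keep a bundler or CJS transpiler from rewriting import() into require(), so the ESM-only dependency stays a real runtime import and is loaded lazily; that reading is an inference from the diff, not something the package states. A hedged sketch of the pattern (helper name invented):

    // Keep the dynamic import out of the bundler's reach so it is resolved at runtime.
    async function loadAx() {
        const ax = await eval(`import('@ax-llm/ax')`);  // stays import(), never becomes require()
        return ax;  // exposes AxAI, AxAIOpenAI, AxAIAzureOpenAI, AxAIOllama, AxGenerate, ...
    }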
@@ -27479,10 +27481,10 @@ let DspService = class DspService {
 return modelInstance;
 }
 async modelAzureOpenAi(params, _pinsSettingsList, context) {
-const {
+const { AxAIAzureOpenAI } = await eval(`import('@ax-llm/ax')`);
 var _context_privates_AZURE_OPENAI_API_KEY, _context_privates_AZURE_OPENAI_API_INSTANCE_NAME, _context_privates_AZURE_OPENAI_API_DEPLOYMENT_NAME, _context_privates_AZURE_OPENAI_API_VERSION;
 const { apiKey = (_context_privates_AZURE_OPENAI_API_KEY = context.privates.AZURE_OPENAI_API_KEY) != null ? _context_privates_AZURE_OPENAI_API_KEY : process.env['AZURE_OPENAI_API_KEY'], resourceName = (_context_privates_AZURE_OPENAI_API_INSTANCE_NAME = context.privates.AZURE_OPENAI_API_INSTANCE_NAME) != null ? _context_privates_AZURE_OPENAI_API_INSTANCE_NAME : process.env['AZURE_OPENAI_API_INSTANCE_NAME'], deploymentName = (_context_privates_AZURE_OPENAI_API_DEPLOYMENT_NAME = context.privates.AZURE_OPENAI_API_DEPLOYMENT_NAME) != null ? _context_privates_AZURE_OPENAI_API_DEPLOYMENT_NAME : process.env['AZURE_OPENAI_API_DEPLOYMENT_NAME'], version = (_context_privates_AZURE_OPENAI_API_VERSION = context.privates.AZURE_OPENAI_API_VERSION) != null ? _context_privates_AZURE_OPENAI_API_VERSION : process.env['AZURE_OPENAI_API_VERSION'], config, options } = params;
-const modelInstance = new
+const modelInstance = new AxAIAzureOpenAI({
 apiKey,
 resourceName,
 deploymentName,
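In modelOpenAI and modelAzureOpenAi the destructuring defaults resolve each setting from params first, then context.privates, then process.env. The compiled ternaries above are equivalent to this more readable form (helper name invented for illustration):

    function resolveSetting(params, context, paramKey, settingName) {
        if (params[paramKey] !== undefined) return params[paramKey];                       // explicit parameter wins
        if (context.privates[settingName] != null) return context.privates[settingName];   // then the context's privates
        return process.env[settingName];                                                   // finally the environment
    }
    // e.g. const apiKey = resolveSetting(params, context, 'apiKey', 'AZURE_OPENAI_API_KEY');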
@@ -27493,9 +27495,9 @@ let DspService = class DspService {
 return modelInstance;
 }
 async modelOllama(params, _pinsSettingsList, context) {
-const {
+const { AxAIOllama } = await eval(`import('@ax-llm/ax')`);
 const { model, url = context.privates.OLLAMA_SERVER ? context.privates.OLLAMA_SERVER + '/v1' : process.env['OLLAMA_SERVER'] ? process.env['OLLAMA_SERVER'] + '/v1' : 'http://localhost:11434/v1', apiKey, config, options } = params;
-const modelInstance = new
+const modelInstance = new AxAIOllama({
 model,
 url,
 apiKey,
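modelOllama applies the same precedence to the server URL, appending '/v1' to whichever OLLAMA_SERVER value is found and falling back to the local default. When params.url is not supplied, the compiled ternary chain reduces to:

    // equivalent to the compiled default for `url` in modelOllama
    function defaultOllamaUrl(context) {
        const base = context.privates.OLLAMA_SERVER || process.env['OLLAMA_SERVER'];
        return base ? base + '/v1' : 'http://localhost:11434/v1';
    }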
@@ -27505,10 +27507,10 @@ let DspService = class DspService {
 return modelInstance;
 }
 async generate(params, _pinsSettingsList, context) {
-const {
+const { AxGenerate } = await eval(`import('@ax-llm/ax')`);
 const { model = context.privates.MODEL_DSP, signature, input } = params;
 const modelInstance = await executePinsList(model, context);
-const gen = new
+const gen = new AxGenerate(modelInstance, signature);
 const result = await gen.forward(input);
 return result;
 }
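Taken together, generate() lazily loads AxGenerate, resolves the configured model pins into a model instance with executePinsList, binds that instance to a signature, and runs forward(input). A hedged end-to-end sketch using only the '@ax-llm/ax' names visible in this diff; the provider name, option shape, and signature string are assumptions for illustration, not taken from the package:

    async function demo() {
        const { AxAI, AxGenerate } = await eval(`import('@ax-llm/ax')`);
        // assumed options: a provider name plus provider-specific settings spread in, as model() does
        const ai = new AxAI({ name: 'openai', apiKey: process.env['OPENAI_API_KEY'] });
        const gen = new AxGenerate(ai, 'question -> answer');   // assumed signature format
        return await gen.forward({ question: 'What does DSP stand for?' });
    }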
package/index.esm.js
CHANGED
@@ -23488,14 +23488,14 @@ function indent(str, spaces) {
 var match = parseIdentifier(input, i1, namePart) || namePart && parseAdditionalSymbol(input, i1) || maybeSpace && parseSpaces(input, i1);
 // match is required
 if (!match) {
-return
+return nextMatch = nextMatch1, i = i1, tokens = tokens1, {
 v: nextMatch1
 };
 }
 var token = match.token, offset = match.offset;
 i1 += offset;
 if (token === " ") {
-return
+return nextMatch = nextMatch1, i = i1, tokens = tokens1, "continue";
 }
 tokens1 = _to_consumable_array$1(tokens1).concat([
 token
@@ -23514,7 +23514,7 @@ function indent(str, spaces) {
 if (contextKeys.some(function(el) {
 return el.startsWith(name);
 })) {
-return
+return nextMatch = nextMatch1, i = i1, tokens = tokens1, "continue";
 }
 if (dateTimeIdentifiers.some(function(el) {
 return el === name;
@@ -23533,9 +23533,9 @@ function indent(str, spaces) {
 if (dateTimeIdentifiers.some(function(el) {
 return el.startsWith(name);
 })) {
-return
+return nextMatch = nextMatch1, i = i1, tokens = tokens1, "continue";
 }
-return
+return nextMatch = nextMatch1, i = i1, tokens = tokens1, {
 v: nextMatch1
 };
 };
@@ -27439,16 +27439,18 @@ const preparePinsSettings = async (settings, context)=>{
 
 let DspService = class DspService {
 async model(params, _pinsSettingsList, _context) {
-const {
+const { AxAI } = await eval(`import('@ax-llm/ax')`);
 const { name, options } = params;
-const modelInstance =
+const modelInstance = new AxAI(_extends({
+name
+}, options));
 return modelInstance;
 }
 async modelOpenAI(params, _pinsSettingsList, context) {
-const {
+const { AxAIOpenAI } = await eval(`import('@ax-llm/ax')`);
 var _context_privates_OPENAI_API_KEY, _context_privates_OPENAI_SERVER;
 const { apiKey = (_context_privates_OPENAI_API_KEY = context.privates.OPENAI_API_KEY) != null ? _context_privates_OPENAI_API_KEY : process.env['OPENAI_API_KEY'], apiURL = (_context_privates_OPENAI_SERVER = context.privates.OPENAI_SERVER) != null ? _context_privates_OPENAI_SERVER : process.env['OPENAI_SERVER'], config, options } = params;
-const modelInstance = new
+const modelInstance = new AxAIOpenAI({
 apiKey,
 apiURL,
 config,
@@ -27457,10 +27459,10 @@ let DspService = class DspService {
 return modelInstance;
 }
 async modelAzureOpenAi(params, _pinsSettingsList, context) {
-const {
+const { AxAIAzureOpenAI } = await eval(`import('@ax-llm/ax')`);
 var _context_privates_AZURE_OPENAI_API_KEY, _context_privates_AZURE_OPENAI_API_INSTANCE_NAME, _context_privates_AZURE_OPENAI_API_DEPLOYMENT_NAME, _context_privates_AZURE_OPENAI_API_VERSION;
 const { apiKey = (_context_privates_AZURE_OPENAI_API_KEY = context.privates.AZURE_OPENAI_API_KEY) != null ? _context_privates_AZURE_OPENAI_API_KEY : process.env['AZURE_OPENAI_API_KEY'], resourceName = (_context_privates_AZURE_OPENAI_API_INSTANCE_NAME = context.privates.AZURE_OPENAI_API_INSTANCE_NAME) != null ? _context_privates_AZURE_OPENAI_API_INSTANCE_NAME : process.env['AZURE_OPENAI_API_INSTANCE_NAME'], deploymentName = (_context_privates_AZURE_OPENAI_API_DEPLOYMENT_NAME = context.privates.AZURE_OPENAI_API_DEPLOYMENT_NAME) != null ? _context_privates_AZURE_OPENAI_API_DEPLOYMENT_NAME : process.env['AZURE_OPENAI_API_DEPLOYMENT_NAME'], version = (_context_privates_AZURE_OPENAI_API_VERSION = context.privates.AZURE_OPENAI_API_VERSION) != null ? _context_privates_AZURE_OPENAI_API_VERSION : process.env['AZURE_OPENAI_API_VERSION'], config, options } = params;
-const modelInstance = new
+const modelInstance = new AxAIAzureOpenAI({
 apiKey,
 resourceName,
 deploymentName,
@@ -27471,9 +27473,9 @@ let DspService = class DspService {
 return modelInstance;
 }
 async modelOllama(params, _pinsSettingsList, context) {
-const {
+const { AxAIOllama } = await eval(`import('@ax-llm/ax')`);
 const { model, url = context.privates.OLLAMA_SERVER ? context.privates.OLLAMA_SERVER + '/v1' : process.env['OLLAMA_SERVER'] ? process.env['OLLAMA_SERVER'] + '/v1' : 'http://localhost:11434/v1', apiKey, config, options } = params;
-const modelInstance = new
+const modelInstance = new AxAIOllama({
 model,
 url,
 apiKey,
@@ -27483,10 +27485,10 @@ let DspService = class DspService {
 return modelInstance;
 }
 async generate(params, _pinsSettingsList, context) {
-const {
+const { AxGenerate } = await eval(`import('@ax-llm/ax')`);
 const { model = context.privates.MODEL_DSP, signature, input } = params;
 const modelInstance = await executePinsList(model, context);
-const gen = new
+const gen = new AxGenerate(modelInstance, signature);
 const result = await gen.forward(input);
 return result;
 }