@digipair/skill-dsp 0.8.26 → 0.8.29
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.cjs.js +9 -9
- package/index.esm.js +9 -9
- package/package.json +1 -1
package/index.cjs.js
CHANGED

@@ -23510,14 +23510,14 @@ function indent(str, spaces) {
         var match = parseIdentifier(input, i1, namePart) || namePart && parseAdditionalSymbol(input, i1) || maybeSpace && parseSpaces(input, i1);
         // match is required
         if (!match) {
-            return
+            return nextMatch = nextMatch1, tokens = tokens1, i = i1, {
                 v: nextMatch1
             };
         }
         var token = match.token, offset = match.offset;
         i1 += offset;
         if (token === " ") {
-            return
+            return nextMatch = nextMatch1, tokens = tokens1, i = i1, "continue";
         }
         tokens1 = _to_consumable_array$1(tokens1).concat([
             token
@@ -23536,7 +23536,7 @@ function indent(str, spaces) {
         if (contextKeys.some(function(el) {
             return el.startsWith(name);
         })) {
-            return
+            return nextMatch = nextMatch1, tokens = tokens1, i = i1, "continue";
         }
         if (dateTimeIdentifiers.some(function(el) {
             return el === name;
@@ -23555,9 +23555,9 @@ function indent(str, spaces) {
         if (dateTimeIdentifiers.some(function(el) {
             return el.startsWith(name);
         })) {
-            return
+            return nextMatch = nextMatch1, tokens = tokens1, i = i1, "continue";
         }
-        return
+        return nextMatch = nextMatch1, tokens = tokens1, i = i1, {
             v: nextMatch1
         };
     };
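Note on the three hunks above: they sit in bundled parser output, so the odd-looking `return nextMatch = nextMatch1, tokens = tokens1, i = i1, "continue";` lines are not hand-written style. They are the usual downleveling of a loop body that the bundler has lifted into a helper closure: `continue` and early `return` are signalled through the closure's return value (the string "continue" or a boxed `{ v: ... }`), and local copies of captured variables are written back to the outer scope on the way out. The change in 0.8.29 makes those exit paths write back `tokens` and `nextMatch` as well, not just `i`. A minimal, self-contained illustration of that pattern (hypothetical example, not the actual bundled code):

// Hypothetical sketch of the lifted-loop-body pattern seen in the hunks above.
// The closure commits its local copies to the outer variables, then signals
// "continue" or an early return ({ v: ... }) to the driving loop.
function sumUntilNegative(values) {
  var total = 0, index = 0;
  var _loop = function () {
    var value = values[index], index1 = index + 1, total1 = total;
    if (value === 0) {
      // skip zeros: commit locals, then ask the caller to `continue`
      return total = total1, index = index1, "continue";
    }
    if (value < 0) {
      // early return from the outer function: commit locals, box the result
      return total = total1, index = index1, { v: total1 };
    }
    total1 += value;
    return total = total1, index = index1, undefined;
  };
  while (index < values.length) {
    var _ret = _loop();
    if (_ret === "continue") continue;
    if (typeof _ret === "object") return _ret.v;
  }
  return total;
}

console.log(sumUntilNegative([1, 0, 2, -1, 5])); // 3

The bug fixed here is exactly the kind this pattern invites: if an exit path forgets to commit one of the local copies (here `tokens1` and `nextMatch1`), the outer loop keeps working with stale state.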
@@ -27467,14 +27467,14 @@ let DspService = class DspService {
     async model(params, _pinsSettingsList, _context) {
         const { AI } = await eval(`import('llmclient')`);
         const { name, options } = params;
-        const modelInstance = AI(name, options);
+        const modelInstance = new AI(name, options);
         return modelInstance;
     }
     async modelOpenAI(params, _pinsSettingsList, context) {
         const { OpenAI } = await eval(`import('llmclient')`);
         var _context_privates_OPENAI_API_KEY, _context_privates_OPENAI_SERVER;
         const { apiKey = (_context_privates_OPENAI_API_KEY = context.privates.OPENAI_API_KEY) != null ? _context_privates_OPENAI_API_KEY : process.env['OPENAI_API_KEY'], apiURL = (_context_privates_OPENAI_SERVER = context.privates.OPENAI_SERVER) != null ? _context_privates_OPENAI_SERVER : process.env['OPENAI_SERVER'], config, options } = params;
-        const modelInstance = OpenAI({
+        const modelInstance = new OpenAI({
             apiKey,
             apiURL,
             config,
@@ -27486,7 +27486,7 @@ let DspService = class DspService {
         const { AzureOpenAi } = await eval(`import('llmclient')`);
         var _context_privates_AZURE_OPENAI_API_KEY, _context_privates_AZURE_OPENAI_API_INSTANCE_NAME, _context_privates_AZURE_OPENAI_API_DEPLOYMENT_NAME, _context_privates_AZURE_OPENAI_API_VERSION;
         const { apiKey = (_context_privates_AZURE_OPENAI_API_KEY = context.privates.AZURE_OPENAI_API_KEY) != null ? _context_privates_AZURE_OPENAI_API_KEY : process.env['AZURE_OPENAI_API_KEY'], resourceName = (_context_privates_AZURE_OPENAI_API_INSTANCE_NAME = context.privates.AZURE_OPENAI_API_INSTANCE_NAME) != null ? _context_privates_AZURE_OPENAI_API_INSTANCE_NAME : process.env['AZURE_OPENAI_API_INSTANCE_NAME'], deploymentName = (_context_privates_AZURE_OPENAI_API_DEPLOYMENT_NAME = context.privates.AZURE_OPENAI_API_DEPLOYMENT_NAME) != null ? _context_privates_AZURE_OPENAI_API_DEPLOYMENT_NAME : process.env['AZURE_OPENAI_API_DEPLOYMENT_NAME'], version = (_context_privates_AZURE_OPENAI_API_VERSION = context.privates.AZURE_OPENAI_API_VERSION) != null ? _context_privates_AZURE_OPENAI_API_VERSION : process.env['AZURE_OPENAI_API_VERSION'], config, options } = params;
-        const modelInstance = AzureOpenAi({
+        const modelInstance = new AzureOpenAi({
             apiKey,
             resourceName,
             deploymentName,
@@ -27499,7 +27499,7 @@ let DspService = class DspService {
     async modelOllama(params, _pinsSettingsList, context) {
         const { Ollama } = await eval(`import('llmclient')`);
         const { model, url = context.privates.OLLAMA_SERVER ? context.privates.OLLAMA_SERVER + '/v1' : process.env['OLLAMA_SERVER'] ? process.env['OLLAMA_SERVER'] + '/v1' : 'http://localhost:11434/v1', apiKey, config, options } = params;
-        const modelInstance = Ollama({
+        const modelInstance = new Ollama({
             model,
             url,
             apiKey,
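The DspService hunks above carry the functional change of this release: every llmclient model factory is now invoked with `new` (`new AI(...)`, `new OpenAI({...})`, `new AzureOpenAi({...})`, `new Ollama({...})`) instead of being called as a plain function, which implies llmclient now exposes these as classes. A de-transpiled sketch of what `modelOpenAI` does after the change, with the key fallback chain taken from the diff; treat it as an approximation for readability, not the exact bundled source:

// Sketch only: readable form of the modelOpenAI method shown in the hunk above.
async function modelOpenAI(params, _pinsSettingsList, context) {
  const { OpenAI } = await import('llmclient');
  const {
    // context.privates wins over the environment variable, as in the bundled code
    apiKey = context.privates.OPENAI_API_KEY ?? process.env['OPENAI_API_KEY'],
    apiURL = context.privates.OPENAI_SERVER ?? process.env['OPENAI_SERVER'],
    config,
    options,
  } = params;
  // 0.8.26 called OpenAI({ ... }); 0.8.29 constructs it:
  return new OpenAI({ apiKey, apiURL, config, options });
}

The same call-to-constructor switch is applied to `model`, `modelAzureOpenAI`, and `modelOllama` below.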
package/index.esm.js
CHANGED

@@ -23488,14 +23488,14 @@ function indent(str, spaces) {
         var match = parseIdentifier(input, i1, namePart) || namePart && parseAdditionalSymbol(input, i1) || maybeSpace && parseSpaces(input, i1);
         // match is required
         if (!match) {
-            return i = i1,
+            return i = i1, tokens = tokens1, nextMatch = nextMatch1, {
                 v: nextMatch1
             };
         }
         var token = match.token, offset = match.offset;
         i1 += offset;
         if (token === " ") {
-            return i = i1,
+            return i = i1, tokens = tokens1, nextMatch = nextMatch1, "continue";
         }
         tokens1 = _to_consumable_array$1(tokens1).concat([
             token
@@ -23514,7 +23514,7 @@ function indent(str, spaces) {
         if (contextKeys.some(function(el) {
             return el.startsWith(name);
         })) {
-            return i = i1,
+            return i = i1, tokens = tokens1, nextMatch = nextMatch1, "continue";
         }
         if (dateTimeIdentifiers.some(function(el) {
             return el === name;
@@ -23533,9 +23533,9 @@ function indent(str, spaces) {
         if (dateTimeIdentifiers.some(function(el) {
             return el.startsWith(name);
         })) {
-            return i = i1,
+            return i = i1, tokens = tokens1, nextMatch = nextMatch1, "continue";
         }
-        return i = i1,
+        return i = i1, tokens = tokens1, nextMatch = nextMatch1, {
             v: nextMatch1
         };
     };
@@ -27445,14 +27445,14 @@ let DspService = class DspService {
     async model(params, _pinsSettingsList, _context) {
         const { AI } = await eval(`import('llmclient')`);
         const { name, options } = params;
-        const modelInstance = AI(name, options);
+        const modelInstance = new AI(name, options);
         return modelInstance;
     }
     async modelOpenAI(params, _pinsSettingsList, context) {
         const { OpenAI } = await eval(`import('llmclient')`);
         var _context_privates_OPENAI_API_KEY, _context_privates_OPENAI_SERVER;
         const { apiKey = (_context_privates_OPENAI_API_KEY = context.privates.OPENAI_API_KEY) != null ? _context_privates_OPENAI_API_KEY : process.env['OPENAI_API_KEY'], apiURL = (_context_privates_OPENAI_SERVER = context.privates.OPENAI_SERVER) != null ? _context_privates_OPENAI_SERVER : process.env['OPENAI_SERVER'], config, options } = params;
-        const modelInstance = OpenAI({
+        const modelInstance = new OpenAI({
             apiKey,
             apiURL,
             config,
@@ -27464,7 +27464,7 @@ let DspService = class DspService {
         const { AzureOpenAi } = await eval(`import('llmclient')`);
         var _context_privates_AZURE_OPENAI_API_KEY, _context_privates_AZURE_OPENAI_API_INSTANCE_NAME, _context_privates_AZURE_OPENAI_API_DEPLOYMENT_NAME, _context_privates_AZURE_OPENAI_API_VERSION;
         const { apiKey = (_context_privates_AZURE_OPENAI_API_KEY = context.privates.AZURE_OPENAI_API_KEY) != null ? _context_privates_AZURE_OPENAI_API_KEY : process.env['AZURE_OPENAI_API_KEY'], resourceName = (_context_privates_AZURE_OPENAI_API_INSTANCE_NAME = context.privates.AZURE_OPENAI_API_INSTANCE_NAME) != null ? _context_privates_AZURE_OPENAI_API_INSTANCE_NAME : process.env['AZURE_OPENAI_API_INSTANCE_NAME'], deploymentName = (_context_privates_AZURE_OPENAI_API_DEPLOYMENT_NAME = context.privates.AZURE_OPENAI_API_DEPLOYMENT_NAME) != null ? _context_privates_AZURE_OPENAI_API_DEPLOYMENT_NAME : process.env['AZURE_OPENAI_API_DEPLOYMENT_NAME'], version = (_context_privates_AZURE_OPENAI_API_VERSION = context.privates.AZURE_OPENAI_API_VERSION) != null ? _context_privates_AZURE_OPENAI_API_VERSION : process.env['AZURE_OPENAI_API_VERSION'], config, options } = params;
-        const modelInstance = AzureOpenAi({
+        const modelInstance = new AzureOpenAi({
             apiKey,
             resourceName,
             deploymentName,
@@ -27477,7 +27477,7 @@ let DspService = class DspService {
     async modelOllama(params, _pinsSettingsList, context) {
         const { Ollama } = await eval(`import('llmclient')`);
         const { model, url = context.privates.OLLAMA_SERVER ? context.privates.OLLAMA_SERVER + '/v1' : process.env['OLLAMA_SERVER'] ? process.env['OLLAMA_SERVER'] + '/v1' : 'http://localhost:11434/v1', apiKey, config, options } = params;
-        const modelInstance = Ollama({
+        const modelInstance = new Ollama({
             model,
             url,
             apiKey,
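The ESM bundle mirrors the CJS changes exactly, only at different line offsets. For reference, the one-line `url` default in `modelOllama` (visible in both bundles and unchanged by this release) resolves the endpoint in this order: `context.privates.OLLAMA_SERVER + '/v1'`, then `process.env['OLLAMA_SERVER'] + '/v1'`, then `http://localhost:11434/v1`. A readable restatement, with a hypothetical helper name used purely for illustration:

// resolveOllamaUrl is not part of the package; it just restates the ternary chain above.
function resolveOllamaUrl(privates, env) {
  if (privates.OLLAMA_SERVER) return privates.OLLAMA_SERVER + '/v1';
  if (env.OLLAMA_SERVER) return env.OLLAMA_SERVER + '/v1';
  return 'http://localhost:11434/v1';
}
// 0.8.29 then builds the model with a constructor call:
// const modelInstance = new Ollama({ model, url, apiKey, config, options });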