utilitas 1998.2.64 → 1998.2.65
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +11 -11
- package/dist/utilitas.lite.mjs +1 -1
- package/dist/utilitas.lite.mjs.map +1 -1
- package/lib/alan.mjs +23 -27
- package/lib/manifest.mjs +1 -1
- package/package.json +1 -1
package/lib/alan.mjs
CHANGED
@@ -1,7 +1,7 @@
 import { fileTypeFromBuffer } from 'file-type';
 import { end, loop } from './event.mjs';
 import { createWavHeader } from './media.mjs';
-import { checkSearch, search } from './shot.mjs';
+import get, { checkSearch, search } from './shot.mjs';
 import { BASE64, BUFFER, DATAURL, MIME_BINARY, STREAM, convert } from './storage.mjs';
 import { create as createUoid } from './uoid.mjs';
 import { distill } from './web.mjs';
@@ -702,12 +702,8 @@ const buildGeminiHistory = (text, options) => buildGeminiMessage(
     text, { ...options || {}, history: true }
 );
 
-const
-
-);
-
-const listOpenAIModels = async (options) => {
-    const { client } = await getOpenAIClient(options);
+const listOpenAIModels = async (aiId, options) => {
+    const { client } = await getAi(aiId);
     const resp = await client.models.list();
     return options?.raw ? resp : resp.data;
 };
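In the hunk above, listOpenAIModels now takes an explicit aiId as its first argument and resolves the provider client through getAi(aiId) rather than getOpenAIClient(options). A minimal call-site sketch of the new shape, assuming alan and listOpenAIModels are exported from the package root as in earlier releases, and with 'openai' as a purely hypothetical aiId:

import { alan } from 'utilitas';

// Hypothetical provider id; valid values depend on how the AI clients
// were initialized in your application.
const aiId = 'openai';

// Old call: listOpenAIModels(options)
// New call: the provider id comes first, options stay last.
const models = await alan.listOpenAIModels(aiId);
console.log(models.map(model => model.id));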
@@ -1139,8 +1135,8 @@ const promptAnthropic = async (aiId, content, options = {}) => {
     return packResp({ text: mergeMsgs(toolsResponse, tool_use) }, options);
 };
 
-const uploadFile = async (input, options) => {
-    const { client } = await
+const uploadFile = async (aiId, input, options) => {
+    const { client } = await getAi(aiId);
     const { content: file, cleanup } = await convert(input, {
         input: options?.input, ...options || {}, expected: STREAM,
         errorMessage: INVALID_FILE, suffix: options?.suffix,
@@ -1151,20 +1147,20 @@ const uploadFile = async (input, options) => {
     return resp;
 };
 
-const uploadFileForFineTuning = async (content, options) => await uploadFile(
-    content, { suffix: 'jsonl', ...options, params: { purpose: 'fine-tune' } }
+const uploadFileForFineTuning = async (aiId, content, options) => await uploadFile(
+    aiId, content, { suffix: 'jsonl', ...options, params: { purpose: 'fine-tune' } }
 );
 
-const listFiles = async (options) => {
-    const { client } = await
+const listFiles = async (aiId, options) => {
+    const { client } = await getAi(aiId);
     const files = [];
     const list = await client.files.list(options?.params || {});
     for await (const file of list) { files.push(file); }
     return files;
 };
 
-const deleteFile = async (file_id, options) => {
-    const { client } = await
+const deleteFile = async (aiId, file_id, options) => {
+    const { client } = await getAi(aiId);
     return await client.files.del(file_id);
 };
 
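The same pattern applies to the file helpers: uploadFile, uploadFileForFineTuning, listFiles, and deleteFile each gain aiId ahead of their existing parameters. A hedged sketch of the adjusted call shapes, assuming these helpers are exported, that a local './cases.jsonl' path is an input accepted by the underlying convert, and that the upload response exposes an OpenAI-style id field:

import { alan } from 'utilitas';

const aiId = 'openai'; // hypothetical provider id

// Upload a JSONL training file, list the account's files, then clean up.
const uploaded = await alan.uploadFileForFineTuning(aiId, './cases.jsonl');
const files = await alan.listFiles(aiId);
console.log(files.length, 'files on the account');
await alan.deleteFile(aiId, uploaded.id);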
@@ -1294,48 +1290,48 @@ const buildGptTrainingCases = (cases, opts) => cases.map(x => JSON.stringify(
     buildGptTrainingCase(x.prompt, x.response, { ...x.options, ...opts })
 )).join('\n');
 
-const createGptFineTuningJob = async (training_file, options) => {
-    const { client } = await
+const createGptFineTuningJob = async (aiId, training_file, options) => {
+    const { client } = await getAi(aiId);
     return await client.fineTuning.jobs.create({
         training_file, model: options?.model || DEFAULT_MODELS[OPENAI_TRAINING],
     })
 };
 
-const getGptFineTuningJob = async (job_id, options) => {
-    const { client } = await
+const getGptFineTuningJob = async (aiId, job_id, options) => {
+    const { client } = await getAi(aiId);
     // https://platform.openai.com/finetune/[job_id]?filter=all
     return await client.fineTuning.jobs.retrieve(job_id);
 };
 
-const cancelGptFineTuningJob = async (job_id, options) => {
-    const { client } = await
+const cancelGptFineTuningJob = async (aiId, job_id, options) => {
+    const { client } = await getAi(aiId);
     return await client.fineTuning.jobs.cancel(job_id);
 };
 
-const listGptFineTuningJobs = async (options) => {
-    const { client } = await
+const listGptFineTuningJobs = async (aiId, options) => {
+    const { client } = await getAi(aiId);
     const resp = await client.fineTuning.jobs.list({
         limit: GPT_QUERY_LIMIT, ...options?.params
     });
     return options?.raw ? resp : resp.data;
 };
 
-const listGptFineTuningEvents = async (job_id, options) => {
-    const { client } = await
+const listGptFineTuningEvents = async (aiId, job_id, options) => {
+    const { client } = await getAi(aiId);
     const resp = await client.fineTuning.jobs.listEvents(job_id, {
         limit: GPT_QUERY_LIMIT, ...options?.params,
     });
     return options?.raw ? resp : resp.data;
 };
 
-const tailGptFineTuningEvents = async (job_id, options) => {
+const tailGptFineTuningEvents = async (aiId, job_id, options) => {
     assert(job_id, 'Job ID is required.');
     const [loopName, listOpts] = [`GPT - ${job_id} `, {
         ...options, params: { ...options?.params, order: 'ascending' }
     }];
     let lastEvent;
     return await loop(async () => {
-        const resp = await listGptFineTuningEvents(job_id, {
+        const resp = await listGptFineTuningEvents(aiId, job_id, {
             ...listOpts, params: {
                 ...listOpts?.params,
                 ...(lastEvent ? { after: lastEvent.id } : {}),
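The fine-tuning job helpers in the last hunk follow suit, so a training workflow now threads the same aiId through every step. A sketch of one possible flow under the same assumptions (exported helpers, hypothetical aiId, and a placeholder training-file id):

import { alan } from 'utilitas';

const aiId = 'openai';                      // hypothetical provider id
const trainingFileId = 'file-placeholder';  // e.g. the id returned by uploadFileForFineTuning

const job = await alan.createGptFineTuningJob(aiId, trainingFileId);
const status = await alan.getGptFineTuningJob(aiId, job.id);
const events = await alan.listGptFineTuningEvents(aiId, job.id);
console.log(status.status, events.length);
// To stop a running job: await alan.cancelGptFineTuningJob(aiId, job.id);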
package/lib/manifest.mjs
CHANGED