utilitas 1998.2.17 → 1998.2.19
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +7 -4
- package/dist/utilitas.lite.mjs +1 -1
- package/dist/utilitas.lite.mjs.map +1 -1
- package/lib/alan.mjs +107 -53
- package/lib/manifest.mjs +2 -1
- package/lib/storage.mjs +12 -24
- package/lib/vision.mjs +6 -4
- package/package.json +2 -1
package/lib/alan.mjs
CHANGED

```diff
@@ -1,37 +1,42 @@
+import { fileTypeFromBuffer } from 'file-type';
+import { end, loop } from './event.mjs';
+import { createWavHeader } from './media.mjs';
 import { BASE64, BUFFER, DATAURL, MIME_BINARY, STREAM, convert } from './storage.mjs';
 import { create as createUoid } from './uoid.mjs';
-import { createWavHeader } from './media.mjs';
-import { end, loop } from './event.mjs';
-import { fileTypeFromBuffer } from 'file-type';
 
 import {
+    log as _log,
+    renderText as _renderText,
     base64Encode, ensureArray, ensureString, extract, ignoreErrFunc,
-
+    need, parseJson,
+    throwError,
 } from './utilitas.mjs';
 
 const _NEED = [
-    '@anthropic-ai/sdk', '@
-    'OpenAI',
+    '@anthropic-ai/sdk', '@anthropic-ai/vertex-sdk', '@google/generative-ai',
+    'js-tiktoken', 'ollama', 'OpenAI',
 ];
 
 const [
     OPENAI, GEMINI, CHATGPT, OPENAI_EMBEDDING, GEMINI_EMEDDING, OPENAI_TRAINING,
     OLLAMA, CLAUDE, GPT_4O_MINI, GPT_4O, GPT_O1, GPT_O3_MINI, GEMINI_20_FLASH,
-    GEMINI_20_FLASH_THINKING, NOVA, EMBEDDING_001, DEEPSEEK_R1,
-
-
+    GEMINI_20_FLASH_THINKING, GEMINI_20_PRO, NOVA, EMBEDDING_001, DEEPSEEK_R1,
+    DEEPSEEK_R1_32B, DEEPSEEK_R1_70B, MD_CODE, CHATGPT_REASONING,
+    TEXT_EMBEDDING_3_SMALL, TEXT_EMBEDDING_3_LARGE, CLAUDE_35_SONNET,
+    CLAUDE_35_HAIKU, CLOUD_37_SONNET, AUDIO, WAV, CHATGPT_MINI, ATTACHMENTS,
     CHAT, OPENAI_VOICE, MEDIUM, LOW, HIGH, GPT_REASONING_EFFORT, THINK,
-    THINK_STR, THINK_END,
+    THINK_STR, THINK_END, AZURE,
 ] = [
     'OPENAI', 'GEMINI', 'CHATGPT', 'OPENAI_EMBEDDING', 'GEMINI_EMEDDING',
-    'OPENAI_TRAINING', 'OLLAMA', 'CLAUDE', 'gpt-4o-mini', 'gpt-4o',
-    '
-    'gemini-2.0-
-    'deepseek-r1:32b', '
-    'text-embedding-3-
-    'claude-3-5-
+    'OPENAI_TRAINING', 'OLLAMA', 'CLAUDE', 'gpt-4o-mini', 'gpt-4o', 'o1',
+    'o3-mini', 'gemini-2.0-flash', 'gemini-2.0-flash-thinking-exp',
+    'gemini-2.0-pro-exp', 'nova', 'embedding-001', 'deepseek-r1',
+    'deepseek-r1:32b', 'deepseek-r1:70b', '```', 'CHATGPT_REASONING',
+    'text-embedding-3-small', 'text-embedding-3-large',
+    'claude-3-5-sonnet-latest', 'claude-3-5-haiku-latest',
+    'claude-3-7-sonnet@20250219', 'audio', 'wav', 'CHATGPT_MINI',
     '[ATTACHMENTS]', 'CHAT', 'OPENAI_VOICE', 'medium', 'low', 'high',
-    'medium', 'think', '<think>', '</think>',
+    'medium', 'think', '<think>', '</think>', 'AZURE',
 ];
 
 const [
```

```diff
@@ -81,10 +86,11 @@ const DEFAULT_MODELS = {
     [CHATGPT_MINI]: GPT_4O_MINI,
     [CHATGPT_REASONING]: GPT_O3_MINI,
     [CHATGPT]: GPT_4O,
-    [CLAUDE]:
+    [CLAUDE]: CLOUD_37_SONNET,
     [GEMINI_EMEDDING]: EMBEDDING_001,
     [GEMINI]: GEMINI_20_FLASH,
     [OLLAMA]: DEEPSEEK_R1,
+    [AZURE]: DEEPSEEK_R1,
     [OPENAI_EMBEDDING]: TEXT_EMBEDDING_3_SMALL,
     [OPENAI_TRAINING]: GPT_4O_MINI, // https://platform.openai.com/docs/guides/fine-tuning
     [OPENAI_VOICE]: NOVA,
```

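With these defaults, the CLAUDE engine now falls back to Claude 3.7 Sonnet and the new AZURE engine to DeepSeek R1 when no model is given. A small sketch of reading the resolved defaults through the exported `DEFAULT_MODELS` map; the `utilitas` entry point used in the import is an assumption, adjust it to however you consume the library:

```js
// Sketch only: inspect the provider defaults added in 1998.2.19.
import { alan } from 'utilitas'; // assumed entry point for lib/alan.mjs

console.log(alan.DEFAULT_MODELS.CLAUDE); // 'claude-3-7-sonnet@20250219'
console.log(alan.DEFAULT_MODELS.AZURE);  // 'deepseek-r1'
```
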
```diff
@@ -201,7 +207,7 @@ const MODELS = {
         maxVideoLengthWithAudio: 60 * 50, // 50 minutes
         maxVideoLengthWithoutAudio: 60 * 60, // 1 hour
         maxVideoPerPrompt: 10,
-        requestLimitsRPM:
+        requestLimitsRPM: 2000,
         requestLimitsRPD: 1500,
         tokenLimitsTPM: 4 * 1000000,
         trainingData: 'August 2024',
```

```diff
@@ -221,7 +227,7 @@ const MODELS = {
         maxImageSize: Infinity,
         maxOutputTokens: 1024 * 8,
         maxUrlSize: 1024 * 1024 * 1024 * 2, // 2 GB
-        requestLimitsRPM:
+        requestLimitsRPM: 1000,
         requestLimitsRPD: 1500,
         tokenLimitsTPM: 4 * 1000000,
         trainingData: 'August 2024',
```

```diff
@@ -232,8 +238,27 @@ const MODELS = {
             png, jpeg,
         ],
     },
+    [GEMINI_20_PRO]: {
+        contextWindow: 2097152,
+        imageCostTokens: size8k / (768 * 768) * 258,
+        maxFileSize: 20 * 1024 * 1024, // 20 MB
+        maxImagePerPrompt: 3000,
+        maxImageSize: Infinity,
+        maxOutputTokens: 1024 * 8,
+        maxUrlSize: 1024 * 1024 * 1024 * 2, // 2 GB
+        requestLimitsRPM: 1000,
+        requestLimitsRPD: 1500,
+        tokenLimitsTPM: 4 * 1000000,
+        trainingData: 'August 2024',
+        vision: true,
+        json: true,
+        supportedMimeTypes: [
+            png, jpeg, mov, mpeg, mp4, mpg, avi, wmv, mpegps, flv, pdf, aac,
+            flac, mp3, m4a, mpga, opus, pcm, wav, webm, tgpp,
+        ],
+    },
     [DEEPSEEK_R1]: {
-        contextWindow:
+        contextWindow: 128 * 1000,
         maxOutputTokens: 32768,
         requestLimitsRPM: Infinity,
         tokenLimitsTPM: Infinity,
```

```diff
@@ -279,10 +304,29 @@ const MODELS = {
             png, jpeg, gif, webp, pdf,
         ],
     },
+    // https://console.cloud.google.com/vertex-ai/publishers/anthropic/model-garden/claude-3-7-sonnet?authuser=5&inv=1&invt=Abqftg&project=backend-alpha-97077
+    [CLOUD_37_SONNET]: {
+        contextWindow: 200 * 1000,
+        maxOutputTokens: 64 * 1000, // Should be 128 * 1000, but Anthropic SDK limits it to 64 * 1000
+        imageCostTokens: size8k / 750,
+        documentCostTokens: 3000 * 100, // 100 pages: https://docs.anthropic.com/en/docs/build-with-claude/pdf-support
+        maxImagePerPrompt: 5, // https://docs.anthropic.com/en/docs/build-with-claude/vision
+        maxImageSize: 1092, // by pixels
+        maxDocumentPages: 100,
+        maxDocumentFile: 1024 * 1024 * 32, // 32MB
+        requestLimitsRPM: 50,
+        tokenLimitsITPM: 40000,
+        tokenLimitsOTPM: 8000,
+        trainingData: 'Apr 2024', // ?
+        supportedMimeTypes: [
+            png, jpeg, gif, webp, pdf,
+        ],
+    },
 };
 
 MODELS[CLAUDE_35_HAIKU] = MODELS[CLAUDE_35_SONNET];
 MODELS[DEEPSEEK_R1_32B] = MODELS[DEEPSEEK_R1];
+MODELS[DEEPSEEK_R1_70B] = MODELS[DEEPSEEK_R1];
 
 for (const n in MODELS) {
     MODELS[n]['name'] = n;
```

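The MODELS entries added here are plain metadata objects keyed by model id, and the alias assignments after the table make the DeepSeek R1 variants share a single entry. Since `MODELS` appears in the consolidated export list later in this diff, consumers can read the new limits directly. A brief sketch, with the same assumed `utilitas` import as above:

```js
import { alan } from 'utilitas'; // assumed entry point

const sonnet37 = alan.MODELS['claude-3-7-sonnet@20250219'];
console.log(sonnet37.contextWindow);   // 200 * 1000
console.log(sonnet37.maxOutputTokens); // 64 * 1000, capped by the Anthropic SDK per the comment above

// The aliases point at one object, so the 70B entry mirrors deepseek-r1:
console.log(alan.MODELS['deepseek-r1:70b'] === alan.MODELS['deepseek-r1']); // true
```
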
```diff
@@ -321,10 +365,14 @@ const unifyType = (type, name) => {
 const init = async (options) => {
     const provider = unifyProvider(options);
     switch (provider) {
-        case OPENAI:
+        case OPENAI: case AZURE:
             if (options?.apiKey) {
-
-
+                provider === AZURE && assert(
+                    options?.baseURL, 'Azure api endpoint is required.'
+                );
+                const libOpenAI = await need('openai', { raw: true });
+                const openai = new (options?.endpoint && options?.deployment
+                    ? libOpenAI.AzureOpenAI : libOpenAI.OpenAI)(options);
                 clients[provider] = { client: openai, clientBeta: openai.beta };
             }
             break;
```

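In the new `case OPENAI: case AZURE:` branch, the AZURE provider reuses the OpenAI client setup: an `apiKey` is still required, `baseURL` is asserted for Azure, and the `AzureOpenAI` class is picked over plain `OpenAI` only when both `endpoint` and `deployment` are supplied. A hedged usage sketch built from the option names in the hunk above; it assumes `init` resolves the provider from `options.provider` via `unifyProvider`, and the endpoint, deployment and import path are placeholders:

```js
import { alan } from 'utilitas'; // assumed entry point

// Sketch: initialize the AZURE provider added in this release.
await alan.init({
    provider: 'AZURE',
    apiKey: process.env.AZURE_OPENAI_API_KEY,     // required, same as for OPENAI
    baseURL: 'https://example.openai.azure.com',  // asserted when the provider is AZURE
    endpoint: 'https://example.openai.azure.com', // endpoint + deployment together
    deployment: 'deepseek-r1',                    // select the AzureOpenAI client
});
```
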
```diff
@@ -354,9 +402,20 @@ const init = async (options) => {
             }
             break;
         case CLAUDE:
-            if (options?.apiKey) {
-
-                const
+            if (options?.apiKey || (options?.credentials && options?.projectId)) {
+                // https://github.com/anthropics/anthropic-sdk-typescript/tree/main/packages/vertex-sdk
+                const Anthropic = (await need(options?.credentials
+                    ? '@anthropic-ai/vertex-sdk' : '@anthropic-ai/sdk', { raw: true }))[
+                    options?.credentials ? 'AnthropicVertex' : 'Anthropic'
+                ];
+                if (options?.credentials) {
+                    process.env['GOOGLE_APPLICATION_CREDENTIALS'] = options.credentials;
+                    process.env['ANTHROPIC_VERTEX_PROJECT_ID'] = options.projectId;
+                }
+                const anthropic = new Anthropic({
+                    ...options?.apiKey ? { apiKey: options.apiKey } : {},
+                    ...options?.credentials ? { region: options?.region || 'us-east5' } : {},
+                });
                 clients[provider] = { client: anthropic };
             }
             break;
```

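The CLAUDE branch now accepts either a plain API key or Vertex AI credentials: with `credentials` and `projectId` set, it loads `@anthropic-ai/vertex-sdk`, exports the GOOGLE_APPLICATION_CREDENTIALS and ANTHROPIC_VERTEX_PROJECT_ID variables the SDK expects, and defaults the region to `us-east5`. A hedged sketch of both paths; the file path and project id are placeholders, and the import path is assumed as above:

```js
import { alan } from 'utilitas'; // assumed entry point

// Path 1: direct Anthropic API, unchanged.
await alan.init({ provider: 'CLAUDE', apiKey: process.env.ANTHROPIC_API_KEY });

// Path 2: Claude on Vertex AI, new in this release.
await alan.init({
    provider: 'CLAUDE',
    credentials: '/path/to/service-account.json', // placeholder; becomes GOOGLE_APPLICATION_CREDENTIALS
    projectId: 'my-gcp-project',                  // placeholder; becomes ANTHROPIC_VERTEX_PROJECT_ID
    region: 'us-east5',                           // optional; this is the default used above
});
```
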
```diff
@@ -592,7 +651,9 @@ const promptChatGPT = async (content, options = {}) => {
     // Structured Outputs: https://openai.com/index/introducing-structured-outputs-in-the-api/
     client.baseURL !== OPENAI_BASE_URL
         && options?.attachments?.length && (options.attachments = []);
-    if (options?.model) { } else if (options?.
+    if (options?.model) { } else if (options?.provider === AZURE) {
+        options.model = DEFAULT_MODELS[AZURE];
+    } else if (options?.reasoning) {
         options.model = DEFAULT_MODELS[CHATGPT_REASONING];
     } else {
         options.model = DEFAULT_MODELS[CHATGPT];
```

```diff
@@ -652,6 +713,10 @@ const promptChatGPT = async (content, options = {}) => {
     return await packGptResp(chunk, options);
 };
 
+const promptAzure = async (content, options = {}) => await promptChatGPT(
+    content, { ...options, provider: AZURE }
+);
+
 const promptOllama = async (content, options = {}) => {
     const { client, model } = await getOllamaClient(options);
     // https://github.com/ollama/ollama-js
```

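`promptAzure` is a thin wrapper that forwards to `promptChatGPT` with `provider: AZURE`, so an unset model resolves to `DEFAULT_MODELS[AZURE]` (DeepSeek R1) through the branch added a few hunks up, while an explicit `model` short-circuits that selection. A usage sketch; the `.text` field on the response follows how `talk` reads `resp.text` later in this file, so treat that shape as an assumption for direct calls:

```js
import { alan } from 'utilitas'; // assumed entry point

// See the AZURE init sketch above; promptAzure needs that client to exist first.
await alan.init({
    provider: 'AZURE',
    apiKey: process.env.AZURE_OPENAI_API_KEY,
    baseURL: 'https://example.openai.azure.com', // placeholder
});

const resp = await alan.promptAzure('Say hello in one short sentence.');
console.log(resp.text);

// An explicit model overrides the DeepSeek R1 default:
await alan.promptAzure('Same question.', { model: 'deepseek-r1:70b' });
```
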
```diff
@@ -920,7 +985,6 @@ const initChat = async (options) => {
         assert(model, `Invalid chat model: '${i}'.`);
         chatConfig.engines[key] = options.engines[i];
         chatConfig.engines[key].model = chatConfig.engines[key].model || model;
-        print(chatConfig.engines[key].model);
         const mxPmpt = MODELS[chatConfig.engines[key].model].maxInputTokens / 2;
         const pmptTokens = await countTokens([buildGeminiHistory(
             chatConfig.systemPrompt, { role: system }
```

```diff
@@ -987,7 +1051,7 @@ const talk = async (input, options) => {
     const session = await getSession(sessionId, { engine, ...options });
     let [resp, sys, messages, msgBuilder] = [null, [], [], null];
     switch (engine) {
-        case CHATGPT:
+        case CHATGPT: case AZURE:
             sys.push(buildGptMessage(session.systemPrompt, { role: system }));
             msgBuilder = () => {
                 messages = [];
```

```diff
@@ -1069,6 +1133,9 @@ const talk = async (input, options) => {
         case OLLAMA:
             resp = await promptOllama(input, { messages, model, ...options });
             break;
+        case AZURE:
+            resp = await promptAzure(input, { messages, model, ...options });
+            break;
     }
     chat.response = resp.text;
     chat?.request && chat?.response && session.messages.push(chat);
```

```diff
@@ -1189,39 +1256,27 @@ const analyzeSessions = async (sessionIds, options) => {
 };
 
 const PREFERRED_ENGINES = [
-    { client: OPENAI, func: promptChatGPT, multimodal:
-    { client: GEMINI, func: promptGemini, multimodal:
+    { client: OPENAI, func: promptChatGPT, multimodal: 0 },
+    { client: GEMINI, func: promptGemini, multimodal: 1 },
     { client: CLAUDE, func: promptClaude, multimodal: 2 },
+    { client: AZURE, func: promptAzure, multimodal: 3 },
     { client: OLLAMA, func: promptOllama, multimodal: 99 },
 ]; // keep gpt first to avoid gemini grounding by default
 
 export default init;
 export {
-
-
-    CODE_INTERPRETER,
-    DEFAULT_MODELS,
+    ATTACHMENT_TOKEN_COST, CLOUD_37_SONNET, CODE_INTERPRETER, DEEPSEEK_R1,
+    DEEPSEEK_R1_32B, DEEPSEEK_R1_70B, DEFAULT_MODELS,
     EMBEDDING_001,
-    FUNCTION,
-    GEMINI_20_FLASH_THINKING,
-    GEMINI_20_FLASH,
-    GPT_4O_MINI,
-    GPT_4O,
-    GPT_O3_MINI,
-    GPT_O1,
-    DEEPSEEK_R1,
-    DEEPSEEK_R1_32B,
-    MODELS,
+    FUNCTION, GEMINI_20_FLASH, GEMINI_20_FLASH_THINKING, GPT_4O, GPT_4O_MINI, GPT_O1, GPT_O3_MINI, MODELS,
     OPENAI_VOICE,
     RETRIEVAL,
-    TEXT_EMBEDDING_3_SMALL,
-    analyzeSessions,
+    TEXT_EMBEDDING_3_SMALL, _NEED, analyzeSessions,
     buildGptTrainingCase,
     buildGptTrainingCases,
     cancelGptFineTuningJob,
     countTokens,
-    createGeminiEmbedding,
-    createGptFineTuningJob,
+    createGeminiEmbedding, createGptFineTuningJob,
     createOpenAIEmbedding,
     deleteFile,
     distillFile,
```

```diff
@@ -1236,8 +1291,7 @@ export {
     listGptFineTuningJobs,
     listOpenAIModels,
     ogg,
-    prompt,
-    promptChatGPT,
+    prompt, promptAzure, promptChatGPT,
     promptClaude,
     promptGemini,
     promptOllama,
```

```diff
@@ -1247,5 +1301,5 @@ export {
     trimPrompt,
     uploadFile,
     uploadFileForFineTuning,
-    wav
+    wav
 };
```

package/lib/manifest.mjs
CHANGED

```diff
@@ -1,7 +1,7 @@
 const manifest = {
     "name": "utilitas",
     "description": "Just another common utility for JavaScript.",
-    "version": "1998.2.17",
+    "version": "1998.2.19",
     "private": false,
     "homepage": "https://github.com/Leask/utilitas",
     "main": "index.mjs",
```

```diff
@@ -25,6 +25,7 @@ const manifest = {
     },
     "devDependencies": {
         "@anthropic-ai/sdk": "^0.36.3",
+        "@anthropic-ai/vertex-sdk": "^0.7.0",
         "@ffmpeg-installer/ffmpeg": "^1.1.0",
         "@ffprobe-installer/ffprobe": "^2.1.2",
         "@google-cloud/speech": "^6.7.0",
```

package/lib/storage.mjs
CHANGED

```diff
@@ -1,16 +1,17 @@
 import {
+    log as _log,
     base64Decode, base64Encode, ensureString, extract, ignoreErrFunc,
-
+    mergeAtoB, need, throwError, trim, voidFunc, which,
 } from './utilitas.mjs';
 
-import { basename, extname, join } from 'path';
-import { constants as consts, createReadStream, promises as fs, readSync } from 'fs';
-import { defaultAlgorithm, hash } from './encryption.mjs';
-import { deflate as __zip, unzip as __unzip } from 'zlib';
 import { fileTypeFromBuffer } from 'file-type';
+import { constants as consts, createReadStream, promises as fs, readSync } from 'fs';
 import { homedir, tmpdir } from 'os';
+import { basename, extname, join } from 'path';
 import { promisify } from 'util';
 import { v4 as uuidv4 } from 'uuid';
+import { unzip as __unzip, deflate as __zip } from 'zlib';
+import { defaultAlgorithm, hash } from './encryption.mjs';
 
 const _NEED = ['file-type', 'mime-types', '@google-cloud/storage'];
 const errorMessage = 'Invalid file.';
```

```diff
@@ -467,26 +468,15 @@ const deleteOnCloud = async (path, options) => {
 };
 
 export {
-    _NEED,
-    BASE64,
-    BUFFER,
-    DATAURL,
-    FILE,
-    MIME_BINARY,
-    STREAM,
-    analyzeFile,
-    assertPath,
-    blobToBuffer,
-    convert,
-    decodeBase64DataURL,
+    _NEED, analyzeFile,
+    assertPath, BASE64, blobToBuffer, BUFFER, convert, DATAURL, decodeBase64DataURL,
     deleteFileOnCloud,
     deleteOnCloud,
     downloadFileFromCloud,
     downloadFromCloud,
     encodeBase64DataURL,
     exists,
-    existsOnCloud,
-    getConfig,
+    existsOnCloud, FILE, getConfig,
     getConfigFilename,
     getGcUrlByBucket,
     getIdByGs,
```

```diff
@@ -497,18 +487,16 @@ export {
     legalFilename,
     lsOnCloud,
     mapFilename,
-    mergeFile,
-    readFile,
+    mergeFile, MIME_BINARY, readFile,
     readJson,
     sanitizeFilename,
     setConfig,
-    sliceFile,
-    touchPath,
+    sliceFile, STREAM, touchPath,
     tryRm,
     unzip,
     uploadToCloud,
     writeFile,
     writeJson,
     writeTempFile,
-    zip
+    zip
 };
```

package/lib/vision.mjs
CHANGED

```diff
@@ -3,14 +3,16 @@ import {
 } from './storage.mjs';
 
 import {
-
+    log as _log,
+    ensureArray, ensureString, ignoreErrFunc,
+    need, throwError,
     trim,
 } from './utilitas.mjs';
 
-import { v4 as uuidv4 } from 'uuid';
-import { getApiKeyCredentials } from './encryption.mjs';
 import fs from 'node:fs';
 import path from 'node:path';
+import { v4 as uuidv4 } from 'uuid';
+import { getApiKeyCredentials } from './encryption.mjs';
 
 const _NEED = [
     '@google-cloud/vision', 'office-text-extractor', 'pdfjs-dist',
```

```diff
@@ -268,5 +270,5 @@ export {
     parseOfficeFile,
     read,
     readAll,
-    see
+    see
 };
```

package/package.json
CHANGED

```diff
@@ -1,7 +1,7 @@
 {
     "name": "utilitas",
     "description": "Just another common utility for JavaScript.",
-    "version": "1998.2.17",
+    "version": "1998.2.19",
     "private": false,
     "homepage": "https://github.com/Leask/utilitas",
     "main": "index.mjs",
```

```diff
@@ -36,6 +36,7 @@
     },
     "devDependencies": {
         "@anthropic-ai/sdk": "^0.36.3",
+        "@anthropic-ai/vertex-sdk": "^0.7.0",
         "@ffmpeg-installer/ffmpeg": "^1.1.0",
         "@ffprobe-installer/ffprobe": "^2.1.2",
         "@google-cloud/speech": "^6.7.0",
```