utilitas 1999.1.58 → 1999.1.60
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -3
- package/dist/utilitas.lite.mjs +1 -1
- package/dist/utilitas.lite.mjs.map +1 -1
- package/lib/gen.mjs +82 -13
- package/lib/manifest.mjs +1 -1
- package/package.json +1 -1
package/lib/gen.mjs
CHANGED
@@ -1,9 +1,10 @@
 import {
-    ensureString, log as _log, need, throwError, tryUntil,
+    ensureArray, ensureString, log as _log, need, throwError, tryUntil,
 } from './utilitas.mjs';
 
-import { convert, MIME_PNG } from './storage.mjs';
 import { assertCommand, exec } from './shell.mjs';
+import { convert, MIME_PNG } from './storage.mjs';
+import { createReadStream } from 'fs';
 
 const _NEED = ['OpenAI'];
 const log = (cnt, opt) => _log(cnt, import.meta.url, { time: 1, ...opt || {} });
@@ -25,7 +26,10 @@ const init = async (options) => {
         case OPENAI:
             const OpenAI = await need('openai');
             const openai = new OpenAI(options);
-            clients[provider] =
+            clients[provider] = {
+                image: openai.images,
+                toFile: OpenAI.toFile,
+            };
             break;
         case GEMINI:
             clients[provider] = {
@@ -48,7 +52,20 @@ const extractVideo = async (data, options) => await convert(
     data, { input: BASE64, suffix: 'mp4', ...options || {} }
 );
 
-const generateImage = async (prompt, options) => {
+const prepareImage = async (files, repack, options) => {
+    if (!files) { return }
+    const multiple = Array.isArray(files);
+    files = ensureArray(files);
+    const resp = await Promise.all(files.map(async x => await repack(
+        createReadStream(await convert(
+            x, { expected: 'FILE', ...options || {} }
+        )), null, { type: MIME_PNG } // don't need to be right MIME type
+    )));
+    return multiple ? resp : resp[0];
+};
+
+
+const image = async (prompt, options) => {
     let provider = ensureString(options?.provider, { case: 'UP' });
     if (!provider && clients?.[GEMINI]?.apiKey) { provider = GEMINI; }
     if (!provider && clients?.[OPENAI]) { provider = OPENAI; }
@@ -64,33 +81,86 @@ const generateImage = async (prompt, options) => {
     };
     switch (provider) {
         case OPENAI:
+            let [func, extraOptions] = ['generate', {}];
+            if (options?.reference || options?.mask) {
+                func = 'edit';
+                extraOptions = {
+                    image: await prepareImage(options?.reference, client.toFile, options),
+                    mask: await prepareImage(options?.mask, client.toFile, options),
+                };
+            }
             try { // https://platform.openai.com/docs/guides/image-generation?image-generation-model=gpt-image-1
-                var resp = await client.
+                var resp = await client.image[func]({
                     prompt, model: OPENAI_MODEL, n, quality: 'high',
                     size: '1536x1024', moderation: 'low',
                     // 1024x1024 (square), 1536x1024 (landscape), 1024x1536 (portrait), auto (default)
                     // background: 'transparent',
-                    ...options?.params || {},
+                    ...extraOptions, ...options?.params || {},
                 });
             } catch (err) { throwError(err?.message || ERROR_GENERATING); }
            if (!options?.raw) {
                 resp.data = await Promise.all(resp.data.map(async x => ({
                     caption: `🎨 by ${OPENAI_MODEL}`,
-                    data: await extractImage(x.b64_json,
+                    data: await extractImage(x.b64_json, {
+                        ...options || {}, input: BASE64,
+                    }),
                     mimeType: MIME_PNG,
                 })));
             }
             return resp?.data;
         case GEMINI:
+            // Image editing failed with the following error: imagen-3.0-capability-001 is unavailable.
+            // @todo: https://cloud.google.com/vertex-ai/generative-ai/docs/image/overview#feature-launch-stage
+            // cat << EOF > request.json
+            // {
+            //     "endpoint": "projects/backend-alpha-97077/locations/us-central1/publishers/google/models/imagen-3.0-capability-001",
+            //     "instances": [
+            //         {
+            //             "prompt": "ENTER PROMPT HERE",
+            //             "referenceImages": [
+            //                 {
+            //                     "referenceId": 1,
+            //                     "referenceType": "REFERENCE_TYPE_SUBJECT",
+            //                     "referenceImage": {
+            //                         "bytesBase64Encoded":
+            //                     },
+            //                     "subjectImageConfig" {
+            //                         "subjectDescription": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx",
+            //                         "subjectType": "SUBJECT_TYPE_DEFAULT"
+            //                     }
+            //                 }
+            //             ],
+            //         }
+            //     ],
+            //     "parameters": {
+            //         "aspectRatio": "1:1",
+            //         "sampleCount": 4,
+            //         "negativePrompt": "",
+            //         "enhancePrompt": false,
+            //         "personGeneration": "",
+            //         "safetySetting": "",
+            //         "addWatermark": true,
+            //         "includeRaiReason": true,
+            //         "language": "auto",
+            //     }
+            // }
+            // curl \
+            // -X POST \
+            // -H "Content-Type: application/json" \
+            // -H "Authorization: Bearer $(gcloud auth print-access-token)" \
+            // "https://${API_ENDPOINT}/v1/projects/${PROJECT_ID}/locations/${LOCATION_ID}/publishers/google/models/${MODEL_ID}:predict" -d '@request.json'
+            // ARGs: https://cloud.google.com/vertex-ai/generative-ai/docs/model-reference/imagen-api?authuser=4#rest_1
             var resp = await (await fetch(
                 'https://generativelanguage.googleapis.com/v1beta/models/'
                 + `${IMAGEN_MODEL}:predict?key=${client.apiKey}`, {
                 method: 'POST', headers: { 'Content-Type': 'application/json' },
                 body: JSON.stringify({
                     instances: [{ prompt }], parameters: {
-
+                        // "1:1" (default), "3:4", "4:3", "9:16", and "16:9"
+                        aspectRatio: '16:9', includeRaiReason: true,
+                        personGeneration: 'allow_adult', sampleCount: n,
                         ...options?.params || {},
-                    },
+                    },
                 })
             })).json();
             assert(!resp?.error, resp?.error?.message || ERROR_GENERATING);
@@ -140,7 +210,7 @@ const getGeminiVideo = async (jobId, accessToken) => {
     return resp?.response?.videos;
 };
 
-const generateVideo = async (prompt, options) => {
+const video = async (prompt, options) => {
     let provider = ensureString(options?.provider, { case: 'UP' });
     if (!provider
         && clients?.[GEMINI]?.credentials
@@ -198,8 +268,7 @@ const generateVideo = async (prompt, options) => {
 export default init;
 export {
     _NEED,
-    generateImage,
-    generateVideo,
-    getGeminiVideo,
+    image,
     init,
+    video,
 };
package/lib/manifest.mjs
CHANGED