@workglow/tasks 0.2.17 → 0.2.18
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/browser.d.ts +2 -2
- package/dist/browser.d.ts.map +1 -1
- package/dist/browser.js +1636 -1287
- package/dist/browser.js.map +58 -26
- package/dist/bun.d.ts +2 -2
- package/dist/bun.d.ts.map +1 -1
- package/dist/bun.js +1382 -1318
- package/dist/bun.js.map +56 -26
- package/dist/codec.browser.d.ts +39 -0
- package/dist/codec.browser.d.ts.map +1 -0
- package/dist/codec.node.d.ts +37 -0
- package/dist/codec.node.d.ts.map +1 -0
- package/dist/common.d.ts +39 -38
- package/dist/common.d.ts.map +1 -1
- package/dist/electron.d.ts +2 -2
- package/dist/electron.d.ts.map +1 -1
- package/dist/electron.js +1382 -1318
- package/dist/electron.js.map +56 -26
- package/dist/node.d.ts +2 -2
- package/dist/node.d.ts.map +1 -1
- package/dist/node.js +1382 -1318
- package/dist/node.js.map +56 -26
- package/dist/task/image/ImageFilterTask.d.ts +24 -0
- package/dist/task/image/ImageFilterTask.d.ts.map +1 -0
- package/dist/task/image/ImageSchemas.d.ts +15 -101
- package/dist/task/image/ImageSchemas.d.ts.map +1 -1
- package/dist/task/image/blur/ImageBlurTask.d.ts +29 -0
- package/dist/task/image/blur/ImageBlurTask.d.ts.map +1 -0
- package/dist/task/image/blur/blur.cpu.d.ts +4 -0
- package/dist/task/image/blur/blur.cpu.d.ts.map +1 -0
- package/dist/task/image/blur/blur.sharp.d.ts +2 -0
- package/dist/task/image/blur/blur.sharp.d.ts.map +1 -0
- package/dist/task/image/blur/blur.webgpu.d.ts +2 -0
- package/dist/task/image/blur/blur.webgpu.d.ts.map +1 -0
- package/dist/task/image/border/ImageBorderTask.d.ts +35 -0
- package/dist/task/image/border/ImageBorderTask.d.ts.map +1 -0
- package/dist/task/image/border/border.cpu.d.ts +10 -0
- package/dist/task/image/border/border.cpu.d.ts.map +1 -0
- package/dist/task/image/border/border.sharp.d.ts +2 -0
- package/dist/task/image/border/border.sharp.d.ts.map +1 -0
- package/dist/task/image/border/border.webgpu.d.ts +2 -0
- package/dist/task/image/border/border.webgpu.d.ts.map +1 -0
- package/dist/task/image/brightness/ImageBrightnessTask.d.ts +28 -0
- package/dist/task/image/brightness/ImageBrightnessTask.d.ts.map +1 -0
- package/dist/task/image/brightness/brightness.cpu.d.ts +4 -0
- package/dist/task/image/brightness/brightness.cpu.d.ts.map +1 -0
- package/dist/task/image/brightness/brightness.sharp.d.ts +2 -0
- package/dist/task/image/brightness/brightness.sharp.d.ts.map +1 -0
- package/dist/task/image/brightness/brightness.webgpu.d.ts +2 -0
- package/dist/task/image/brightness/brightness.webgpu.d.ts.map +1 -0
- package/dist/task/image/contrast/ImageContrastTask.d.ts +28 -0
- package/dist/task/image/contrast/ImageContrastTask.d.ts.map +1 -0
- package/dist/task/image/contrast/contrast.cpu.d.ts +4 -0
- package/dist/task/image/contrast/contrast.cpu.d.ts.map +1 -0
- package/dist/task/image/contrast/contrast.sharp.d.ts +2 -0
- package/dist/task/image/contrast/contrast.sharp.d.ts.map +1 -0
- package/dist/task/image/contrast/contrast.webgpu.d.ts +2 -0
- package/dist/task/image/contrast/contrast.webgpu.d.ts.map +1 -0
- package/dist/task/image/crop/ImageCropTask.d.ts +32 -0
- package/dist/task/image/crop/ImageCropTask.d.ts.map +1 -0
- package/dist/task/image/crop/crop.cpu.d.ts +7 -0
- package/dist/task/image/crop/crop.cpu.d.ts.map +1 -0
- package/dist/task/image/crop/crop.sharp.d.ts +2 -0
- package/dist/task/image/crop/crop.sharp.d.ts.map +1 -0
- package/dist/task/image/crop/crop.webgpu.d.ts +2 -0
- package/dist/task/image/crop/crop.webgpu.d.ts.map +1 -0
- package/dist/task/image/flip/ImageFlipTask.d.ts +28 -0
- package/dist/task/image/flip/ImageFlipTask.d.ts.map +1 -0
- package/dist/task/image/flip/flip.cpu.d.ts +4 -0
- package/dist/task/image/flip/flip.cpu.d.ts.map +1 -0
- package/dist/task/image/flip/flip.sharp.d.ts +2 -0
- package/dist/task/image/flip/flip.sharp.d.ts.map +1 -0
- package/dist/task/image/flip/flip.webgpu.d.ts +2 -0
- package/dist/task/image/flip/flip.webgpu.d.ts.map +1 -0
- package/dist/task/image/grayscale/ImageGrayscaleTask.d.ts +25 -0
- package/dist/task/image/grayscale/ImageGrayscaleTask.d.ts.map +1 -0
- package/dist/task/image/grayscale/grayscale.cpu.d.ts +2 -0
- package/dist/task/image/grayscale/grayscale.cpu.d.ts.map +1 -0
- package/dist/task/image/grayscale/grayscale.sharp.d.ts +2 -0
- package/dist/task/image/grayscale/grayscale.sharp.d.ts.map +1 -0
- package/dist/task/image/grayscale/grayscale.webgpu.d.ts +2 -0
- package/dist/task/image/grayscale/grayscale.webgpu.d.ts.map +1 -0
- package/dist/task/image/imageCodecLimits.d.ts +8 -4
- package/dist/task/image/imageCodecLimits.d.ts.map +1 -1
- package/dist/task/image/invert/ImageInvertTask.d.ts +25 -0
- package/dist/task/image/invert/ImageInvertTask.d.ts.map +1 -0
- package/dist/task/image/invert/invert.cpu.d.ts +2 -0
- package/dist/task/image/invert/invert.cpu.d.ts.map +1 -0
- package/dist/task/image/invert/invert.sharp.d.ts +2 -0
- package/dist/task/image/invert/invert.sharp.d.ts.map +1 -0
- package/dist/task/image/invert/invert.webgpu.d.ts +2 -0
- package/dist/task/image/invert/invert.webgpu.d.ts.map +1 -0
- package/dist/task/image/pixelate/ImagePixelateTask.d.ts +29 -0
- package/dist/task/image/pixelate/ImagePixelateTask.d.ts.map +1 -0
- package/dist/task/image/pixelate/pixelate.cpu.d.ts +4 -0
- package/dist/task/image/pixelate/pixelate.cpu.d.ts.map +1 -0
- package/dist/task/image/pixelate/pixelate.sharp.d.ts +2 -0
- package/dist/task/image/pixelate/pixelate.sharp.d.ts.map +1 -0
- package/dist/task/image/pixelate/pixelate.webgpu.d.ts +2 -0
- package/dist/task/image/pixelate/pixelate.webgpu.d.ts.map +1 -0
- package/dist/task/image/posterize/ImagePosterizeTask.d.ts +28 -0
- package/dist/task/image/posterize/ImagePosterizeTask.d.ts.map +1 -0
- package/dist/task/image/posterize/posterize.cpu.d.ts +4 -0
- package/dist/task/image/posterize/posterize.cpu.d.ts.map +1 -0
- package/dist/task/image/posterize/posterize.webgpu.d.ts +2 -0
- package/dist/task/image/posterize/posterize.webgpu.d.ts.map +1 -0
- package/dist/task/image/resize/ImageResizeTask.d.ts +32 -0
- package/dist/task/image/resize/ImageResizeTask.d.ts.map +1 -0
- package/dist/task/image/resize/resize.cpu.d.ts +7 -0
- package/dist/task/image/resize/resize.cpu.d.ts.map +1 -0
- package/dist/task/image/resize/resize.sharp.d.ts +2 -0
- package/dist/task/image/resize/resize.sharp.d.ts.map +1 -0
- package/dist/task/image/resize/resize.webgpu.d.ts +2 -0
- package/dist/task/image/resize/resize.webgpu.d.ts.map +1 -0
- package/dist/task/image/rotate/ImageRotateTask.d.ts +29 -0
- package/dist/task/image/rotate/ImageRotateTask.d.ts.map +1 -0
- package/dist/task/image/rotate/rotate.cpu.d.ts +5 -0
- package/dist/task/image/rotate/rotate.cpu.d.ts.map +1 -0
- package/dist/task/image/rotate/rotate.sharp.d.ts +2 -0
- package/dist/task/image/rotate/rotate.sharp.d.ts.map +1 -0
- package/dist/task/image/rotate/rotate.webgpu.d.ts +2 -0
- package/dist/task/image/rotate/rotate.webgpu.d.ts.map +1 -0
- package/dist/task/image/sepia/ImageSepiaTask.d.ts +25 -0
- package/dist/task/image/sepia/ImageSepiaTask.d.ts.map +1 -0
- package/dist/task/image/sepia/sepia.cpu.d.ts +2 -0
- package/dist/task/image/sepia/sepia.cpu.d.ts.map +1 -0
- package/dist/task/image/sepia/sepia.sharp.d.ts +2 -0
- package/dist/task/image/sepia/sepia.sharp.d.ts.map +1 -0
- package/dist/task/image/sepia/sepia.webgpu.d.ts +2 -0
- package/dist/task/image/sepia/sepia.webgpu.d.ts.map +1 -0
- package/dist/task/image/{ImageTextTask.d.ts → text/ImageTextTask.d.ts} +15 -102
- package/dist/task/image/text/ImageTextTask.d.ts.map +1 -0
- package/dist/task/image/threshold/ImageThresholdTask.d.ts +28 -0
- package/dist/task/image/threshold/ImageThresholdTask.d.ts.map +1 -0
- package/dist/task/image/threshold/threshold.cpu.d.ts +4 -0
- package/dist/task/image/threshold/threshold.cpu.d.ts.map +1 -0
- package/dist/task/image/threshold/threshold.sharp.d.ts +2 -0
- package/dist/task/image/threshold/threshold.sharp.d.ts.map +1 -0
- package/dist/task/image/threshold/threshold.webgpu.d.ts +2 -0
- package/dist/task/image/threshold/threshold.webgpu.d.ts.map +1 -0
- package/dist/task/image/tint/ImageTintTask.d.ts +30 -0
- package/dist/task/image/tint/ImageTintTask.d.ts.map +1 -0
- package/dist/task/image/tint/tint.cpu.d.ts +11 -0
- package/dist/task/image/tint/tint.cpu.d.ts.map +1 -0
- package/dist/task/image/tint/tint.sharp.d.ts +2 -0
- package/dist/task/image/tint/tint.sharp.d.ts.map +1 -0
- package/dist/task/image/tint/tint.webgpu.d.ts +2 -0
- package/dist/task/image/tint/tint.webgpu.d.ts.map +1 -0
- package/dist/task/image/transparency/ImageTransparencyTask.d.ts +28 -0
- package/dist/task/image/transparency/ImageTransparencyTask.d.ts.map +1 -0
- package/dist/task/image/transparency/transparency.cpu.d.ts +4 -0
- package/dist/task/image/transparency/transparency.cpu.d.ts.map +1 -0
- package/dist/task/image/transparency/transparency.webgpu.d.ts +2 -0
- package/dist/task/image/transparency/transparency.webgpu.d.ts.map +1 -0
- package/package.json +9 -9
- package/dist/task/image/ImageBlurTask.d.ts +0 -248
- package/dist/task/image/ImageBlurTask.d.ts.map +0 -1
- package/dist/task/image/ImageBorderTask.d.ts +0 -328
- package/dist/task/image/ImageBorderTask.d.ts.map +0 -1
- package/dist/task/image/ImageBrightnessTask.d.ts +0 -248
- package/dist/task/image/ImageBrightnessTask.d.ts.map +0 -1
- package/dist/task/image/ImageContrastTask.d.ts +0 -248
- package/dist/task/image/ImageContrastTask.d.ts.map +0 -1
- package/dist/task/image/ImageCropTask.d.ts +0 -280
- package/dist/task/image/ImageCropTask.d.ts.map +0 -1
- package/dist/task/image/ImageFlipTask.d.ts +0 -244
- package/dist/task/image/ImageFlipTask.d.ts.map +0 -1
- package/dist/task/image/ImageGrayscaleTask.d.ts +0 -232
- package/dist/task/image/ImageGrayscaleTask.d.ts.map +0 -1
- package/dist/task/image/ImageInvertTask.d.ts +0 -232
- package/dist/task/image/ImageInvertTask.d.ts.map +0 -1
- package/dist/task/image/ImagePixelateTask.d.ts +0 -246
- package/dist/task/image/ImagePixelateTask.d.ts.map +0 -1
- package/dist/task/image/ImagePosterizeTask.d.ts +0 -248
- package/dist/task/image/ImagePosterizeTask.d.ts.map +0 -1
- package/dist/task/image/ImageResizeTask.d.ts +0 -256
- package/dist/task/image/ImageResizeTask.d.ts.map +0 -1
- package/dist/task/image/ImageRotateTask.d.ts +0 -244
- package/dist/task/image/ImageRotateTask.d.ts.map +0 -1
- package/dist/task/image/ImageSepiaTask.d.ts +0 -232
- package/dist/task/image/ImageSepiaTask.d.ts.map +0 -1
- package/dist/task/image/ImageTextTask.d.ts.map +0 -1
- package/dist/task/image/ImageThresholdTask.d.ts +0 -248
- package/dist/task/image/ImageThresholdTask.d.ts.map +0 -1
- package/dist/task/image/ImageTintTask.d.ts +0 -330
- package/dist/task/image/ImageTintTask.d.ts.map +0 -1
- package/dist/task/image/ImageTransparencyTask.d.ts +0 -246
- package/dist/task/image/ImageTransparencyTask.d.ts.map +0 -1
- package/dist/task/image/ImageWatermarkTask.d.ts +0 -276
- package/dist/task/image/ImageWatermarkTask.d.ts.map +0 -1
- package/dist/task/image/imageTaskIo.d.ts +0 -20
- package/dist/task/image/imageTaskIo.d.ts.map +0 -1
package/dist/browser.js
CHANGED
|
@@ -12,7 +12,7 @@ import { registerImageRasterCodec } from "@workglow/util/media";
|
|
|
12
12
|
// src/task/image/imageCodecLimits.ts
|
|
13
13
|
var MAX_DECODED_PIXELS = 1e8;
|
|
14
14
|
var MAX_INPUT_BYTES_NODE = 64 * 1024 * 1024;
|
|
15
|
-
var MAX_INPUT_BYTES_BROWSER =
|
|
15
|
+
var MAX_INPUT_BYTES_BROWSER = 32 * 1024 * 1024;
|
|
16
16
|
var REJECTED_DECODE_MIME_TYPES = new Set([
|
|
17
17
|
"image/svg+xml",
|
|
18
18
|
"image/svg",
|
|
@@ -176,6 +176,890 @@ function createBrowserImageRasterCodec() {
|
|
|
176
176
|
// src/task/image/registerImageRasterCodec.browser.ts
|
|
177
177
|
registerImageRasterCodec(createBrowserImageRasterCodec());
|
|
178
178
|
|
|
179
|
+
// src/task/image/blur/blur.cpu.ts
|
|
180
|
+
import { CpuImage, registerFilterOp } from "@workglow/util/media";
|
|
181
|
+
function cpuBoxBlur(bin, radius) {
|
|
182
|
+
const { data: src, width, height, channels } = bin;
|
|
183
|
+
const kernelSize = radius * 2 + 1;
|
|
184
|
+
const tmp = new Uint8ClampedArray(src.length);
|
|
185
|
+
for (let y = 0;y < height; y++) {
|
|
186
|
+
for (let c = 0;c < channels; c++) {
|
|
187
|
+
let sum = 0;
|
|
188
|
+
for (let k = -radius;k <= radius; k++) {
|
|
189
|
+
const x = Math.max(0, Math.min(k, width - 1));
|
|
190
|
+
sum += src[(y * width + x) * channels + c];
|
|
191
|
+
}
|
|
192
|
+
tmp[y * width * channels + c] = sum / kernelSize + 0.5 | 0;
|
|
193
|
+
for (let x = 1;x < width; x++) {
|
|
194
|
+
const addX = Math.min(x + radius, width - 1);
|
|
195
|
+
const removeX = Math.max(x - radius - 1, 0);
|
|
196
|
+
sum += src[(y * width + addX) * channels + c] - src[(y * width + removeX) * channels + c];
|
|
197
|
+
tmp[(y * width + x) * channels + c] = sum / kernelSize + 0.5 | 0;
|
|
198
|
+
}
|
|
199
|
+
}
|
|
200
|
+
}
|
|
201
|
+
const dst = new Uint8ClampedArray(src.length);
|
|
202
|
+
for (let x = 0;x < width; x++) {
|
|
203
|
+
for (let c = 0;c < channels; c++) {
|
|
204
|
+
let sum = 0;
|
|
205
|
+
for (let k = -radius;k <= radius; k++) {
|
|
206
|
+
const y = Math.max(0, Math.min(k, height - 1));
|
|
207
|
+
sum += tmp[(y * width + x) * channels + c];
|
|
208
|
+
}
|
|
209
|
+
dst[x * channels + c] = sum / kernelSize + 0.5 | 0;
|
|
210
|
+
for (let y = 1;y < height; y++) {
|
|
211
|
+
const addY = Math.min(y + radius, height - 1);
|
|
212
|
+
const removeY = Math.max(y - radius - 1, 0);
|
|
213
|
+
sum += tmp[(addY * width + x) * channels + c] - tmp[(removeY * width + x) * channels + c];
|
|
214
|
+
dst[(y * width + x) * channels + c] = sum / kernelSize + 0.5 | 0;
|
|
215
|
+
}
|
|
216
|
+
}
|
|
217
|
+
}
|
|
218
|
+
return { data: dst, width, height, channels };
|
|
219
|
+
}
|
|
220
|
+
registerFilterOp("cpu", "blur", (image, { radius }) => {
|
|
221
|
+
return CpuImage.fromImageBinary(cpuBoxBlur(image.getBinary(), Math.max(1, radius | 0)));
|
|
222
|
+
});
|
|
223
|
+
|
|
224
|
+
// src/task/image/border/border.cpu.ts
|
|
225
|
+
import { CpuImage as CpuImage2, registerFilterOp as registerFilterOp2, resolveColor } from "@workglow/util/media";
|
|
226
|
+
function cpuBorder(bin, borderWidth, color) {
|
|
227
|
+
const { data: src, width: srcW, height: srcH, channels: srcCh } = bin;
|
|
228
|
+
const bw = borderWidth;
|
|
229
|
+
const resolved = resolveColor(color);
|
|
230
|
+
const outCh = 4;
|
|
231
|
+
const dstW = srcW + bw * 2;
|
|
232
|
+
const dstH = srcH + bw * 2;
|
|
233
|
+
const dst = new Uint8ClampedArray(dstW * dstH * outCh);
|
|
234
|
+
const r = resolved.r;
|
|
235
|
+
const g = resolved.g;
|
|
236
|
+
const b = resolved.b;
|
|
237
|
+
const a = resolved.a;
|
|
238
|
+
for (let i = 0;i < dst.length; i += outCh) {
|
|
239
|
+
dst[i] = r;
|
|
240
|
+
dst[i + 1] = g;
|
|
241
|
+
dst[i + 2] = b;
|
|
242
|
+
dst[i + 3] = a;
|
|
243
|
+
}
|
|
244
|
+
for (let y = 0;y < srcH; y++) {
|
|
245
|
+
for (let x = 0;x < srcW; x++) {
|
|
246
|
+
const srcIdx = (y * srcW + x) * srcCh;
|
|
247
|
+
const dstIdx = ((y + bw) * dstW + (x + bw)) * outCh;
|
|
248
|
+
dst[dstIdx] = src[srcIdx];
|
|
249
|
+
dst[dstIdx + 1] = srcCh >= 3 ? src[srcIdx + 1] : src[srcIdx];
|
|
250
|
+
dst[dstIdx + 2] = srcCh >= 3 ? src[srcIdx + 2] : src[srcIdx];
|
|
251
|
+
dst[dstIdx + 3] = srcCh === 4 ? src[srcIdx + 3] : 255;
|
|
252
|
+
}
|
|
253
|
+
}
|
|
254
|
+
return { data: dst, width: dstW, height: dstH, channels: outCh };
|
|
255
|
+
}
|
|
256
|
+
registerFilterOp2("cpu", "border", (image, { borderWidth, color }) => {
|
|
257
|
+
return CpuImage2.fromImageBinary(cpuBorder(image.getBinary(), borderWidth, color));
|
|
258
|
+
});
|
|
259
|
+
|
|
260
|
+
// src/task/image/brightness/brightness.cpu.ts
|
|
261
|
+
import { CpuImage as CpuImage3, registerFilterOp as registerFilterOp3 } from "@workglow/util/media";
|
|
262
|
+
function cpuBrightness(bin, amount) {
|
|
263
|
+
const { data: src, width, height, channels } = bin;
|
|
264
|
+
const dst = new Uint8ClampedArray(src.length);
|
|
265
|
+
if (channels === 4) {
|
|
266
|
+
for (let i = 0;i < src.length; i += 4) {
|
|
267
|
+
dst[i] = src[i] + amount;
|
|
268
|
+
dst[i + 1] = src[i + 1] + amount;
|
|
269
|
+
dst[i + 2] = src[i + 2] + amount;
|
|
270
|
+
dst[i + 3] = src[i + 3];
|
|
271
|
+
}
|
|
272
|
+
} else {
|
|
273
|
+
for (let i = 0;i < src.length; i++) {
|
|
274
|
+
dst[i] = src[i] + amount;
|
|
275
|
+
}
|
|
276
|
+
}
|
|
277
|
+
return { data: dst, width, height, channels };
|
|
278
|
+
}
|
|
279
|
+
registerFilterOp3("cpu", "brightness", (image, { amount }) => {
|
|
280
|
+
return CpuImage3.fromImageBinary(cpuBrightness(image.getBinary(), amount));
|
|
281
|
+
});
|
|
282
|
+
|
|
283
|
+
// src/task/image/contrast/contrast.cpu.ts
|
|
284
|
+
import { CpuImage as CpuImage4, registerFilterOp as registerFilterOp4 } from "@workglow/util/media";
|
|
285
|
+
function cpuContrast(bin, amount) {
|
|
286
|
+
const { data: src, width, height, channels } = bin;
|
|
287
|
+
const factor = 259 * (amount + 255) / (255 * (259 - amount));
|
|
288
|
+
const lut = new Uint8ClampedArray(256);
|
|
289
|
+
for (let i = 0;i < 256; i++) {
|
|
290
|
+
lut[i] = factor * (i - 128) + 128;
|
|
291
|
+
}
|
|
292
|
+
const dst = new Uint8ClampedArray(src.length);
|
|
293
|
+
if (channels === 4) {
|
|
294
|
+
for (let i = 0;i < src.length; i += 4) {
|
|
295
|
+
dst[i] = lut[src[i]];
|
|
296
|
+
dst[i + 1] = lut[src[i + 1]];
|
|
297
|
+
dst[i + 2] = lut[src[i + 2]];
|
|
298
|
+
dst[i + 3] = src[i + 3];
|
|
299
|
+
}
|
|
300
|
+
} else {
|
|
301
|
+
for (let i = 0;i < src.length; i++) {
|
|
302
|
+
dst[i] = lut[src[i]];
|
|
303
|
+
}
|
|
304
|
+
}
|
|
305
|
+
return { data: dst, width, height, channels };
|
|
306
|
+
}
|
|
307
|
+
registerFilterOp4("cpu", "contrast", (image, { amount }) => {
|
|
308
|
+
return CpuImage4.fromImageBinary(cpuContrast(image.getBinary(), amount));
|
|
309
|
+
});
|
|
310
|
+
|
|
311
|
+
// src/task/image/crop/crop.cpu.ts
|
|
312
|
+
import { CpuImage as CpuImage5, registerFilterOp as registerFilterOp5 } from "@workglow/util/media";
|
|
313
|
+
function cpuCrop(bin, left, top, width, height) {
|
|
314
|
+
const { data: src, width: srcW, height: srcH, channels } = bin;
|
|
315
|
+
if (srcW < 1 || srcH < 1) {
|
|
316
|
+
throw new RangeError("Cannot crop an empty image");
|
|
317
|
+
}
|
|
318
|
+
if (left < 0 || left >= srcW || top < 0 || top >= srcH) {
|
|
319
|
+
throw new RangeError("Crop origin is outside the source image bounds");
|
|
320
|
+
}
|
|
321
|
+
const w = Math.min(width, srcW - left);
|
|
322
|
+
const h = Math.min(height, srcH - top);
|
|
323
|
+
const dst = new Uint8ClampedArray(w * h * channels);
|
|
324
|
+
const rowBytes = w * channels;
|
|
325
|
+
for (let row = 0;row < h; row++) {
|
|
326
|
+
const srcOffset = ((top + row) * srcW + left) * channels;
|
|
327
|
+
const dstOffset = row * rowBytes;
|
|
328
|
+
dst.set(src.subarray(srcOffset, srcOffset + rowBytes), dstOffset);
|
|
329
|
+
}
|
|
330
|
+
return { data: dst, width: w, height: h, channels };
|
|
331
|
+
}
|
|
332
|
+
registerFilterOp5("cpu", "crop", (image, { left, top, width, height }) => {
|
|
333
|
+
return CpuImage5.fromImageBinary(cpuCrop(image.getBinary(), left, top, width, height));
|
|
334
|
+
});
|
|
335
|
+
|
|
336
|
+
// src/task/image/flip/flip.cpu.ts
|
|
337
|
+
import { CpuImage as CpuImage6, registerFilterOp as registerFilterOp6 } from "@workglow/util/media";
|
|
338
|
+
function cpuFlip(bin, direction) {
|
|
339
|
+
const { data: src, width, height, channels } = bin;
|
|
340
|
+
const dst = new Uint8ClampedArray(src.length);
|
|
341
|
+
const rowBytes = width * channels;
|
|
342
|
+
if (direction === "vertical") {
|
|
343
|
+
for (let y = 0;y < height; y++) {
|
|
344
|
+
const srcOffset = y * rowBytes;
|
|
345
|
+
const dstOffset = (height - 1 - y) * rowBytes;
|
|
346
|
+
dst.set(src.subarray(srcOffset, srcOffset + rowBytes), dstOffset);
|
|
347
|
+
}
|
|
348
|
+
} else {
|
|
349
|
+
for (let y = 0;y < height; y++) {
|
|
350
|
+
for (let x = 0;x < width; x++) {
|
|
351
|
+
const srcIdx = (y * width + x) * channels;
|
|
352
|
+
const dstIdx = (y * width + (width - 1 - x)) * channels;
|
|
353
|
+
for (let c = 0;c < channels; c++) {
|
|
354
|
+
dst[dstIdx + c] = src[srcIdx + c];
|
|
355
|
+
}
|
|
356
|
+
}
|
|
357
|
+
}
|
|
358
|
+
}
|
|
359
|
+
return { data: dst, width, height, channels };
|
|
360
|
+
}
|
|
361
|
+
registerFilterOp6("cpu", "flip", (image, { direction }) => {
|
|
362
|
+
return CpuImage6.fromImageBinary(cpuFlip(image.getBinary(), direction));
|
|
363
|
+
});
|
|
364
|
+
|
|
365
|
+
// src/task/image/grayscale/grayscale.cpu.ts
|
|
366
|
+
import { CpuImage as CpuImage7, registerFilterOp as registerFilterOp7 } from "@workglow/util/media";
|
|
367
|
+
function cpuGrayscale(bin) {
|
|
368
|
+
const { data: src, width, height, channels } = bin;
|
|
369
|
+
const pixelCount = width * height;
|
|
370
|
+
const dst = new Uint8ClampedArray(pixelCount * 4);
|
|
371
|
+
for (let i = 0;i < pixelCount; i++) {
|
|
372
|
+
const idx = i * channels;
|
|
373
|
+
let g;
|
|
374
|
+
if (channels === 1) {
|
|
375
|
+
g = src[idx];
|
|
376
|
+
} else {
|
|
377
|
+
g = src[idx] * 77 + src[idx + 1] * 150 + src[idx + 2] * 29 >> 8;
|
|
378
|
+
}
|
|
379
|
+
const a = channels === 4 ? src[idx + 3] : 255;
|
|
380
|
+
const dstIdx = i * 4;
|
|
381
|
+
dst[dstIdx] = g;
|
|
382
|
+
dst[dstIdx + 1] = g;
|
|
383
|
+
dst[dstIdx + 2] = g;
|
|
384
|
+
dst[dstIdx + 3] = a;
|
|
385
|
+
}
|
|
386
|
+
return { data: dst, width, height, channels: 4 };
|
|
387
|
+
}
|
|
388
|
+
registerFilterOp7("cpu", "grayscale", (image, _params) => {
|
|
389
|
+
return CpuImage7.fromImageBinary(cpuGrayscale(image.getBinary()));
|
|
390
|
+
});
|
|
391
|
+
|
|
392
|
+
// src/task/image/invert/invert.cpu.ts
|
|
393
|
+
import { CpuImage as CpuImage8, registerFilterOp as registerFilterOp8 } from "@workglow/util/media";
|
|
394
|
+
function cpuInvert(bin) {
|
|
395
|
+
const { data: src, width, height, channels } = bin;
|
|
396
|
+
const dst = new Uint8ClampedArray(src.length);
|
|
397
|
+
if (channels === 4) {
|
|
398
|
+
for (let i = 0;i < src.length; i += 4) {
|
|
399
|
+
dst[i] = 255 - src[i];
|
|
400
|
+
dst[i + 1] = 255 - src[i + 1];
|
|
401
|
+
dst[i + 2] = 255 - src[i + 2];
|
|
402
|
+
dst[i + 3] = src[i + 3];
|
|
403
|
+
}
|
|
404
|
+
} else {
|
|
405
|
+
for (let i = 0;i < src.length; i++) {
|
|
406
|
+
dst[i] = 255 - src[i];
|
|
407
|
+
}
|
|
408
|
+
}
|
|
409
|
+
return { data: dst, width, height, channels };
|
|
410
|
+
}
|
|
411
|
+
registerFilterOp8("cpu", "invert", (image, _params) => {
|
|
412
|
+
return CpuImage8.fromImageBinary(cpuInvert(image.getBinary()));
|
|
413
|
+
});
|
|
414
|
+
|
|
415
|
+
// src/task/image/pixelate/pixelate.cpu.ts
|
|
416
|
+
import { CpuImage as CpuImage9, registerFilterOp as registerFilterOp9 } from "@workglow/util/media";
|
|
417
|
+
function cpuPixelate(bin, blockSize) {
|
|
418
|
+
const { data: src, width, height, channels } = bin;
|
|
419
|
+
const dst = new Uint8ClampedArray(src.length);
|
|
420
|
+
for (let by = 0;by < height; by += blockSize) {
|
|
421
|
+
const blockH = Math.min(blockSize, height - by);
|
|
422
|
+
for (let bx = 0;bx < width; bx += blockSize) {
|
|
423
|
+
const blockW = Math.min(blockSize, width - bx);
|
|
424
|
+
const blockArea = blockW * blockH;
|
|
425
|
+
const sums = new Array(channels).fill(0);
|
|
426
|
+
for (let y = by;y < by + blockH; y++) {
|
|
427
|
+
for (let x = bx;x < bx + blockW; x++) {
|
|
428
|
+
const idx = (y * width + x) * channels;
|
|
429
|
+
for (let c = 0;c < channels; c++) {
|
|
430
|
+
sums[c] += src[idx + c];
|
|
431
|
+
}
|
|
432
|
+
}
|
|
433
|
+
}
|
|
434
|
+
const avg = sums.map((s) => s / blockArea + 0.5 | 0);
|
|
435
|
+
for (let y = by;y < by + blockH; y++) {
|
|
436
|
+
for (let x = bx;x < bx + blockW; x++) {
|
|
437
|
+
const idx = (y * width + x) * channels;
|
|
438
|
+
for (let c = 0;c < channels; c++) {
|
|
439
|
+
dst[idx + c] = avg[c];
|
|
440
|
+
}
|
|
441
|
+
}
|
|
442
|
+
}
|
|
443
|
+
}
|
|
444
|
+
}
|
|
445
|
+
return { data: dst, width, height, channels };
|
|
446
|
+
}
|
|
447
|
+
registerFilterOp9("cpu", "pixelate", (image, { blockSize }) => {
|
|
448
|
+
return CpuImage9.fromImageBinary(cpuPixelate(image.getBinary(), blockSize));
|
|
449
|
+
});
|
|
450
|
+
|
|
451
|
+
// src/task/image/posterize/posterize.cpu.ts
|
|
452
|
+
import { CpuImage as CpuImage10, registerFilterOp as registerFilterOp10 } from "@workglow/util/media";
|
|
453
|
+
function cpuPosterize(bin, levels) {
|
|
454
|
+
const { data: src, width, height, channels } = bin;
|
|
455
|
+
const step = 255 / (levels - 1);
|
|
456
|
+
const lut = new Uint8ClampedArray(256);
|
|
457
|
+
for (let i = 0;i < 256; i++) {
|
|
458
|
+
lut[i] = Math.round(Math.round(i / step) * step);
|
|
459
|
+
}
|
|
460
|
+
const dst = new Uint8ClampedArray(src.length);
|
|
461
|
+
if (channels === 4) {
|
|
462
|
+
for (let i = 0;i < src.length; i += 4) {
|
|
463
|
+
dst[i] = lut[src[i]];
|
|
464
|
+
dst[i + 1] = lut[src[i + 1]];
|
|
465
|
+
dst[i + 2] = lut[src[i + 2]];
|
|
466
|
+
dst[i + 3] = src[i + 3];
|
|
467
|
+
}
|
|
468
|
+
} else {
|
|
469
|
+
for (let i = 0;i < src.length; i++) {
|
|
470
|
+
dst[i] = lut[src[i]];
|
|
471
|
+
}
|
|
472
|
+
}
|
|
473
|
+
return { data: dst, width, height, channels };
|
|
474
|
+
}
|
|
475
|
+
registerFilterOp10("cpu", "posterize", (image, { levels }) => {
|
|
476
|
+
return CpuImage10.fromImageBinary(cpuPosterize(image.getBinary(), levels));
|
|
477
|
+
});
|
|
478
|
+
|
|
479
|
+
// src/task/image/resize/resize.cpu.ts
|
|
480
|
+
import { CpuImage as CpuImage11, registerFilterOp as registerFilterOp11 } from "@workglow/util/media";
|
|
481
|
+
function cpuResize(bin, dstW, dstH) {
|
|
482
|
+
const { data: src, width: srcW, height: srcH, channels } = bin;
|
|
483
|
+
const dst = new Uint8ClampedArray(dstW * dstH * channels);
|
|
484
|
+
for (let dy = 0;dy < dstH; dy++) {
|
|
485
|
+
const srcY = Math.min(Math.floor(dy * srcH / dstH), srcH - 1);
|
|
486
|
+
for (let dx = 0;dx < dstW; dx++) {
|
|
487
|
+
const srcX = Math.min(Math.floor(dx * srcW / dstW), srcW - 1);
|
|
488
|
+
const srcIdx = (srcY * srcW + srcX) * channels;
|
|
489
|
+
const dstIdx = (dy * dstW + dx) * channels;
|
|
490
|
+
for (let c = 0;c < channels; c++) {
|
|
491
|
+
dst[dstIdx + c] = src[srcIdx + c];
|
|
492
|
+
}
|
|
493
|
+
}
|
|
494
|
+
}
|
|
495
|
+
return { data: dst, width: dstW, height: dstH, channels };
|
|
496
|
+
}
|
|
497
|
+
registerFilterOp11("cpu", "resize", (image, { width, height }) => {
|
|
498
|
+
return CpuImage11.fromImageBinary(cpuResize(image.getBinary(), width, height));
|
|
499
|
+
});
|
|
500
|
+
|
|
501
|
+
// src/task/image/rotate/rotate.cpu.ts
|
|
502
|
+
import { CpuImage as CpuImage12, registerFilterOp as registerFilterOp12 } from "@workglow/util/media";
|
|
503
|
+
function cpuRotate(bin, angle) {
|
|
504
|
+
const { data: src, width: srcW, height: srcH, channels } = bin;
|
|
505
|
+
const swap = angle === 90 || angle === 270;
|
|
506
|
+
const dstW = swap ? srcH : srcW;
|
|
507
|
+
const dstH = swap ? srcW : srcH;
|
|
508
|
+
const dst = new Uint8ClampedArray(dstW * dstH * channels);
|
|
509
|
+
for (let sy = 0;sy < srcH; sy++) {
|
|
510
|
+
for (let sx = 0;sx < srcW; sx++) {
|
|
511
|
+
let dx, dy;
|
|
512
|
+
if (angle === 90) {
|
|
513
|
+
dx = srcH - 1 - sy;
|
|
514
|
+
dy = sx;
|
|
515
|
+
} else if (angle === 180) {
|
|
516
|
+
dx = srcW - 1 - sx;
|
|
517
|
+
dy = srcH - 1 - sy;
|
|
518
|
+
} else {
|
|
519
|
+
dx = sy;
|
|
520
|
+
dy = srcW - 1 - sx;
|
|
521
|
+
}
|
|
522
|
+
const srcIdx = (sy * srcW + sx) * channels;
|
|
523
|
+
const dstIdx = (dy * dstW + dx) * channels;
|
|
524
|
+
for (let c = 0;c < channels; c++) {
|
|
525
|
+
dst[dstIdx + c] = src[srcIdx + c];
|
|
526
|
+
}
|
|
527
|
+
}
|
|
528
|
+
}
|
|
529
|
+
return { data: dst, width: dstW, height: dstH, channels };
|
|
530
|
+
}
|
|
531
|
+
registerFilterOp12("cpu", "rotate", (image, { angle }) => {
|
|
532
|
+
return CpuImage12.fromImageBinary(cpuRotate(image.getBinary(), angle));
|
|
533
|
+
});
|
|
534
|
+
|
|
535
|
+
// src/task/image/sepia/sepia.cpu.ts
|
|
536
|
+
import { CpuImage as CpuImage13, registerFilterOp as registerFilterOp13 } from "@workglow/util/media";
|
|
537
|
+
function cpuSepia(bin) {
|
|
538
|
+
const { data: src, width, height, channels } = bin;
|
|
539
|
+
const dst = new Uint8ClampedArray(src.length);
|
|
540
|
+
const pixelCount = width * height;
|
|
541
|
+
for (let i = 0;i < pixelCount; i++) {
|
|
542
|
+
const idx = i * channels;
|
|
543
|
+
const r = src[idx];
|
|
544
|
+
const g = channels === 1 ? r : src[idx + 1];
|
|
545
|
+
const b = channels === 1 ? r : src[idx + 2];
|
|
546
|
+
const outR = r * 402 + g * 787 + b * 194 >> 10;
|
|
547
|
+
const outG = r * 357 + g * 702 + b * 172 >> 10;
|
|
548
|
+
const outB = r * 279 + g * 547 + b * 134 >> 10;
|
|
549
|
+
dst[idx] = outR > 255 ? 255 : outR;
|
|
550
|
+
if (channels >= 3) {
|
|
551
|
+
dst[idx + 1] = outG > 255 ? 255 : outG;
|
|
552
|
+
dst[idx + 2] = outB > 255 ? 255 : outB;
|
|
553
|
+
}
|
|
554
|
+
if (channels === 4) {
|
|
555
|
+
dst[idx + 3] = src[idx + 3];
|
|
556
|
+
}
|
|
557
|
+
}
|
|
558
|
+
return { data: dst, width, height, channels };
|
|
559
|
+
}
|
|
560
|
+
registerFilterOp13("cpu", "sepia", (image, _params) => {
|
|
561
|
+
return CpuImage13.fromImageBinary(cpuSepia(image.getBinary()));
|
|
562
|
+
});
|
|
563
|
+
|
|
564
|
+
// src/task/image/threshold/threshold.cpu.ts
|
|
565
|
+
import { CpuImage as CpuImage14, registerFilterOp as registerFilterOp14 } from "@workglow/util/media";
|
|
566
|
+
function cpuThreshold(bin, value) {
|
|
567
|
+
const { data: src, width, height, channels } = bin;
|
|
568
|
+
const pixelCount = width * height;
|
|
569
|
+
const dst = new Uint8ClampedArray(pixelCount * channels);
|
|
570
|
+
for (let i = 0;i < pixelCount; i++) {
|
|
571
|
+
const idx = i * channels;
|
|
572
|
+
if (channels === 1) {
|
|
573
|
+
dst[idx] = src[idx] >= value ? 255 : 0;
|
|
574
|
+
} else {
|
|
575
|
+
dst[idx] = src[idx] >= value ? 255 : 0;
|
|
576
|
+
dst[idx + 1] = src[idx + 1] >= value ? 255 : 0;
|
|
577
|
+
dst[idx + 2] = src[idx + 2] >= value ? 255 : 0;
|
|
578
|
+
if (channels === 4) {
|
|
579
|
+
dst[idx + 3] = src[idx + 3];
|
|
580
|
+
}
|
|
581
|
+
}
|
|
582
|
+
}
|
|
583
|
+
return { data: dst, width, height, channels };
|
|
584
|
+
}
|
|
585
|
+
registerFilterOp14("cpu", "threshold", (image, { value }) => {
|
|
586
|
+
return CpuImage14.fromImageBinary(cpuThreshold(image.getBinary(), value));
|
|
587
|
+
});
|
|
588
|
+
|
|
589
|
+
// src/task/image/tint/tint.cpu.ts
|
|
590
|
+
import {
|
|
591
|
+
CpuImage as CpuImage15,
|
|
592
|
+
registerFilterOp as registerFilterOp15,
|
|
593
|
+
resolveColor as resolveColor2
|
|
594
|
+
} from "@workglow/util/media";
|
|
595
|
+
// Blends each pixel of `bin` toward the tint color (tr, tg, tb) by `amount`.
// amount = 0 leaves the image untouched; amount = 1 replaces RGB with the tint.
// Grayscale (1-channel) input is promoted to a 3-channel RGB result; an alpha
// channel (channels === 4) is copied through unchanged.
function cpuTint(bin, tr, tg, tb, amount) {
  const { data: src, width, height, channels } = bin;
  const keep = 1 - amount;
  const addR = tr * amount;
  const addG = tg * amount;
  const addB = tb * amount;
  const total = width * height;
  if (channels === 1) {
    // Gray input: expand to RGB while tinting. Uint8ClampedArray assignment
    // clamps and rounds the fractional blend results.
    const rgb = new Uint8ClampedArray(total * 3);
    for (let p = 0; p < total; p++) {
      const gray = src[p];
      const base = p * 3;
      rgb[base] = gray * keep + addR;
      rgb[base + 1] = gray * keep + addG;
      rgb[base + 2] = gray * keep + addB;
    }
    return { data: rgb, width, height, channels: 3 };
  }
  const out = new Uint8ClampedArray(src.length);
  for (let p = 0; p < total; p++) {
    const base = p * channels;
    out[base] = src[base] * keep + addR;
    out[base + 1] = src[base + 1] * keep + addG;
    out[base + 2] = src[base + 2] * keep + addB;
    if (channels === 4) {
      // Alpha is passed through untouched.
      out[base + 3] = src[base + 3];
    }
  }
  return { data: out, width, height, channels };
}
|
|
624
|
+
// CPU backend registration: "tint" blends the image toward a resolved color.
registerFilterOp15("cpu", "tint", (image, params) => {
  const rgb = resolveColor2(params.color);
  const tinted = cpuTint(image.getBinary(), rgb.r, rgb.g, rgb.b, params.amount);
  return CpuImage15.fromImageBinary(tinted);
});
|
|
628
|
+
|
|
629
|
+
// src/task/image/transparency/transparency.cpu.ts
|
|
630
|
+
import { CpuImage as CpuImage16, registerFilterOp as registerFilterOp16 } from "@workglow/util/media";
|
|
631
|
+
// Scales the image's alpha by `amount` (0..1) and always emits RGBA output.
// Gray input is replicated into R/G/B; inputs without an alpha channel are
// treated as fully opaque (255) before scaling.
function cpuTransparency(bin, amount) {
  const { data: src, width, height, channels: srcCh } = bin;
  const total = width * height;
  const out = new Uint8ClampedArray(total * 4);
  // Fixed-point alpha multiplier; +127 below rounds the /255 division.
  const scale = Math.round(amount * 255);
  for (let p = 0; p < total; p++) {
    const s = p * srcCh;
    const d = p * 4;
    const first = src[s];
    out[d] = first;
    if (srcCh >= 3) {
      out[d + 1] = src[s + 1];
      out[d + 2] = src[s + 2];
    } else {
      // Single-channel source: replicate gray into G and B.
      out[d + 1] = first;
      out[d + 2] = first;
    }
    const alpha = srcCh === 4 ? src[s + 3] : 255;
    out[d + 3] = (alpha * scale + 127) / 255;
  }
  return { data: out, width, height, channels: 4 };
}
|
|
647
|
+
// CPU backend registration: "transparency" multiplies alpha by `amount`.
registerFilterOp16("cpu", "transparency", (image, params) =>
  CpuImage16.fromImageBinary(cpuTransparency(image.getBinary(), params.amount)));
|
|
650
|
+
|
|
651
|
+
// src/task/image/blur/blur.webgpu.ts
|
|
652
|
+
import { registerFilterOp as registerFilterOp17, VERTEX_PRELUDE } from "@workglow/util/media";
|
|
653
|
+
var SHADER_SRC = `${VERTEX_PRELUDE}
|
|
654
|
+
struct U { radius: u32, direction: u32, width: f32, height: f32 };
|
|
655
|
+
@group(0) @binding(2) var<uniform> u: U;
|
|
656
|
+
|
|
657
|
+
@fragment
|
|
658
|
+
fn fs(in: VsOut) -> @location(0) vec4f {
|
|
659
|
+
let r = i32(u.radius);
|
|
660
|
+
var sum = vec4f(0.0);
|
|
661
|
+
let texel = vec2f(1.0 / u.width, 1.0 / u.height);
|
|
662
|
+
// direction: 0 = horizontal, 1 = vertical.
|
|
663
|
+
let dir = select(vec2f(0.0, texel.y), vec2f(texel.x, 0.0), u.direction == 0u);
|
|
664
|
+
for (var k: i32 = -r; k <= r; k = k + 1) {
|
|
665
|
+
let uv = clamp(in.uv + dir * f32(k), vec2f(0.0), vec2f(1.0));
|
|
666
|
+
sum = sum + textureSample(src, src_sampler, uv);
|
|
667
|
+
}
|
|
668
|
+
let n = f32(2 * r + 1);
|
|
669
|
+
return sum / n;
|
|
670
|
+
}
|
|
671
|
+
`;
|
|
672
|
+
// Packs the blur uniforms into a 16-byte buffer laid out to match the WGSL
// struct U { radius: u32, direction: u32, width: f32, height: f32 }.
function makeUniforms(radius, direction, width, height) {
  const uniforms = new ArrayBuffer(16);
  const ints = new Uint32Array(uniforms, 0, 2);
  const floats = new Float32Array(uniforms, 8, 2);
  // Truncate radius to an integer and clamp to at least 1 texel.
  ints[0] = Math.max(1, radius | 0);
  ints[1] = direction;
  floats[0] = width;
  floats[1] = height;
  return uniforms;
}
|
|
682
|
+
// WebGPU "blur": separable box blur — a horizontal pass followed by a vertical
// pass, each averaging 2r+1 samples. The intermediate texture is released
// once the second pass has consumed it.
registerFilterOp17("webgpu", "blur", (image, { radius }) => {
  const width = image.width;
  const height = image.height;
  const firstPass = image.apply({
    shader: SHADER_SRC,
    uniforms: makeUniforms(radius, 0, width, height)
  });
  const secondPass = firstPass.apply({
    shader: SHADER_SRC,
    uniforms: makeUniforms(radius, 1, width, height)
  });
  firstPass.release();
  return secondPass;
});
|
|
693
|
+
|
|
694
|
+
// src/task/image/border/border.webgpu.ts
|
|
695
|
+
import { VERTEX_PRELUDE as VERTEX_PRELUDE2, registerFilterOp as registerFilterOp18, resolveColor as resolveColor3 } from "@workglow/util/media";
|
|
696
|
+
var SHADER_SRC2 = `${VERTEX_PRELUDE2}
|
|
697
|
+
struct U { color: vec4f, borderWidth: f32, srcWidth: f32, srcHeight: f32, _pad: f32 };
|
|
698
|
+
@group(0) @binding(2) var<uniform> u: U;
|
|
699
|
+
|
|
700
|
+
@fragment
|
|
701
|
+
fn fs(in: VsOut) -> @location(0) vec4f {
|
|
702
|
+
let outW = u.srcWidth + 2.0 * u.borderWidth;
|
|
703
|
+
let outH = u.srcHeight + 2.0 * u.borderWidth;
|
|
704
|
+
let px = in.uv.x * outW;
|
|
705
|
+
let py = in.uv.y * outH;
|
|
706
|
+
let inside = px >= u.borderWidth && px < (u.borderWidth + u.srcWidth)
|
|
707
|
+
&& py >= u.borderWidth && py < (u.borderWidth + u.srcHeight);
|
|
708
|
+
if (!inside) {
|
|
709
|
+
return u.color;
|
|
710
|
+
}
|
|
711
|
+
let sx = (px - u.borderWidth) / u.srcWidth;
|
|
712
|
+
let sy = (py - u.borderWidth) / u.srcHeight;
|
|
713
|
+
return textureSample(src, src_sampler, vec2f(sx, sy));
|
|
714
|
+
}
|
|
715
|
+
`;
|
|
716
|
+
// WebGPU "border": expands the canvas by `borderWidth` on every side and
// paints the new margin with `color`. Float layout mirrors the WGSL struct
// U { color: vec4f, borderWidth: f32, srcWidth: f32, srcHeight: f32, _pad: f32 }.
registerFilterOp18("webgpu", "border", (image, { borderWidth, color }) => {
  const srcW = image.width;
  const srcH = image.height;
  const rgba = resolveColor3(color);
  const uniforms = new ArrayBuffer(32);
  const f = new Float32Array(uniforms);
  // Color components are normalized from 0-255 bytes to 0-1 floats.
  f[0] = rgba.r / 255;
  f[1] = rgba.g / 255;
  f[2] = rgba.b / 255;
  f[3] = rgba.a / 255;
  f[4] = borderWidth;
  f[5] = srcW;
  f[6] = srcH;
  const outSize = { width: srcW + 2 * borderWidth, height: srcH + 2 * borderWidth };
  return image.apply({ shader: SHADER_SRC2, uniforms, outSize });
});
|
|
735
|
+
|
|
736
|
+
// src/task/image/brightness/brightness.webgpu.ts
|
|
737
|
+
import { registerFilterOp as registerFilterOp19, VERTEX_PRELUDE as VERTEX_PRELUDE3 } from "@workglow/util/media";
|
|
738
|
+
var SHADER_SRC3 = `${VERTEX_PRELUDE3}
|
|
739
|
+
struct U { amount: f32 };
|
|
740
|
+
@group(0) @binding(2) var<uniform> u: U;
|
|
741
|
+
|
|
742
|
+
@fragment
|
|
743
|
+
fn fs(in: VsOut) -> @location(0) vec4f {
|
|
744
|
+
let s = textureSample(src, src_sampler, in.uv);
|
|
745
|
+
let d = u.amount / 255.0;
|
|
746
|
+
return vec4f(clamp(s.rgb + vec3f(d), vec3f(0.0), vec3f(1.0)), s.a);
|
|
747
|
+
}
|
|
748
|
+
`;
|
|
749
|
+
// WebGPU "brightness": the shader adds amount/255 to each RGB channel.
registerFilterOp19("webgpu", "brightness", (image, { amount }) => {
  const uniforms = new ArrayBuffer(16);
  const view = new Float32Array(uniforms, 0, 1);
  view[0] = amount;
  return image.apply({ shader: SHADER_SRC3, uniforms });
});
|
|
754
|
+
|
|
755
|
+
// src/task/image/contrast/contrast.webgpu.ts
|
|
756
|
+
import { registerFilterOp as registerFilterOp20, VERTEX_PRELUDE as VERTEX_PRELUDE4 } from "@workglow/util/media";
|
|
757
|
+
var SHADER_SRC4 = `${VERTEX_PRELUDE4}
|
|
758
|
+
struct U { amount: f32 };
|
|
759
|
+
@group(0) @binding(2) var<uniform> u: U;
|
|
760
|
+
|
|
761
|
+
@fragment
|
|
762
|
+
fn fs(in: VsOut) -> @location(0) vec4f {
|
|
763
|
+
let s = textureSample(src, src_sampler, in.uv);
|
|
764
|
+
// Standard "GIMP" contrast curve, matching cpuContrast.
|
|
765
|
+
let factor = (259.0 * (u.amount + 255.0)) / (255.0 * (259.0 - u.amount));
|
|
766
|
+
let rgb = factor * (s.rgb - vec3f(0.5)) + vec3f(0.5);
|
|
767
|
+
return vec4f(clamp(rgb, vec3f(0.0), vec3f(1.0)), s.a);
|
|
768
|
+
}
|
|
769
|
+
`;
|
|
770
|
+
// WebGPU "contrast": the shader applies the GIMP-style contrast curve.
registerFilterOp20("webgpu", "contrast", (image, { amount }) => {
  const uniforms = new ArrayBuffer(16);
  const view = new Float32Array(uniforms, 0, 1);
  view[0] = amount;
  return image.apply({ shader: SHADER_SRC4, uniforms });
});
|
|
775
|
+
|
|
776
|
+
// src/task/image/crop/crop.webgpu.ts
|
|
777
|
+
import { registerFilterOp as registerFilterOp21, VERTEX_PRELUDE as VERTEX_PRELUDE5 } from "@workglow/util/media";
|
|
778
|
+
var SHADER_SRC5 = `${VERTEX_PRELUDE5}
|
|
779
|
+
struct U { left: f32, top: f32, srcWidth: f32, srcHeight: f32, outWidth: f32, outHeight: f32 };
|
|
780
|
+
@group(0) @binding(2) var<uniform> u: U;
|
|
781
|
+
|
|
782
|
+
@fragment
|
|
783
|
+
fn fs(in: VsOut) -> @location(0) vec4f {
|
|
784
|
+
let px = u.left + in.uv.x * u.outWidth;
|
|
785
|
+
let py = u.top + in.uv.y * u.outHeight;
|
|
786
|
+
let uv = vec2f(px / u.srcWidth, py / u.srcHeight);
|
|
787
|
+
return textureSample(src, src_sampler, uv);
|
|
788
|
+
}
|
|
789
|
+
`;
|
|
790
|
+
// WebGPU "crop": extracts the (left, top, width, height) window from the
// source. Float layout mirrors the WGSL struct U { left, top, srcWidth,
// srcHeight, outWidth, outHeight } (six f32s in a 32-byte buffer).
registerFilterOp21("webgpu", "crop", (image, { left, top, width, height }) => {
  const uniforms = new ArrayBuffer(32);
  const f = new Float32Array(uniforms);
  f[0] = left;
  f[1] = top;
  f[2] = image.width;
  f[3] = image.height;
  f[4] = width;
  f[5] = height;
  return image.apply({ shader: SHADER_SRC5, uniforms, outSize: { width, height } });
});
|
|
807
|
+
|
|
808
|
+
// src/task/image/flip/flip.webgpu.ts
|
|
809
|
+
import { registerFilterOp as registerFilterOp22, VERTEX_PRELUDE as VERTEX_PRELUDE6 } from "@workglow/util/media";
|
|
810
|
+
var SHADER_SRC6 = `${VERTEX_PRELUDE6}
|
|
811
|
+
struct U { direction: u32 };
|
|
812
|
+
@group(0) @binding(2) var<uniform> u: U;
|
|
813
|
+
|
|
814
|
+
@fragment
|
|
815
|
+
fn fs(in: VsOut) -> @location(0) vec4f {
|
|
816
|
+
// direction: 0 = horizontal (flip x), 1 = vertical (flip y).
|
|
817
|
+
let flipX = u.direction == 0u;
|
|
818
|
+
let flipY = u.direction == 1u;
|
|
819
|
+
let uv = vec2f(
|
|
820
|
+
select(in.uv.x, 1.0 - in.uv.x, flipX),
|
|
821
|
+
select(in.uv.y, 1.0 - in.uv.y, flipY),
|
|
822
|
+
);
|
|
823
|
+
return textureSample(src, src_sampler, uv);
|
|
824
|
+
}
|
|
825
|
+
`;
|
|
826
|
+
// Maps the public direction names onto the u32 codes the shader expects.
var DIRECTION_TO_CODE = { horizontal: 0, vertical: 1 };
// WebGPU "flip": mirrors the image across the x or y axis via uv remapping.
registerFilterOp22("webgpu", "flip", (image, { direction }) => {
  const uniforms = new ArrayBuffer(16);
  const view = new Uint32Array(uniforms, 0, 1);
  view[0] = DIRECTION_TO_CODE[direction];
  return image.apply({ shader: SHADER_SRC6, uniforms });
});
|
|
832
|
+
|
|
833
|
+
// src/task/image/grayscale/grayscale.webgpu.ts
|
|
834
|
+
import { registerFilterOp as registerFilterOp23, VERTEX_PRELUDE as VERTEX_PRELUDE7 } from "@workglow/util/media";
|
|
835
|
+
var SHADER_SRC7 = `${VERTEX_PRELUDE7}
|
|
836
|
+
@fragment
|
|
837
|
+
fn fs(in: VsOut) -> @location(0) vec4f {
|
|
838
|
+
let s = textureSample(src, src_sampler, in.uv);
|
|
839
|
+
// Match cpuGrayscale's BT.601-ish (77,150,29)/256 weights.
|
|
840
|
+
let g = (s.r * 77.0 + s.g * 150.0 + s.b * 29.0) / 256.0;
|
|
841
|
+
return vec4f(g, g, g, s.a);
|
|
842
|
+
}
|
|
843
|
+
`;
|
|
844
|
+
// WebGPU "grayscale": shader-only op (BT.601-style weights), no uniforms.
registerFilterOp23("webgpu", "grayscale", (image, _params) =>
  image.apply({ shader: SHADER_SRC7, uniforms: undefined }));
|
|
847
|
+
|
|
848
|
+
// src/task/image/invert/invert.webgpu.ts
|
|
849
|
+
import { registerFilterOp as registerFilterOp24, VERTEX_PRELUDE as VERTEX_PRELUDE8 } from "@workglow/util/media";
|
|
850
|
+
var SHADER_SRC8 = `${VERTEX_PRELUDE8}
|
|
851
|
+
@fragment
|
|
852
|
+
fn fs(in: VsOut) -> @location(0) vec4f {
|
|
853
|
+
let s = textureSample(src, src_sampler, in.uv);
|
|
854
|
+
return vec4f(1.0 - s.rgb, s.a);
|
|
855
|
+
}
|
|
856
|
+
`;
|
|
857
|
+
// WebGPU "invert": shader-only complement of each RGB channel, no uniforms.
registerFilterOp24("webgpu", "invert", (image, _params) =>
  image.apply({ shader: SHADER_SRC8, uniforms: undefined }));
|
|
860
|
+
|
|
861
|
+
// src/task/image/pixelate/pixelate.webgpu.ts
|
|
862
|
+
import { registerFilterOp as registerFilterOp25, VERTEX_PRELUDE as VERTEX_PRELUDE9 } from "@workglow/util/media";
|
|
863
|
+
var SHADER_SRC9 = `${VERTEX_PRELUDE9}
|
|
864
|
+
struct U { blockSize: u32, width: u32, height: u32, _pad: u32 };
|
|
865
|
+
@group(0) @binding(2) var<uniform> u: U;
|
|
866
|
+
|
|
867
|
+
@fragment
|
|
868
|
+
fn fs(in: VsOut) -> @location(0) vec4f {
|
|
869
|
+
let px = u32(in.uv.x * f32(u.width));
|
|
870
|
+
let py = u32(in.uv.y * f32(u.height));
|
|
871
|
+
let bx = px / u.blockSize;
|
|
872
|
+
let by = py / u.blockSize;
|
|
873
|
+
let startX = bx * u.blockSize;
|
|
874
|
+
let startY = by * u.blockSize;
|
|
875
|
+
let endX = min(startX + u.blockSize, u.width);
|
|
876
|
+
let endY = min(startY + u.blockSize, u.height);
|
|
877
|
+
|
|
878
|
+
let invW = 1.0 / f32(u.width);
|
|
879
|
+
let invH = 1.0 / f32(u.height);
|
|
880
|
+
|
|
881
|
+
var sum = vec4f(0.0);
|
|
882
|
+
var count: f32 = 0.0;
|
|
883
|
+
for (var sy: u32 = startY; sy < endY; sy = sy + 1u) {
|
|
884
|
+
for (var sx: u32 = startX; sx < endX; sx = sx + 1u) {
|
|
885
|
+
// Sample at exact texel center. With a linear sampler, the bilinear
|
|
886
|
+
// weight collapses to 1.0 on this texel, giving a lossless read.
|
|
887
|
+
let suv = vec2f((f32(sx) + 0.5) * invW, (f32(sy) + 0.5) * invH);
|
|
888
|
+
sum = sum + textureSampleLevel(src, src_sampler, suv, 0.0);
|
|
889
|
+
count = count + 1.0;
|
|
890
|
+
}
|
|
891
|
+
}
|
|
892
|
+
return sum / count;
|
|
893
|
+
}
|
|
894
|
+
`;
|
|
895
|
+
// WebGPU "pixelate": the shader averages each blockSize x blockSize cell.
// Layout mirrors the WGSL struct U { blockSize: u32, width: u32, height: u32, _pad: u32 }.
registerFilterOp25("webgpu", "pixelate", (image, { blockSize }) => {
  const uniforms = new ArrayBuffer(16);
  const view = new Uint32Array(uniforms);
  // Truncate and clamp to at least 1 so the shader never divides by zero.
  view[0] = Math.max(1, blockSize | 0);
  view[1] = image.width;
  view[2] = image.height;
  return image.apply({ shader: SHADER_SRC9, uniforms });
});
|
|
905
|
+
|
|
906
|
+
// src/task/image/posterize/posterize.webgpu.ts
|
|
907
|
+
import { registerFilterOp as registerFilterOp26, VERTEX_PRELUDE as VERTEX_PRELUDE10 } from "@workglow/util/media";
|
|
908
|
+
var SHADER_SRC10 = `${VERTEX_PRELUDE10}
|
|
909
|
+
struct U { levels: f32 };
|
|
910
|
+
@group(0) @binding(2) var<uniform> u: U;
|
|
911
|
+
|
|
912
|
+
@fragment
|
|
913
|
+
fn fs(in: VsOut) -> @location(0) vec4f {
|
|
914
|
+
let s = textureSample(src, src_sampler, in.uv);
|
|
915
|
+
let n = max(u.levels, 2.0);
|
|
916
|
+
// Round-to-nearest quantization, matching cpuPosterize's LUT semantics.
|
|
917
|
+
let q = round(s.rgb * (n - 1.0)) / (n - 1.0);
|
|
918
|
+
return vec4f(clamp(q, vec3f(0.0), vec3f(1.0)), s.a);
|
|
919
|
+
}
|
|
920
|
+
`;
|
|
921
|
+
// WebGPU "posterize": the shader quantizes RGB to `levels` steps.
registerFilterOp26("webgpu", "posterize", (image, { levels }) => {
  const uniforms = new ArrayBuffer(16);
  const view = new Float32Array(uniforms, 0, 1);
  view[0] = levels;
  return image.apply({ shader: SHADER_SRC10, uniforms });
});
|
|
926
|
+
|
|
927
|
+
// src/task/image/resize/resize.webgpu.ts
|
|
928
|
+
import { registerFilterOp as registerFilterOp27, VERTEX_PRELUDE as VERTEX_PRELUDE11 } from "@workglow/util/media";
|
|
929
|
+
var SHADER_SRC11 = `${VERTEX_PRELUDE11}
|
|
930
|
+
@fragment
|
|
931
|
+
fn fs(in: VsOut) -> @location(0) vec4f {
|
|
932
|
+
return textureSample(src, src_sampler, in.uv);
|
|
933
|
+
}
|
|
934
|
+
`;
|
|
935
|
+
// WebGPU "resize": renders the pass-through shader into a target texture of
// the requested dimensions; the sampler performs the actual rescaling.
registerFilterOp27("webgpu", "resize", (image, { width, height }) => {
  const outSize = { width, height };
  return image.apply({ shader: SHADER_SRC11, uniforms: undefined, outSize });
});
|
|
942
|
+
|
|
943
|
+
// src/task/image/rotate/rotate.webgpu.ts
|
|
944
|
+
import { registerFilterOp as registerFilterOp28, VERTEX_PRELUDE as VERTEX_PRELUDE12 } from "@workglow/util/media";
|
|
945
|
+
var SHADER_SRC12 = `${VERTEX_PRELUDE12}
|
|
946
|
+
struct U { angle: u32 };
|
|
947
|
+
@group(0) @binding(2) var<uniform> u: U;
|
|
948
|
+
|
|
949
|
+
@fragment
|
|
950
|
+
fn fs(in: VsOut) -> @location(0) vec4f {
|
|
951
|
+
// Map output uv back to source uv via inverse rotation.
|
|
952
|
+
var srcUv = in.uv;
|
|
953
|
+
if (u.angle == 90u) {
|
|
954
|
+
srcUv = vec2f(in.uv.y, 1.0 - in.uv.x);
|
|
955
|
+
} else if (u.angle == 180u) {
|
|
956
|
+
srcUv = vec2f(1.0 - in.uv.x, 1.0 - in.uv.y);
|
|
957
|
+
} else if (u.angle == 270u) {
|
|
958
|
+
srcUv = vec2f(1.0 - in.uv.y, in.uv.x);
|
|
959
|
+
}
|
|
960
|
+
return textureSample(src, src_sampler, srcUv);
|
|
961
|
+
}
|
|
962
|
+
`;
|
|
963
|
+
// WebGPU "rotate": rotates by a right-angle multiple (90/180/270 degrees).
// Quarter-turn rotations swap the output width and height.
registerFilterOp28("webgpu", "rotate", (image, { angle }) => {
  const srcW = image.width;
  const srcH = image.height;
  const quarterTurn = angle === 90 || angle === 270;
  const uniforms = new ArrayBuffer(16);
  new Uint32Array(uniforms, 0, 1)[0] = angle;
  return image.apply({
    shader: SHADER_SRC12,
    uniforms,
    outSize: quarterTurn ? { width: srcH, height: srcW } : { width: srcW, height: srcH }
  });
});
|
|
975
|
+
|
|
976
|
+
// src/task/image/sepia/sepia.webgpu.ts
|
|
977
|
+
import { registerFilterOp as registerFilterOp29, VERTEX_PRELUDE as VERTEX_PRELUDE13 } from "@workglow/util/media";
|
|
978
|
+
var SHADER_SRC13 = `${VERTEX_PRELUDE13}
|
|
979
|
+
@fragment
|
|
980
|
+
fn fs(in: VsOut) -> @location(0) vec4f {
|
|
981
|
+
let s = textureSample(src, src_sampler, in.uv);
|
|
982
|
+
let r = s.r * 0.393 + s.g * 0.769 + s.b * 0.189;
|
|
983
|
+
let g = s.r * 0.349 + s.g * 0.686 + s.b * 0.168;
|
|
984
|
+
let b = s.r * 0.272 + s.g * 0.534 + s.b * 0.131;
|
|
985
|
+
return vec4f(clamp(r, 0.0, 1.0), clamp(g, 0.0, 1.0), clamp(b, 0.0, 1.0), s.a);
|
|
986
|
+
}
|
|
987
|
+
`;
|
|
988
|
+
// WebGPU "sepia": shader-only sepia color matrix, no uniforms.
registerFilterOp29("webgpu", "sepia", (image, _params) =>
  image.apply({ shader: SHADER_SRC13, uniforms: undefined }));
|
|
991
|
+
|
|
992
|
+
// src/task/image/threshold/threshold.webgpu.ts
|
|
993
|
+
import { registerFilterOp as registerFilterOp30, VERTEX_PRELUDE as VERTEX_PRELUDE14 } from "@workglow/util/media";
|
|
994
|
+
var SHADER_SRC14 = `${VERTEX_PRELUDE14}
|
|
995
|
+
struct U { value: f32 };
|
|
996
|
+
@group(0) @binding(2) var<uniform> u: U;
|
|
997
|
+
|
|
998
|
+
@fragment
|
|
999
|
+
fn fs(in: VsOut) -> @location(0) vec4f {
|
|
1000
|
+
let s = textureSample(src, src_sampler, in.uv);
|
|
1001
|
+
let v = u.value / 255.0;
|
|
1002
|
+
// Per-channel binary threshold to match cpuThreshold (each of R/G/B
|
|
1003
|
+
// compared independently); alpha preserved.
|
|
1004
|
+
let r = select(0.0, 1.0, s.r >= v);
|
|
1005
|
+
let g = select(0.0, 1.0, s.g >= v);
|
|
1006
|
+
let b = select(0.0, 1.0, s.b >= v);
|
|
1007
|
+
return vec4f(r, g, b, s.a);
|
|
1008
|
+
}
|
|
1009
|
+
`;
|
|
1010
|
+
// WebGPU "threshold": per-channel binarization against `value` (0-255 scale).
registerFilterOp30("webgpu", "threshold", (image, { value }) => {
  const uniforms = new ArrayBuffer(16);
  const view = new Float32Array(uniforms, 0, 1);
  view[0] = value;
  return image.apply({ shader: SHADER_SRC14, uniforms });
});
|
|
1015
|
+
|
|
1016
|
+
// src/task/image/tint/tint.webgpu.ts
|
|
1017
|
+
import { VERTEX_PRELUDE as VERTEX_PRELUDE15, registerFilterOp as registerFilterOp31, resolveColor as resolveColor4 } from "@workglow/util/media";
|
|
1018
|
+
var SHADER_SRC15 = `${VERTEX_PRELUDE15}
|
|
1019
|
+
struct U { color: vec4f, amount: f32 };
|
|
1020
|
+
@group(0) @binding(2) var<uniform> u: U;
|
|
1021
|
+
|
|
1022
|
+
@fragment
|
|
1023
|
+
fn fs(in: VsOut) -> @location(0) vec4f {
|
|
1024
|
+
let s = textureSample(src, src_sampler, in.uv);
|
|
1025
|
+
let tinted = mix(s.rgb, u.color.rgb, u.amount);
|
|
1026
|
+
return vec4f(clamp(tinted, vec3f(0.0), vec3f(1.0)), s.a);
|
|
1027
|
+
}
|
|
1028
|
+
`;
|
|
1029
|
+
// WebGPU "tint": the shader mixes each pixel's RGB toward the resolved color
// by `amount`. Layout mirrors the WGSL struct U { color: vec4f, amount: f32 };
// the color's alpha slot is fixed at 1 because the shader only reads u.color.rgb.
registerFilterOp31("webgpu", "tint", (image, { color, amount }) => {
  const rgba = resolveColor4(color);
  const uniforms = new ArrayBuffer(32);
  const f = new Float32Array(uniforms);
  f[0] = rgba.r / 255;
  f[1] = rgba.g / 255;
  f[2] = rgba.b / 255;
  f[3] = 1;
  f[4] = amount;
  return image.apply({ shader: SHADER_SRC15, uniforms });
});
|
|
1040
|
+
|
|
1041
|
+
// src/task/image/transparency/transparency.webgpu.ts
|
|
1042
|
+
import { registerFilterOp as registerFilterOp32, VERTEX_PRELUDE as VERTEX_PRELUDE16 } from "@workglow/util/media";
|
|
1043
|
+
var SHADER_SRC16 = `${VERTEX_PRELUDE16}
|
|
1044
|
+
struct U { amount: f32 };
|
|
1045
|
+
@group(0) @binding(2) var<uniform> u: U;
|
|
1046
|
+
|
|
1047
|
+
@fragment
|
|
1048
|
+
fn fs(in: VsOut) -> @location(0) vec4f {
|
|
1049
|
+
let s = textureSample(src, src_sampler, in.uv);
|
|
1050
|
+
return vec4f(s.rgb, s.a * u.amount);
|
|
1051
|
+
}
|
|
1052
|
+
`;
|
|
1053
|
+
// WebGPU "transparency": the shader multiplies alpha by `amount`.
registerFilterOp32("webgpu", "transparency", (image, { amount }) => {
  const uniforms = new ArrayBuffer(16);
  const view = new Float32Array(uniforms, 0, 1);
  view[0] = amount;
  return image.apply({ shader: SHADER_SRC16, uniforms });
});
|
|
1058
|
+
|
|
1059
|
+
// src/codec.browser.ts
|
|
1060
|
+
import { applyFilter, registerPreviewResizeFn } from "@workglow/util/media";
|
|
1061
|
+
// Browser preview resize hook: delegate to the registered "resize" filter op.
registerPreviewResizeFn((image, w, h) => applyFilter(image, "resize", { width: w, height: h }));
|
|
1062
|
+
|
|
179
1063
|
// src/task/image/imageTextRender.ts
|
|
180
1064
|
import { createServiceToken, globalServiceRegistry } from "@workglow/util";
|
|
181
1065
|
var IMAGE_TEXT_ANCHOR_POSITIONS = [
|
|
@@ -5762,197 +6646,118 @@ ${input.prompt}` : input.prompt : this.config.message ?? "";
|
|
|
5762
6646
|
}
|
|
5763
6647
|
}
|
|
5764
6648
|
Workflow16.prototype.humanInput = CreateWorkflow15(HumanInputTask);
|
|
5765
|
-
|
|
6649
|
+
|
|
6650
|
+
// src/common.ts
|
|
5766
6651
|
import {
|
|
5767
|
-
|
|
5768
|
-
|
|
5769
|
-
|
|
5770
|
-
|
|
6652
|
+
registerFilterOp as registerFilterOp33,
|
|
6653
|
+
applyFilter as applyFilter3,
|
|
6654
|
+
hasFilterOp as hasFilterOp2,
|
|
6655
|
+
_resetFilterRegistryForTests
|
|
6656
|
+
} from "@workglow/util/media";
|
|
5771
6657
|
|
|
5772
|
-
// src/task/image/
|
|
5773
|
-
import {
|
|
5774
|
-
|
|
5775
|
-
|
|
5776
|
-
|
|
5777
|
-
|
|
5778
|
-
|
|
5779
|
-
|
|
5780
|
-
|
|
5781
|
-
|
|
5782
|
-
|
|
5783
|
-
};
|
|
5784
|
-
|
|
6658
|
+
// src/task/image/ImageFilterTask.ts
|
|
6659
|
+
import {
|
|
6660
|
+
Task as Task42
|
|
6661
|
+
} from "@workglow/task-graph";
|
|
6662
|
+
import {
|
|
6663
|
+
applyFilter as applyFilter2,
|
|
6664
|
+
CpuImage as CpuImage17,
|
|
6665
|
+
GpuImageFactory,
|
|
6666
|
+
getGpuImageFactory,
|
|
6667
|
+
hasFilterOp,
|
|
6668
|
+
previewSource
|
|
6669
|
+
} from "@workglow/util/media";
|
|
6670
|
+
|
|
6671
|
+
// Base class for single-op image filter tasks. Subclasses supply `filterName`
// and `opParams(input)`; this class normalizes arbitrary image inputs into a
// backend image, falls back to the CPU backend when the op is not registered
// for the input's backend, and releases intermediate images it creates.
class ImageFilterTask extends Task42 {
  // Hook for preview scaling; identity by default. Subclasses override when a
  // parameter is expressed in source pixels and must shrink with the preview.
  scalePreviewParams(params, _scale) {
    return params;
  }
  // Coerces any supported input shape into a backend image object.
  // Accepted shapes, checked in order: an existing backend image (duck-typed
  // via backend/retain/release/materialize), a data: URI string, a Blob, an
  // ImageBitmap (requires the browser entry point's factory), or a raw
  // ImageBinary ({ data, width, height, channels }). Throws a diagnostic
  // error (constructor name plus up to 10 keys) for anything else.
  async hydrateInput(image) {
    if (image !== null && typeof image === "object" && "backend" in image && "retain" in image && "release" in image && "materialize" in image) {
      return image;
    }
    if (typeof image === "string" && image.startsWith("data:")) {
      return GpuImageFactory.fromDataUri(image);
    }
    if (typeof Blob !== "undefined" && image instanceof Blob) {
      return GpuImageFactory.fromBlob(image);
    }
    if (typeof ImageBitmap !== "undefined" && image instanceof ImageBitmap) {
      // fromImageBitmap is only registered by the browser entry point.
      const fromImageBitmap = getGpuImageFactory("fromImageBitmap");
      if (!fromImageBitmap) {
        throw new Error("ImageFilterTask: received ImageBitmap but GpuImage.fromImageBitmap is not registered " + "in this runtime. ImageBitmap inputs require the browser entry point.");
      }
      return fromImageBitmap(image);
    }
    if (image !== null && typeof image === "object" && "data" in image && "width" in image && "height" in image && "channels" in image) {
      const bin = image;
      // Prefer the async factory when a runtime has registered one.
      const asyncFactory = getGpuImageFactory("fromImageBinaryAsync");
      if (asyncFactory)
        return asyncFactory(bin);
      return GpuImageFactory.fromImageBinary(bin);
    }
    const ctor = image && typeof image === "object" && image.constructor ? image.constructor.name : typeof image;
    const keys = image && typeof image === "object" ? Object.keys(image).slice(0, 10).join(", ") : "";
    throw new Error(`ImageFilterTask: input.image is not a recognized image shape (got ${ctor}` + (keys ? ` with keys [${keys}]` : "") + `). Expected one of: GpuImage instance, raw ImageBinary, Blob, ImageBitmap, ` + `or a data: URI string.`);
  }
  // Runs the filter at full resolution. The input image is released after the
  // filter output is produced; the output's release is deferred to the
  // resource scope (keyed by task id) when one is available.
  async execute(input, ctx) {
    let inputImage = await this.hydrateInput(input.image);
    if (!hasFilterOp(inputImage.backend, this.filterName)) {
      // Op missing on this backend: materialize to bytes and retry on CPU.
      const bin = await inputImage.materialize();
      const cpu = CpuImage17.fromImageBinary(bin, inputImage.previewScale);
      inputImage.release();
      inputImage = cpu;
    }
    const params = this.scalePreviewParams(this.opParams(input), inputImage.previewScale);
    const out = applyFilter2(inputImage, this.filterName, params);
    inputImage.release();
    ctx.resourceScope?.register(`gpuimage:${String(this.id)}:image`, async () => out.release());
    return { image: out };
  }
  // Preview variant: filters a preview-sized source obtained via
  // previewSource(). Only intermediates created here (sourced !== inputImage)
  // are released.
  // NOTE(review): unlike execute(), inputImage itself is never released here —
  // presumably the caller retains ownership of the original; confirm.
  async executePreview(input, _ctx) {
    const inputImage = await this.hydrateInput(input.image);
    let sourced = previewSource(inputImage);
    if (!hasFilterOp(sourced.backend, this.filterName)) {
      const bin = await sourced.materialize();
      const cpu = CpuImage17.fromImageBinary(bin, sourced.previewScale);
      if (sourced !== inputImage)
        sourced.release();
      sourced = cpu;
    }
    const params = this.scalePreviewParams(this.opParams(input), sourced.previewScale);
    const out = applyFilter2(sourced, this.filterName, params);
    if (sourced !== inputImage)
      sourced.release();
    return { image: out };
  }
}
|
|
6734
|
+
// src/task/image/blur/ImageBlurTask.ts
|
|
6735
|
+
import { CreateWorkflow as CreateWorkflow16, Workflow as Workflow17 } from "@workglow/task-graph";
|
|
6736
|
+
import { GpuImageSchema } from "@workglow/util/media";
|
|
6737
|
+
var inputSchema41 = {
|
|
5785
6738
|
type: "object",
|
|
5786
6739
|
properties: {
|
|
5787
|
-
|
|
5788
|
-
|
|
5789
|
-
|
|
5790
|
-
|
|
5791
|
-
|
|
5792
|
-
description: "Pixel data of the image"
|
|
5793
|
-
},
|
|
5794
|
-
width: {
|
|
5795
|
-
type: "integer",
|
|
5796
|
-
minimum: 1,
|
|
5797
|
-
title: "Width",
|
|
5798
|
-
description: "Width in pixels"
|
|
5799
|
-
},
|
|
5800
|
-
height: {
|
|
5801
|
-
type: "integer",
|
|
6740
|
+
image: GpuImageSchema({ title: "Image", description: "Source image" }),
|
|
6741
|
+
radius: {
|
|
6742
|
+
type: "number",
|
|
6743
|
+
title: "Radius",
|
|
6744
|
+
description: "Blur radius (1-10)",
|
|
5802
6745
|
minimum: 1,
|
|
5803
|
-
|
|
5804
|
-
|
|
5805
|
-
},
|
|
5806
|
-
channels: {
|
|
5807
|
-
type: "integer",
|
|
5808
|
-
enum: [1, 3, 4],
|
|
5809
|
-
title: "Channels",
|
|
5810
|
-
description: "1 (gray), 3 (RGB), or 4 (RGBA)"
|
|
6746
|
+
maximum: 10,
|
|
6747
|
+
default: 1
|
|
5811
6748
|
}
|
|
5812
6749
|
},
|
|
5813
|
-
|
|
5814
|
-
|
|
5815
|
-
|
|
5816
|
-
|
|
5817
|
-
description: "Raw pixel image data",
|
|
5818
|
-
...annotations
|
|
5819
|
-
});
|
|
5820
|
-
var ImageBinaryOrDataUriSchema = (annotations = {}) => ({
|
|
5821
|
-
oneOf: [
|
|
5822
|
-
ImageBinarySchema(annotations),
|
|
5823
|
-
{
|
|
5824
|
-
type: "string",
|
|
5825
|
-
format: "image:data-uri",
|
|
5826
|
-
title: annotations.title ?? "Image",
|
|
5827
|
-
description: annotations.description ?? "Image as ImageBinary or data URI (data:image/png;base64,...)"
|
|
5828
|
-
}
|
|
5829
|
-
]
|
|
5830
|
-
});
|
|
5831
|
-
var ColorSchema = (annotations = {}) => ({
|
|
6750
|
+
required: ["image"],
|
|
6751
|
+
additionalProperties: false
|
|
6752
|
+
};
|
|
6753
|
+
var outputSchema40 = {
|
|
5832
6754
|
type: "object",
|
|
5833
|
-
properties: {
|
|
5834
|
-
r: { type: "integer", minimum: 0, maximum: 255, title: "Red" },
|
|
5835
|
-
g: { type: "integer", minimum: 0, maximum: 255, title: "Green" },
|
|
5836
|
-
b: { type: "integer", minimum: 0, maximum: 255, title: "Blue" },
|
|
5837
|
-
a: { type: "integer", minimum: 0, maximum: 255, title: "Alpha", default: 255 }
|
|
5838
|
-
},
|
|
5839
|
-
required: ["r", "g", "b"],
|
|
5840
|
-
format: "color",
|
|
5841
|
-
additionalProperties: false,
|
|
5842
|
-
...annotations
|
|
5843
|
-
});
|
|
5844
|
-
var HexColorSchema = (annotations = {}) => ({
|
|
5845
|
-
type: "string",
|
|
5846
|
-
format: "color",
|
|
5847
|
-
pattern: "^#([0-9a-fA-F]{3,4}|[0-9a-fA-F]{6}|[0-9a-fA-F]{8})$",
|
|
5848
|
-
title: "Color (hex)",
|
|
5849
|
-
description: "Color as a `#RRGGBB[AA]` or `#RGB[A]` hex string",
|
|
5850
|
-
...annotations
|
|
5851
|
-
});
|
|
5852
|
-
var ColorValueSchema = (annotations = {}) => ({
|
|
5853
|
-
oneOf: [
|
|
5854
|
-
ColorSchema(),
|
|
5855
|
-
HexColorSchema({
|
|
5856
|
-
title: annotations.title ?? "Color",
|
|
5857
|
-
description: annotations.description ?? "Color as {r,g,b,a} object or `#RRGGBB[AA]` / `#RGB[A]` hex string"
|
|
5858
|
-
})
|
|
5859
|
-
],
|
|
5860
|
-
...annotations
|
|
5861
|
-
});
|
|
5862
|
-
var ColorObjectType = null;
|
|
5863
|
-
var ColorFromSchemaOptions = {
|
|
5864
|
-
...FromSchemaDefaultOptions,
|
|
5865
|
-
deserialize: [
|
|
5866
|
-
{
|
|
5867
|
-
pattern: { type: "object", format: "color" },
|
|
5868
|
-
output: ColorObjectType
|
|
5869
|
-
}
|
|
5870
|
-
]
|
|
5871
|
-
};
|
|
5872
|
-
|
|
5873
|
-
// src/task/image/imageTaskIo.ts
|
|
5874
|
-
import { Image, getImageRasterCodec } from "@workglow/util/media";
|
|
5875
|
-
async function produceImageOutput(inputImage, run) {
|
|
5876
|
-
const image = Image.is(inputImage) ? inputImage : Image.from(inputImage);
|
|
5877
|
-
const pixels = await image.getPixels();
|
|
5878
|
-
const out = await run(pixels);
|
|
5879
|
-
if (image.kind === "dataUri") {
|
|
5880
|
-
const mime = image.mimeType ?? "image/png";
|
|
5881
|
-
return getImageRasterCodec().encodeDataUri(out, mime);
|
|
5882
|
-
}
|
|
5883
|
-
return out;
|
|
5884
|
-
}
|
|
5885
|
-
|
|
5886
|
-
// src/task/image/ImageBlurTask.ts
|
|
5887
|
-
async function applyBlur(input) {
|
|
5888
|
-
const { radius = 1 } = input;
|
|
5889
|
-
const image = await produceImageOutput(input.image, (img) => {
|
|
5890
|
-
const { data: src, width, height, channels } = img;
|
|
5891
|
-
const kernelSize = radius * 2 + 1;
|
|
5892
|
-
const tmp = new Uint8ClampedArray(src.length);
|
|
5893
|
-
for (let y = 0;y < height; y++) {
|
|
5894
|
-
for (let c = 0;c < channels; c++) {
|
|
5895
|
-
let sum = 0;
|
|
5896
|
-
for (let k = -radius;k <= radius; k++) {
|
|
5897
|
-
const x = Math.max(0, Math.min(k, width - 1));
|
|
5898
|
-
sum += src[(y * width + x) * channels + c];
|
|
5899
|
-
}
|
|
5900
|
-
tmp[y * width * channels + c] = sum / kernelSize + 0.5 | 0;
|
|
5901
|
-
for (let x = 1;x < width; x++) {
|
|
5902
|
-
const addX = Math.min(x + radius, width - 1);
|
|
5903
|
-
const removeX = Math.max(x - radius - 1, 0);
|
|
5904
|
-
sum += src[(y * width + addX) * channels + c] - src[(y * width + removeX) * channels + c];
|
|
5905
|
-
tmp[(y * width + x) * channels + c] = sum / kernelSize + 0.5 | 0;
|
|
5906
|
-
}
|
|
5907
|
-
}
|
|
5908
|
-
}
|
|
5909
|
-
const dst = new Uint8ClampedArray(src.length);
|
|
5910
|
-
for (let x = 0;x < width; x++) {
|
|
5911
|
-
for (let c = 0;c < channels; c++) {
|
|
5912
|
-
let sum = 0;
|
|
5913
|
-
for (let k = -radius;k <= radius; k++) {
|
|
5914
|
-
const y = Math.max(0, Math.min(k, height - 1));
|
|
5915
|
-
sum += tmp[(y * width + x) * channels + c];
|
|
5916
|
-
}
|
|
5917
|
-
dst[x * channels + c] = sum / kernelSize + 0.5 | 0;
|
|
5918
|
-
for (let y = 1;y < height; y++) {
|
|
5919
|
-
const addY = Math.min(y + radius, height - 1);
|
|
5920
|
-
const removeY = Math.max(y - radius - 1, 0);
|
|
5921
|
-
sum += tmp[(addY * width + x) * channels + c] - tmp[(removeY * width + x) * channels + c];
|
|
5922
|
-
dst[(y * width + x) * channels + c] = sum / kernelSize + 0.5 | 0;
|
|
5923
|
-
}
|
|
5924
|
-
}
|
|
5925
|
-
}
|
|
5926
|
-
return { data: dst, width, height, channels };
|
|
5927
|
-
});
|
|
5928
|
-
return { image };
|
|
5929
|
-
}
|
|
5930
|
-
var inputSchema41 = {
|
|
5931
|
-
type: "object",
|
|
5932
|
-
properties: {
|
|
5933
|
-
image: ImageBinaryOrDataUriSchema({ title: "Image", description: "Source image" }),
|
|
5934
|
-
radius: {
|
|
5935
|
-
type: "integer",
|
|
5936
|
-
title: "Radius",
|
|
5937
|
-
description: "Blur radius (1-10)",
|
|
5938
|
-
minimum: 1,
|
|
5939
|
-
maximum: 10,
|
|
5940
|
-
default: 1
|
|
5941
|
-
}
|
|
5942
|
-
},
|
|
5943
|
-
required: ["image"],
|
|
5944
|
-
additionalProperties: false
|
|
5945
|
-
};
|
|
5946
|
-
var outputSchema40 = {
|
|
5947
|
-
type: "object",
|
|
5948
|
-
properties: {
|
|
5949
|
-
image: ImageBinaryOrDataUriSchema({ title: "Image", description: "Blurred image" })
|
|
5950
|
-
},
|
|
6755
|
+
properties: { image: GpuImageSchema({ title: "Image", description: "Blurred image" }) },
|
|
5951
6756
|
required: ["image"],
|
|
5952
6757
|
additionalProperties: false
|
|
5953
6758
|
};
|
|
5954
6759
|
|
|
5955
|
-
class ImageBlurTask extends
|
|
6760
|
+
class ImageBlurTask extends ImageFilterTask {
|
|
5956
6761
|
static type = "ImageBlurTask";
|
|
5957
6762
|
static category = "Image";
|
|
5958
6763
|
static title = "Blur Image";
|
|
@@ -5963,80 +6768,60 @@ class ImageBlurTask extends Task42 {
|
|
|
5963
6768
|
static outputSchema() {
|
|
5964
6769
|
return outputSchema40;
|
|
5965
6770
|
}
|
|
5966
|
-
|
|
5967
|
-
|
|
6771
|
+
filterName = "blur";
|
|
6772
|
+
opParams(input) {
|
|
6773
|
+
return { radius: input.radius ?? 1 };
|
|
5968
6774
|
}
|
|
5969
|
-
|
|
5970
|
-
return
|
|
6775
|
+
scalePreviewParams({ radius }, s) {
|
|
6776
|
+
return { radius: Math.max(1, Math.round(radius * s)) };
|
|
5971
6777
|
}
|
|
5972
6778
|
}
|
|
5973
6779
|
Workflow17.prototype.imageBlur = CreateWorkflow16(ImageBlurTask);
|
|
5974
|
-
// src/task/image/ImageBorderTask.ts
|
|
5975
|
-
import {
|
|
5976
|
-
|
|
5977
|
-
Task as Task43,
|
|
5978
|
-
Workflow as Workflow18
|
|
5979
|
-
} from "@workglow/task-graph";
|
|
5980
|
-
import { resolveColor } from "@workglow/util/media";
|
|
5981
|
-
async function applyBorder(input) {
|
|
5982
|
-
const { borderWidth: bw = 1 } = input;
|
|
5983
|
-
const color = resolveColor(input.color);
|
|
5984
|
-
const image = await produceImageOutput(input.image, (img) => {
|
|
5985
|
-
const { data: src, width: srcW, height: srcH, channels: srcCh } = img;
|
|
5986
|
-
const outCh = 4;
|
|
5987
|
-
const dstW = srcW + bw * 2;
|
|
5988
|
-
const dstH = srcH + bw * 2;
|
|
5989
|
-
const dst = new Uint8ClampedArray(dstW * dstH * outCh);
|
|
5990
|
-
const r = color.r;
|
|
5991
|
-
const g = color.g;
|
|
5992
|
-
const b = color.b;
|
|
5993
|
-
const a = color.a;
|
|
5994
|
-
for (let i = 0;i < dst.length; i += outCh) {
|
|
5995
|
-
dst[i] = r;
|
|
5996
|
-
dst[i + 1] = g;
|
|
5997
|
-
dst[i + 2] = b;
|
|
5998
|
-
dst[i + 3] = a;
|
|
5999
|
-
}
|
|
6000
|
-
for (let y = 0;y < srcH; y++) {
|
|
6001
|
-
for (let x = 0;x < srcW; x++) {
|
|
6002
|
-
const srcIdx = (y * srcW + x) * srcCh;
|
|
6003
|
-
const dstIdx = ((y + bw) * dstW + (x + bw)) * outCh;
|
|
6004
|
-
dst[dstIdx] = src[srcIdx];
|
|
6005
|
-
dst[dstIdx + 1] = srcCh >= 3 ? src[srcIdx + 1] : src[srcIdx];
|
|
6006
|
-
dst[dstIdx + 2] = srcCh >= 3 ? src[srcIdx + 2] : src[srcIdx];
|
|
6007
|
-
dst[dstIdx + 3] = srcCh === 4 ? src[srcIdx + 3] : 255;
|
|
6008
|
-
}
|
|
6009
|
-
}
|
|
6010
|
-
return { data: dst, width: dstW, height: dstH, channels: outCh };
|
|
6011
|
-
});
|
|
6012
|
-
return { image };
|
|
6013
|
-
}
|
|
6780
|
+
// src/task/image/border/ImageBorderTask.ts
|
|
6781
|
+
import { CreateWorkflow as CreateWorkflow17, Workflow as Workflow18 } from "@workglow/task-graph";
|
|
6782
|
+
import { GpuImageSchema as GpuImageSchema2 } from "@workglow/util/media";
|
|
6014
6783
|
var inputSchema42 = {
|
|
6015
6784
|
type: "object",
|
|
6016
6785
|
properties: {
|
|
6017
|
-
image:
|
|
6786
|
+
image: GpuImageSchema2({ title: "Image", description: "Source image" }),
|
|
6018
6787
|
borderWidth: {
|
|
6019
6788
|
type: "integer",
|
|
6020
6789
|
title: "Border Width",
|
|
6021
6790
|
description: "Border width in pixels",
|
|
6022
6791
|
minimum: 1,
|
|
6023
|
-
default:
|
|
6792
|
+
default: 10
|
|
6024
6793
|
},
|
|
6025
|
-
color:
|
|
6794
|
+
color: {
|
|
6795
|
+
oneOf: [
|
|
6796
|
+
{ type: "string", pattern: "^#([0-9a-fA-F]{3,4}|[0-9a-fA-F]{6}|[0-9a-fA-F]{8})$" },
|
|
6797
|
+
{
|
|
6798
|
+
type: "object",
|
|
6799
|
+
properties: {
|
|
6800
|
+
r: { type: "integer", minimum: 0, maximum: 255 },
|
|
6801
|
+
g: { type: "integer", minimum: 0, maximum: 255 },
|
|
6802
|
+
b: { type: "integer", minimum: 0, maximum: 255 },
|
|
6803
|
+
a: { type: "integer", minimum: 0, maximum: 255 }
|
|
6804
|
+
},
|
|
6805
|
+
required: ["r", "g", "b"],
|
|
6806
|
+
additionalProperties: false
|
|
6807
|
+
}
|
|
6808
|
+
],
|
|
6809
|
+
title: "Color",
|
|
6810
|
+
description: "Border color",
|
|
6811
|
+
default: "#000000"
|
|
6812
|
+
}
|
|
6026
6813
|
},
|
|
6027
6814
|
required: ["image", "color"],
|
|
6028
6815
|
additionalProperties: false
|
|
6029
6816
|
};
|
|
6030
6817
|
var outputSchema41 = {
|
|
6031
6818
|
type: "object",
|
|
6032
|
-
properties: {
|
|
6033
|
-
image: ImageBinaryOrDataUriSchema({ title: "Image", description: "Image with border" })
|
|
6034
|
-
},
|
|
6819
|
+
properties: { image: GpuImageSchema2({ title: "Image", description: "Image with border" }) },
|
|
6035
6820
|
required: ["image"],
|
|
6036
6821
|
additionalProperties: false
|
|
6037
6822
|
};
|
|
6038
6823
|
|
|
6039
|
-
class ImageBorderTask extends
|
|
6824
|
+
class ImageBorderTask extends ImageFilterTask {
|
|
6040
6825
|
static type = "ImageBorderTask";
|
|
6041
6826
|
static category = "Image";
|
|
6042
6827
|
static title = "Add Border";
|
|
@@ -6047,45 +6832,25 @@ class ImageBorderTask extends Task43 {
|
|
|
6047
6832
|
static outputSchema() {
|
|
6048
6833
|
return outputSchema41;
|
|
6049
6834
|
}
|
|
6050
|
-
|
|
6051
|
-
|
|
6835
|
+
filterName = "border";
|
|
6836
|
+
opParams(input) {
|
|
6837
|
+
return {
|
|
6838
|
+
borderWidth: input.borderWidth ?? 10,
|
|
6839
|
+
color: input.color
|
|
6840
|
+
};
|
|
6052
6841
|
}
|
|
6053
|
-
|
|
6054
|
-
return
|
|
6842
|
+
scalePreviewParams({ borderWidth, color }, s) {
|
|
6843
|
+
return { borderWidth: Math.max(1, Math.round(borderWidth * s)), color };
|
|
6055
6844
|
}
|
|
6056
6845
|
}
|
|
6057
6846
|
Workflow18.prototype.imageBorder = CreateWorkflow17(ImageBorderTask);
|
|
6058
|
-
// src/task/image/ImageBrightnessTask.ts
|
|
6059
|
-
import {
|
|
6060
|
-
|
|
6061
|
-
Task as Task44,
|
|
6062
|
-
Workflow as Workflow19
|
|
6063
|
-
} from "@workglow/task-graph";
|
|
6064
|
-
async function applyBrightness(input) {
|
|
6065
|
-
const amount = input.amount ?? 0;
|
|
6066
|
-
const image = await produceImageOutput(input.image, (img) => {
|
|
6067
|
-
const { data: src, width, height, channels } = img;
|
|
6068
|
-
const dst = new Uint8ClampedArray(src.length);
|
|
6069
|
-
if (channels === 4) {
|
|
6070
|
-
for (let i = 0;i < src.length; i += 4) {
|
|
6071
|
-
dst[i] = src[i] + amount;
|
|
6072
|
-
dst[i + 1] = src[i + 1] + amount;
|
|
6073
|
-
dst[i + 2] = src[i + 2] + amount;
|
|
6074
|
-
dst[i + 3] = src[i + 3];
|
|
6075
|
-
}
|
|
6076
|
-
} else {
|
|
6077
|
-
for (let i = 0;i < src.length; i++) {
|
|
6078
|
-
dst[i] = src[i] + amount;
|
|
6079
|
-
}
|
|
6080
|
-
}
|
|
6081
|
-
return { data: dst, width, height, channels };
|
|
6082
|
-
});
|
|
6083
|
-
return { image };
|
|
6084
|
-
}
|
|
6847
|
+
// src/task/image/brightness/ImageBrightnessTask.ts
|
|
6848
|
+
import { CreateWorkflow as CreateWorkflow18, Workflow as Workflow19 } from "@workglow/task-graph";
|
|
6849
|
+
import { GpuImageSchema as GpuImageSchema3 } from "@workglow/util/media";
|
|
6085
6850
|
var inputSchema43 = {
|
|
6086
6851
|
type: "object",
|
|
6087
6852
|
properties: {
|
|
6088
|
-
image:
|
|
6853
|
+
image: GpuImageSchema3({ title: "Image", description: "Source image" }),
|
|
6089
6854
|
amount: {
|
|
6090
6855
|
type: "number",
|
|
6091
6856
|
title: "Amount",
|
|
@@ -6100,14 +6865,12 @@ var inputSchema43 = {
|
|
|
6100
6865
|
};
|
|
6101
6866
|
var outputSchema42 = {
|
|
6102
6867
|
type: "object",
|
|
6103
|
-
properties: {
|
|
6104
|
-
image: ImageBinaryOrDataUriSchema({ title: "Image", description: "Brightness-adjusted image" })
|
|
6105
|
-
},
|
|
6868
|
+
properties: { image: GpuImageSchema3({ title: "Image", description: "Brightness-adjusted image" }) },
|
|
6106
6869
|
required: ["image"],
|
|
6107
6870
|
additionalProperties: false
|
|
6108
6871
|
};
|
|
6109
6872
|
|
|
6110
|
-
class ImageBrightnessTask extends
|
|
6873
|
+
class ImageBrightnessTask extends ImageFilterTask {
|
|
6111
6874
|
static type = "ImageBrightnessTask";
|
|
6112
6875
|
static category = "Image";
|
|
6113
6876
|
static title = "Adjust Brightness";
|
|
@@ -6118,50 +6881,19 @@ class ImageBrightnessTask extends Task44 {
|
|
|
6118
6881
|
static outputSchema() {
|
|
6119
6882
|
return outputSchema42;
|
|
6120
6883
|
}
|
|
6121
|
-
|
|
6122
|
-
|
|
6123
|
-
|
|
6124
|
-
async executePreview(input, _context) {
|
|
6125
|
-
return await applyBrightness(input);
|
|
6884
|
+
filterName = "brightness";
|
|
6885
|
+
opParams(input) {
|
|
6886
|
+
return { amount: input.amount ?? 0 };
|
|
6126
6887
|
}
|
|
6127
6888
|
}
|
|
6128
6889
|
Workflow19.prototype.imageBrightness = CreateWorkflow18(ImageBrightnessTask);
|
|
6129
|
-
// src/task/image/ImageContrastTask.ts
|
|
6130
|
-
import {
|
|
6131
|
-
|
|
6132
|
-
Task as Task45,
|
|
6133
|
-
Workflow as Workflow20
|
|
6134
|
-
} from "@workglow/task-graph";
|
|
6135
|
-
async function applyContrast(input) {
|
|
6136
|
-
const amount = input.amount ?? 0;
|
|
6137
|
-
const image = await produceImageOutput(input.image, (img) => {
|
|
6138
|
-
const { data: src, width, height, channels } = img;
|
|
6139
|
-
const factor = 259 * (amount + 255) / (255 * (259 - amount));
|
|
6140
|
-
const lut = new Uint8ClampedArray(256);
|
|
6141
|
-
for (let i = 0;i < 256; i++) {
|
|
6142
|
-
lut[i] = factor * (i - 128) + 128;
|
|
6143
|
-
}
|
|
6144
|
-
const dst = new Uint8ClampedArray(src.length);
|
|
6145
|
-
if (channels === 4) {
|
|
6146
|
-
for (let i = 0;i < src.length; i += 4) {
|
|
6147
|
-
dst[i] = lut[src[i]];
|
|
6148
|
-
dst[i + 1] = lut[src[i + 1]];
|
|
6149
|
-
dst[i + 2] = lut[src[i + 2]];
|
|
6150
|
-
dst[i + 3] = src[i + 3];
|
|
6151
|
-
}
|
|
6152
|
-
} else {
|
|
6153
|
-
for (let i = 0;i < src.length; i++) {
|
|
6154
|
-
dst[i] = lut[src[i]];
|
|
6155
|
-
}
|
|
6156
|
-
}
|
|
6157
|
-
return { data: dst, width, height, channels };
|
|
6158
|
-
});
|
|
6159
|
-
return { image };
|
|
6160
|
-
}
|
|
6890
|
+
// src/task/image/contrast/ImageContrastTask.ts
|
|
6891
|
+
import { CreateWorkflow as CreateWorkflow19, Workflow as Workflow20 } from "@workglow/task-graph";
|
|
6892
|
+
import { GpuImageSchema as GpuImageSchema4 } from "@workglow/util/media";
|
|
6161
6893
|
var inputSchema44 = {
|
|
6162
6894
|
type: "object",
|
|
6163
6895
|
properties: {
|
|
6164
|
-
image:
|
|
6896
|
+
image: GpuImageSchema4({ title: "Image", description: "Source image" }),
|
|
6165
6897
|
amount: {
|
|
6166
6898
|
type: "number",
|
|
6167
6899
|
title: "Amount",
|
|
@@ -6176,14 +6908,12 @@ var inputSchema44 = {
|
|
|
6176
6908
|
};
|
|
6177
6909
|
var outputSchema43 = {
|
|
6178
6910
|
type: "object",
|
|
6179
|
-
properties: {
|
|
6180
|
-
image: ImageBinaryOrDataUriSchema({ title: "Image", description: "Contrast-adjusted image" })
|
|
6181
|
-
},
|
|
6911
|
+
properties: { image: GpuImageSchema4({ title: "Image", description: "Contrast-adjusted image" }) },
|
|
6182
6912
|
required: ["image"],
|
|
6183
6913
|
additionalProperties: false
|
|
6184
6914
|
};
|
|
6185
6915
|
|
|
6186
|
-
class ImageContrastTask extends
|
|
6916
|
+
class ImageContrastTask extends ImageFilterTask {
|
|
6187
6917
|
static type = "ImageContrastTask";
|
|
6188
6918
|
static category = "Image";
|
|
6189
6919
|
static title = "Adjust Contrast";
|
|
@@ -6194,67 +6924,35 @@ class ImageContrastTask extends Task45 {
|
|
|
6194
6924
|
static outputSchema() {
|
|
6195
6925
|
return outputSchema43;
|
|
6196
6926
|
}
|
|
6197
|
-
|
|
6198
|
-
|
|
6199
|
-
|
|
6200
|
-
async executePreview(input, _context) {
|
|
6201
|
-
return await applyContrast(input);
|
|
6927
|
+
filterName = "contrast";
|
|
6928
|
+
opParams(input) {
|
|
6929
|
+
return { amount: input.amount ?? 0 };
|
|
6202
6930
|
}
|
|
6203
6931
|
}
|
|
6204
6932
|
Workflow20.prototype.imageContrast = CreateWorkflow19(ImageContrastTask);
|
|
6205
|
-
// src/task/image/ImageCropTask.ts
|
|
6206
|
-
import {
|
|
6207
|
-
|
|
6208
|
-
Task as Task46,
|
|
6209
|
-
Workflow as Workflow21
|
|
6210
|
-
} from "@workglow/task-graph";
|
|
6211
|
-
async function cropImage(input) {
|
|
6212
|
-
const { x: rawX, y: rawY, width: rawW, height: rawH } = input;
|
|
6213
|
-
const image = await produceImageOutput(input.image, (img) => {
|
|
6214
|
-
const { data: src, width: srcW, height: srcH, channels } = img;
|
|
6215
|
-
if (srcW < 1 || srcH < 1) {
|
|
6216
|
-
throw new RangeError("Cannot crop an empty image");
|
|
6217
|
-
}
|
|
6218
|
-
if (rawX < 0 || rawX >= srcW || rawY < 0 || rawY >= srcH) {
|
|
6219
|
-
throw new RangeError("Crop origin is outside the source image bounds");
|
|
6220
|
-
}
|
|
6221
|
-
const x = rawX;
|
|
6222
|
-
const y = rawY;
|
|
6223
|
-
const w = Math.min(rawW, srcW - x);
|
|
6224
|
-
const h = Math.min(rawH, srcH - y);
|
|
6225
|
-
const dst = new Uint8ClampedArray(w * h * channels);
|
|
6226
|
-
const rowBytes = w * channels;
|
|
6227
|
-
for (let row = 0;row < h; row++) {
|
|
6228
|
-
const srcOffset = ((y + row) * srcW + x) * channels;
|
|
6229
|
-
const dstOffset = row * rowBytes;
|
|
6230
|
-
dst.set(src.subarray(srcOffset, srcOffset + rowBytes), dstOffset);
|
|
6231
|
-
}
|
|
6232
|
-
return { data: dst, width: w, height: h, channels };
|
|
6233
|
-
});
|
|
6234
|
-
return { image };
|
|
6235
|
-
}
|
|
6933
|
+
// src/task/image/crop/ImageCropTask.ts
|
|
6934
|
+
import { CreateWorkflow as CreateWorkflow20, Workflow as Workflow21 } from "@workglow/task-graph";
|
|
6935
|
+
import { GpuImageSchema as GpuImageSchema5 } from "@workglow/util/media";
|
|
6236
6936
|
var inputSchema45 = {
|
|
6237
6937
|
type: "object",
|
|
6238
6938
|
properties: {
|
|
6239
|
-
image:
|
|
6240
|
-
|
|
6241
|
-
|
|
6939
|
+
image: GpuImageSchema5({ title: "Image", description: "Source image" }),
|
|
6940
|
+
left: { type: "integer", title: "Left", description: "Left offset", minimum: 0, default: 0 },
|
|
6941
|
+
top: { type: "integer", title: "Top", description: "Top offset", minimum: 0, default: 0 },
|
|
6242
6942
|
width: { type: "integer", title: "Width", description: "Crop width", minimum: 1 },
|
|
6243
6943
|
height: { type: "integer", title: "Height", description: "Crop height", minimum: 1 }
|
|
6244
6944
|
},
|
|
6245
|
-
required: ["image", "
|
|
6945
|
+
required: ["image", "left", "top", "width", "height"],
|
|
6246
6946
|
additionalProperties: false
|
|
6247
6947
|
};
|
|
6248
6948
|
var outputSchema44 = {
|
|
6249
6949
|
type: "object",
|
|
6250
|
-
properties: {
|
|
6251
|
-
image: ImageBinaryOrDataUriSchema({ title: "Image", description: "Cropped image" })
|
|
6252
|
-
},
|
|
6950
|
+
properties: { image: GpuImageSchema5({ title: "Image", description: "Cropped image" }) },
|
|
6253
6951
|
required: ["image"],
|
|
6254
6952
|
additionalProperties: false
|
|
6255
6953
|
};
|
|
6256
6954
|
|
|
6257
|
-
class ImageCropTask extends
|
|
6955
|
+
class ImageCropTask extends ImageFilterTask {
|
|
6258
6956
|
static type = "ImageCropTask";
|
|
6259
6957
|
static category = "Image";
|
|
6260
6958
|
static title = "Crop Image";
|
|
@@ -6265,56 +6963,38 @@ class ImageCropTask extends Task46 {
|
|
|
6265
6963
|
static outputSchema() {
|
|
6266
6964
|
return outputSchema44;
|
|
6267
6965
|
}
|
|
6268
|
-
|
|
6269
|
-
|
|
6966
|
+
filterName = "crop";
|
|
6967
|
+
opParams(input) {
|
|
6968
|
+
return {
|
|
6969
|
+
left: input.left,
|
|
6970
|
+
top: input.top,
|
|
6971
|
+
width: input.width,
|
|
6972
|
+
height: input.height
|
|
6973
|
+
};
|
|
6270
6974
|
}
|
|
6271
|
-
|
|
6272
|
-
return
|
|
6975
|
+
scalePreviewParams({ left, top, width, height }, s) {
|
|
6976
|
+
return {
|
|
6977
|
+
left: Math.round(left * s),
|
|
6978
|
+
top: Math.round(top * s),
|
|
6979
|
+
width: Math.max(1, Math.round(width * s)),
|
|
6980
|
+
height: Math.max(1, Math.round(height * s))
|
|
6981
|
+
};
|
|
6273
6982
|
}
|
|
6274
6983
|
}
|
|
6275
6984
|
Workflow21.prototype.imageCrop = CreateWorkflow20(ImageCropTask);
|
|
6276
|
-
// src/task/image/ImageFlipTask.ts
|
|
6277
|
-
import {
|
|
6278
|
-
|
|
6279
|
-
Task as Task47,
|
|
6280
|
-
Workflow as Workflow22
|
|
6281
|
-
} from "@workglow/task-graph";
|
|
6282
|
-
async function flipImage(input) {
|
|
6283
|
-
const { direction } = input;
|
|
6284
|
-
const image = await produceImageOutput(input.image, (img) => {
|
|
6285
|
-
const { data: src, width, height, channels } = img;
|
|
6286
|
-
const dst = new Uint8ClampedArray(src.length);
|
|
6287
|
-
const rowBytes = width * channels;
|
|
6288
|
-
if (direction === "vertical") {
|
|
6289
|
-
for (let y = 0;y < height; y++) {
|
|
6290
|
-
const srcOffset = y * rowBytes;
|
|
6291
|
-
const dstOffset = (height - 1 - y) * rowBytes;
|
|
6292
|
-
dst.set(src.subarray(srcOffset, srcOffset + rowBytes), dstOffset);
|
|
6293
|
-
}
|
|
6294
|
-
} else {
|
|
6295
|
-
for (let y = 0;y < height; y++) {
|
|
6296
|
-
for (let x = 0;x < width; x++) {
|
|
6297
|
-
const srcIdx = (y * width + x) * channels;
|
|
6298
|
-
const dstIdx = (y * width + (width - 1 - x)) * channels;
|
|
6299
|
-
for (let c = 0;c < channels; c++) {
|
|
6300
|
-
dst[dstIdx + c] = src[srcIdx + c];
|
|
6301
|
-
}
|
|
6302
|
-
}
|
|
6303
|
-
}
|
|
6304
|
-
}
|
|
6305
|
-
return { data: dst, width, height, channels };
|
|
6306
|
-
});
|
|
6307
|
-
return { image };
|
|
6308
|
-
}
|
|
6985
|
+
// src/task/image/flip/ImageFlipTask.ts
|
|
6986
|
+
import { CreateWorkflow as CreateWorkflow21, Workflow as Workflow22 } from "@workglow/task-graph";
|
|
6987
|
+
import { GpuImageSchema as GpuImageSchema6 } from "@workglow/util/media";
|
|
6309
6988
|
var inputSchema46 = {
|
|
6310
6989
|
type: "object",
|
|
6311
6990
|
properties: {
|
|
6312
|
-
image:
|
|
6991
|
+
image: GpuImageSchema6({ title: "Image", description: "Source image" }),
|
|
6313
6992
|
direction: {
|
|
6314
6993
|
type: "string",
|
|
6315
6994
|
enum: ["horizontal", "vertical"],
|
|
6316
6995
|
title: "Direction",
|
|
6317
|
-
description: "Flip direction"
|
|
6996
|
+
description: "Flip direction",
|
|
6997
|
+
default: "horizontal"
|
|
6318
6998
|
}
|
|
6319
6999
|
},
|
|
6320
7000
|
required: ["image", "direction"],
|
|
@@ -6322,14 +7002,12 @@ var inputSchema46 = {
|
|
|
6322
7002
|
};
|
|
6323
7003
|
var outputSchema45 = {
|
|
6324
7004
|
type: "object",
|
|
6325
|
-
properties: {
|
|
6326
|
-
image: ImageBinaryOrDataUriSchema({ title: "Image", description: "Flipped image" })
|
|
6327
|
-
},
|
|
7005
|
+
properties: { image: GpuImageSchema6({ title: "Image", description: "Flipped image" }) },
|
|
6328
7006
|
required: ["image"],
|
|
6329
7007
|
additionalProperties: false
|
|
6330
7008
|
};
|
|
6331
7009
|
|
|
6332
|
-
class ImageFlipTask extends
|
|
7010
|
+
class ImageFlipTask extends ImageFilterTask {
|
|
6333
7011
|
static type = "ImageFlipTask";
|
|
6334
7012
|
static category = "Image";
|
|
6335
7013
|
static title = "Flip Image";
|
|
@@ -6340,54 +7018,29 @@ class ImageFlipTask extends Task47 {
|
|
|
6340
7018
|
static outputSchema() {
|
|
6341
7019
|
return outputSchema45;
|
|
6342
7020
|
}
|
|
6343
|
-
|
|
6344
|
-
|
|
6345
|
-
|
|
6346
|
-
async executePreview(input, _context) {
|
|
6347
|
-
return await flipImage(input);
|
|
7021
|
+
filterName = "flip";
|
|
7022
|
+
opParams(input) {
|
|
7023
|
+
return { direction: input.direction ?? "horizontal" };
|
|
6348
7024
|
}
|
|
6349
7025
|
}
|
|
6350
7026
|
Workflow22.prototype.imageFlip = CreateWorkflow21(ImageFlipTask);
|
|
6351
|
-
// src/task/image/ImageGrayscaleTask.ts
|
|
6352
|
-
import {
|
|
6353
|
-
|
|
6354
|
-
Task as Task48,
|
|
6355
|
-
Workflow as Workflow23
|
|
6356
|
-
} from "@workglow/task-graph";
|
|
6357
|
-
async function applyGrayscale(input) {
|
|
6358
|
-
const image = await produceImageOutput(input.image, (img) => {
|
|
6359
|
-
const { data: src, width, height, channels } = img;
|
|
6360
|
-
if (channels === 1) {
|
|
6361
|
-
return { data: new Uint8ClampedArray(src), width, height, channels: 1 };
|
|
6362
|
-
}
|
|
6363
|
-
const pixelCount = width * height;
|
|
6364
|
-
const dst = new Uint8ClampedArray(pixelCount);
|
|
6365
|
-
for (let i = 0;i < pixelCount; i++) {
|
|
6366
|
-
const idx = i * channels;
|
|
6367
|
-
dst[i] = src[idx] * 77 + src[idx + 1] * 150 + src[idx + 2] * 29 >> 8;
|
|
6368
|
-
}
|
|
6369
|
-
return { data: dst, width, height, channels: 1 };
|
|
6370
|
-
});
|
|
6371
|
-
return { image };
|
|
6372
|
-
}
|
|
7027
|
+
// src/task/image/grayscale/ImageGrayscaleTask.ts
|
|
7028
|
+
import { CreateWorkflow as CreateWorkflow22, Workflow as Workflow23 } from "@workglow/task-graph";
|
|
7029
|
+
import { GpuImageSchema as GpuImageSchema7 } from "@workglow/util/media";
|
|
6373
7030
|
var inputSchema47 = {
|
|
6374
7031
|
type: "object",
|
|
6375
|
-
properties: {
|
|
6376
|
-
image: ImageBinaryOrDataUriSchema({ title: "Image", description: "Source image" })
|
|
6377
|
-
},
|
|
7032
|
+
properties: { image: GpuImageSchema7({ title: "Image", description: "Source image" }) },
|
|
6378
7033
|
required: ["image"],
|
|
6379
7034
|
additionalProperties: false
|
|
6380
7035
|
};
|
|
6381
7036
|
var outputSchema46 = {
|
|
6382
7037
|
type: "object",
|
|
6383
|
-
properties: {
|
|
6384
|
-
image: ImageBinaryOrDataUriSchema({ title: "Image", description: "Grayscale image" })
|
|
6385
|
-
},
|
|
7038
|
+
properties: { image: GpuImageSchema7({ title: "Image", description: "Grayscale image" }) },
|
|
6386
7039
|
required: ["image"],
|
|
6387
7040
|
additionalProperties: false
|
|
6388
7041
|
};
|
|
6389
7042
|
|
|
6390
|
-
class ImageGrayscaleTask extends
|
|
7043
|
+
class ImageGrayscaleTask extends ImageFilterTask {
|
|
6391
7044
|
static type = "ImageGrayscaleTask";
|
|
6392
7045
|
static category = "Image";
|
|
6393
7046
|
static title = "Grayscale";
|
|
@@ -6398,58 +7051,29 @@ class ImageGrayscaleTask extends Task48 {
|
|
|
6398
7051
|
static outputSchema() {
|
|
6399
7052
|
return outputSchema46;
|
|
6400
7053
|
}
|
|
6401
|
-
|
|
6402
|
-
|
|
6403
|
-
|
|
6404
|
-
async executePreview(input, _context) {
|
|
6405
|
-
return await applyGrayscale(input);
|
|
7054
|
+
filterName = "grayscale";
|
|
7055
|
+
opParams(_input) {
|
|
7056
|
+
return;
|
|
6406
7057
|
}
|
|
6407
7058
|
}
|
|
6408
7059
|
Workflow23.prototype.imageGrayscale = CreateWorkflow22(ImageGrayscaleTask);
|
|
6409
|
-
// src/task/image/ImageInvertTask.ts
|
|
6410
|
-
import {
|
|
6411
|
-
|
|
6412
|
-
Task as Task49,
|
|
6413
|
-
Workflow as Workflow24
|
|
6414
|
-
} from "@workglow/task-graph";
|
|
6415
|
-
async function invertImage(input) {
|
|
6416
|
-
const image = await produceImageOutput(input.image, (img) => {
|
|
6417
|
-
const { data: src, width, height, channels } = img;
|
|
6418
|
-
const dst = new Uint8ClampedArray(src.length);
|
|
6419
|
-
if (channels === 4) {
|
|
6420
|
-
for (let i = 0;i < src.length; i += 4) {
|
|
6421
|
-
dst[i] = 255 - src[i];
|
|
6422
|
-
dst[i + 1] = 255 - src[i + 1];
|
|
6423
|
-
dst[i + 2] = 255 - src[i + 2];
|
|
6424
|
-
dst[i + 3] = src[i + 3];
|
|
6425
|
-
}
|
|
6426
|
-
} else {
|
|
6427
|
-
for (let i = 0;i < src.length; i++) {
|
|
6428
|
-
dst[i] = 255 - src[i];
|
|
6429
|
-
}
|
|
6430
|
-
}
|
|
6431
|
-
return { data: dst, width, height, channels };
|
|
6432
|
-
});
|
|
6433
|
-
return { image };
|
|
6434
|
-
}
|
|
7060
|
+
// src/task/image/invert/ImageInvertTask.ts
|
|
7061
|
+
import { CreateWorkflow as CreateWorkflow23, Workflow as Workflow24 } from "@workglow/task-graph";
|
|
7062
|
+
import { GpuImageSchema as GpuImageSchema8 } from "@workglow/util/media";
|
|
6435
7063
|
var inputSchema48 = {
|
|
6436
7064
|
type: "object",
|
|
6437
|
-
properties: {
|
|
6438
|
-
image: ImageBinaryOrDataUriSchema({ title: "Image", description: "Source image" })
|
|
6439
|
-
},
|
|
7065
|
+
properties: { image: GpuImageSchema8({ title: "Image", description: "Source image" }) },
|
|
6440
7066
|
required: ["image"],
|
|
6441
7067
|
additionalProperties: false
|
|
6442
7068
|
};
|
|
6443
7069
|
var outputSchema47 = {
|
|
6444
7070
|
type: "object",
|
|
6445
|
-
properties: {
|
|
6446
|
-
image: ImageBinaryOrDataUriSchema({ title: "Image", description: "Inverted image" })
|
|
6447
|
-
},
|
|
7071
|
+
properties: { image: GpuImageSchema8({ title: "Image", description: "Inverted image" }) },
|
|
6448
7072
|
required: ["image"],
|
|
6449
7073
|
additionalProperties: false
|
|
6450
7074
|
};
|
|
6451
7075
|
|
|
6452
|
-
class ImageInvertTask extends
|
|
7076
|
+
class ImageInvertTask extends ImageFilterTask {
|
|
6453
7077
|
static type = "ImageInvertTask";
|
|
6454
7078
|
static category = "Image";
|
|
6455
7079
|
static title = "Invert Colors";
|
|
@@ -6460,64 +7084,26 @@ class ImageInvertTask extends Task49 {
|
|
|
6460
7084
|
static outputSchema() {
|
|
6461
7085
|
return outputSchema47;
|
|
6462
7086
|
}
|
|
6463
|
-
|
|
6464
|
-
|
|
6465
|
-
|
|
6466
|
-
async executePreview(input, _context) {
|
|
6467
|
-
return await invertImage(input);
|
|
7087
|
+
filterName = "invert";
|
|
7088
|
+
opParams(_input) {
|
|
7089
|
+
return;
|
|
6468
7090
|
}
|
|
6469
7091
|
}
|
|
6470
7092
|
Workflow24.prototype.imageInvert = CreateWorkflow23(ImageInvertTask);
|
|
6471
|
-
// src/task/image/ImagePixelateTask.ts
|
|
6472
|
-
import {
|
|
6473
|
-
|
|
6474
|
-
Task as Task50,
|
|
6475
|
-
Workflow as Workflow25
|
|
6476
|
-
} from "@workglow/task-graph";
|
|
6477
|
-
async function pixelateImage(input) {
|
|
6478
|
-
const { blockSize = 8 } = input;
|
|
6479
|
-
const image = await produceImageOutput(input.image, (img) => {
|
|
6480
|
-
const { data: src, width, height, channels } = img;
|
|
6481
|
-
const dst = new Uint8ClampedArray(src.length);
|
|
6482
|
-
for (let by = 0;by < height; by += blockSize) {
|
|
6483
|
-
const blockH = Math.min(blockSize, height - by);
|
|
6484
|
-
for (let bx = 0;bx < width; bx += blockSize) {
|
|
6485
|
-
const blockW = Math.min(blockSize, width - bx);
|
|
6486
|
-
const blockArea = blockW * blockH;
|
|
6487
|
-
const sums = new Array(channels).fill(0);
|
|
6488
|
-
for (let y = by;y < by + blockH; y++) {
|
|
6489
|
-
for (let x = bx;x < bx + blockW; x++) {
|
|
6490
|
-
const idx = (y * width + x) * channels;
|
|
6491
|
-
for (let c = 0;c < channels; c++) {
|
|
6492
|
-
sums[c] += src[idx + c];
|
|
6493
|
-
}
|
|
6494
|
-
}
|
|
6495
|
-
}
|
|
6496
|
-
const avg = sums.map((s) => s / blockArea + 0.5 | 0);
|
|
6497
|
-
for (let y = by;y < by + blockH; y++) {
|
|
6498
|
-
for (let x = bx;x < bx + blockW; x++) {
|
|
6499
|
-
const idx = (y * width + x) * channels;
|
|
6500
|
-
for (let c = 0;c < channels; c++) {
|
|
6501
|
-
dst[idx + c] = avg[c];
|
|
6502
|
-
}
|
|
6503
|
-
}
|
|
6504
|
-
}
|
|
6505
|
-
}
|
|
6506
|
-
}
|
|
6507
|
-
return { data: dst, width, height, channels };
|
|
6508
|
-
});
|
|
6509
|
-
return { image };
|
|
6510
|
-
}
|
|
7093
|
+
// src/task/image/pixelate/ImagePixelateTask.ts
|
|
7094
|
+
import { CreateWorkflow as CreateWorkflow24, Workflow as Workflow25 } from "@workglow/task-graph";
|
|
7095
|
+
import { GpuImageSchema as GpuImageSchema9 } from "@workglow/util/media";
|
|
6511
7096
|
var inputSchema49 = {
|
|
6512
7097
|
type: "object",
|
|
6513
7098
|
properties: {
|
|
6514
|
-
image:
|
|
7099
|
+
image: GpuImageSchema9({ title: "Image", description: "Source image" }),
|
|
6515
7100
|
blockSize: {
|
|
6516
7101
|
type: "integer",
|
|
6517
7102
|
title: "Block Size",
|
|
6518
7103
|
description: "Size of each pixelation block",
|
|
6519
7104
|
minimum: 2,
|
|
6520
|
-
|
|
7105
|
+
maximum: 64,
|
|
7106
|
+
default: 4
|
|
6521
7107
|
}
|
|
6522
7108
|
},
|
|
6523
7109
|
required: ["image"],
|
|
@@ -6525,14 +7111,12 @@ var inputSchema49 = {
|
|
|
6525
7111
|
};
|
|
6526
7112
|
var outputSchema48 = {
|
|
6527
7113
|
type: "object",
|
|
6528
|
-
properties: {
|
|
6529
|
-
image: ImageBinaryOrDataUriSchema({ title: "Image", description: "Pixelated image" })
|
|
6530
|
-
},
|
|
7114
|
+
properties: { image: GpuImageSchema9({ title: "Image", description: "Pixelated image" }) },
|
|
6531
7115
|
required: ["image"],
|
|
6532
7116
|
additionalProperties: false
|
|
6533
7117
|
};
|
|
6534
7118
|
|
|
6535
|
-
class ImagePixelateTask extends
|
|
7119
|
+
class ImagePixelateTask extends ImageFilterTask {
|
|
6536
7120
|
static type = "ImagePixelateTask";
|
|
6537
7121
|
static category = "Image";
|
|
6538
7122
|
static title = "Pixelate Image";
|
|
@@ -6543,56 +7127,28 @@ class ImagePixelateTask extends Task50 {
|
|
|
6543
7127
|
static outputSchema() {
|
|
6544
7128
|
return outputSchema48;
|
|
6545
7129
|
}
|
|
6546
|
-
|
|
6547
|
-
|
|
7130
|
+
filterName = "pixelate";
|
|
7131
|
+
opParams(input) {
|
|
7132
|
+
return { blockSize: input.blockSize ?? 4 };
|
|
6548
7133
|
}
|
|
6549
|
-
|
|
6550
|
-
return
|
|
7134
|
+
scalePreviewParams({ blockSize }, s) {
|
|
7135
|
+
return { blockSize: Math.max(1, Math.round(blockSize * s)) };
|
|
6551
7136
|
}
|
|
6552
7137
|
}
|
|
6553
7138
|
Workflow25.prototype.imagePixelate = CreateWorkflow24(ImagePixelateTask);
|
|
6554
|
-
// src/task/image/ImagePosterizeTask.ts
|
|
6555
|
-
import {
|
|
6556
|
-
|
|
6557
|
-
Task as Task51,
|
|
6558
|
-
Workflow as Workflow26
|
|
6559
|
-
} from "@workglow/task-graph";
|
|
6560
|
-
async function posterizeImage(input) {
|
|
6561
|
-
const levels = input.levels ?? 4;
|
|
6562
|
-
const image = await produceImageOutput(input.image, (img) => {
|
|
6563
|
-
const { data: src, width, height, channels } = img;
|
|
6564
|
-
const step = 255 / (levels - 1);
|
|
6565
|
-
const lut = new Uint8ClampedArray(256);
|
|
6566
|
-
for (let i = 0;i < 256; i++) {
|
|
6567
|
-
lut[i] = Math.round(Math.round(i / step) * step);
|
|
6568
|
-
}
|
|
6569
|
-
const dst = new Uint8ClampedArray(src.length);
|
|
6570
|
-
if (channels === 4) {
|
|
6571
|
-
for (let i = 0;i < src.length; i += 4) {
|
|
6572
|
-
dst[i] = lut[src[i]];
|
|
6573
|
-
dst[i + 1] = lut[src[i + 1]];
|
|
6574
|
-
dst[i + 2] = lut[src[i + 2]];
|
|
6575
|
-
dst[i + 3] = src[i + 3];
|
|
6576
|
-
}
|
|
6577
|
-
} else {
|
|
6578
|
-
for (let i = 0;i < src.length; i++) {
|
|
6579
|
-
dst[i] = lut[src[i]];
|
|
6580
|
-
}
|
|
6581
|
-
}
|
|
6582
|
-
return { data: dst, width, height, channels };
|
|
6583
|
-
});
|
|
6584
|
-
return { image };
|
|
6585
|
-
}
|
|
7139
|
+
// src/task/image/posterize/ImagePosterizeTask.ts
|
|
7140
|
+
import { CreateWorkflow as CreateWorkflow25, Workflow as Workflow26 } from "@workglow/task-graph";
|
|
7141
|
+
import { GpuImageSchema as GpuImageSchema10 } from "@workglow/util/media";
|
|
6586
7142
|
var inputSchema50 = {
|
|
6587
7143
|
type: "object",
|
|
6588
7144
|
properties: {
|
|
6589
|
-
image:
|
|
7145
|
+
image: GpuImageSchema10({ title: "Image", description: "Source image" }),
|
|
6590
7146
|
levels: {
|
|
6591
7147
|
type: "integer",
|
|
6592
7148
|
title: "Levels",
|
|
6593
|
-
description: "Number of color levels per channel (2-
|
|
7149
|
+
description: "Number of color levels per channel (2-16)",
|
|
6594
7150
|
minimum: 2,
|
|
6595
|
-
maximum:
|
|
7151
|
+
maximum: 16,
|
|
6596
7152
|
default: 4
|
|
6597
7153
|
}
|
|
6598
7154
|
},
|
|
@@ -6601,14 +7157,12 @@ var inputSchema50 = {
|
|
|
6601
7157
|
};
|
|
6602
7158
|
var outputSchema49 = {
|
|
6603
7159
|
type: "object",
|
|
6604
|
-
properties: {
|
|
6605
|
-
image: ImageBinaryOrDataUriSchema({ title: "Image", description: "Posterized image" })
|
|
6606
|
-
},
|
|
7160
|
+
properties: { image: GpuImageSchema10({ title: "Image", description: "Posterized image" }) },
|
|
6607
7161
|
required: ["image"],
|
|
6608
7162
|
additionalProperties: false
|
|
6609
7163
|
};
|
|
6610
7164
|
|
|
6611
|
-
class ImagePosterizeTask extends
|
|
7165
|
+
class ImagePosterizeTask extends ImageFilterTask {
|
|
6612
7166
|
static type = "ImagePosterizeTask";
|
|
6613
7167
|
static category = "Image";
|
|
6614
7168
|
static title = "Posterize";
|
|
@@ -6619,52 +7173,34 @@ class ImagePosterizeTask extends Task51 {
|
|
|
6619
7173
|
static outputSchema() {
|
|
6620
7174
|
return outputSchema49;
|
|
6621
7175
|
}
|
|
6622
|
-
|
|
6623
|
-
|
|
6624
|
-
|
|
6625
|
-
async executePreview(input, _context) {
|
|
6626
|
-
return await posterizeImage(input);
|
|
7176
|
+
filterName = "posterize";
|
|
7177
|
+
opParams(input) {
|
|
7178
|
+
return { levels: input.levels ?? 4 };
|
|
6627
7179
|
}
|
|
6628
7180
|
}
|
|
6629
7181
|
Workflow26.prototype.imagePosterize = CreateWorkflow25(ImagePosterizeTask);
|
|
6630
7182
|
// src/task/image/imageRasterCodecRegistry.ts
|
|
6631
|
-
import { getImageRasterCodec
|
|
6632
|
-
// src/task/image/ImageResizeTask.ts
|
|
6633
|
-
import {
|
|
6634
|
-
|
|
6635
|
-
Task as Task52,
|
|
6636
|
-
Workflow as Workflow27
|
|
6637
|
-
} from "@workglow/task-graph";
|
|
6638
|
-
async function resizeImage(input) {
|
|
6639
|
-
const { width: dstW, height: dstH } = input;
|
|
6640
|
-
const image = await produceImageOutput(input.image, (img) => {
|
|
6641
|
-
const { data: src, width: srcW, height: srcH, channels } = img;
|
|
6642
|
-
const dst = new Uint8ClampedArray(dstW * dstH * channels);
|
|
6643
|
-
for (let dy = 0;dy < dstH; dy++) {
|
|
6644
|
-
const srcY = Math.min(Math.floor(dy * srcH / dstH), srcH - 1);
|
|
6645
|
-
for (let dx = 0;dx < dstW; dx++) {
|
|
6646
|
-
const srcX = Math.min(Math.floor(dx * srcW / dstW), srcW - 1);
|
|
6647
|
-
const srcIdx = (srcY * srcW + srcX) * channels;
|
|
6648
|
-
const dstIdx = (dy * dstW + dx) * channels;
|
|
6649
|
-
for (let c = 0;c < channels; c++) {
|
|
6650
|
-
dst[dstIdx + c] = src[srcIdx + c];
|
|
6651
|
-
}
|
|
6652
|
-
}
|
|
6653
|
-
}
|
|
6654
|
-
return { data: dst, width: dstW, height: dstH, channels };
|
|
6655
|
-
});
|
|
6656
|
-
return { image };
|
|
6657
|
-
}
|
|
7183
|
+
import { getImageRasterCodec, registerImageRasterCodec as registerImageRasterCodec2 } from "@workglow/util/media";
|
|
7184
|
+
// src/task/image/resize/ImageResizeTask.ts
|
|
7185
|
+
import { CreateWorkflow as CreateWorkflow26, Workflow as Workflow27 } from "@workglow/task-graph";
|
|
7186
|
+
import { GpuImageSchema as GpuImageSchema11 } from "@workglow/util/media";
|
|
6658
7187
|
var inputSchema51 = {
|
|
6659
7188
|
type: "object",
|
|
6660
7189
|
properties: {
|
|
6661
|
-
image:
|
|
7190
|
+
image: GpuImageSchema11({ title: "Image", description: "Source image" }),
|
|
6662
7191
|
width: { type: "integer", title: "Width", description: "Target width in pixels", minimum: 1 },
|
|
6663
|
-
height: {
|
|
6664
|
-
|
|
6665
|
-
|
|
6666
|
-
|
|
6667
|
-
|
|
7192
|
+
height: { type: "integer", title: "Height", description: "Target height in pixels", minimum: 1 },
|
|
7193
|
+
fit: {
|
|
7194
|
+
type: "string",
|
|
7195
|
+
enum: ["cover", "contain", "fill", "inside", "outside"],
|
|
7196
|
+
title: "Fit",
|
|
7197
|
+
description: "How the image should be resized to fit"
|
|
7198
|
+
},
|
|
7199
|
+
kernel: {
|
|
7200
|
+
type: "string",
|
|
7201
|
+
enum: ["nearest", "cubic", "mitchell", "lanczos2", "lanczos3"],
|
|
7202
|
+
title: "Kernel",
|
|
7203
|
+
description: "Resampling kernel"
|
|
6668
7204
|
}
|
|
6669
7205
|
},
|
|
6670
7206
|
required: ["image", "width", "height"],
|
|
@@ -6672,14 +7208,12 @@ var inputSchema51 = {
|
|
|
6672
7208
|
};
|
|
6673
7209
|
var outputSchema50 = {
|
|
6674
7210
|
type: "object",
|
|
6675
|
-
properties: {
|
|
6676
|
-
image: ImageBinaryOrDataUriSchema({ title: "Image", description: "Resized image" })
|
|
6677
|
-
},
|
|
7211
|
+
properties: { image: GpuImageSchema11({ title: "Image", description: "Resized image" }) },
|
|
6678
7212
|
required: ["image"],
|
|
6679
7213
|
additionalProperties: false
|
|
6680
7214
|
};
|
|
6681
7215
|
|
|
6682
|
-
class ImageResizeTask extends
|
|
7216
|
+
class ImageResizeTask extends ImageFilterTask {
|
|
6683
7217
|
static type = "ImageResizeTask";
|
|
6684
7218
|
static category = "Image";
|
|
6685
7219
|
static title = "Resize Image";
|
|
@@ -6690,61 +7224,42 @@ class ImageResizeTask extends Task52 {
|
|
|
6690
7224
|
static outputSchema() {
|
|
6691
7225
|
return outputSchema50;
|
|
6692
7226
|
}
|
|
6693
|
-
|
|
6694
|
-
|
|
7227
|
+
filterName = "resize";
|
|
7228
|
+
opParams(input) {
|
|
7229
|
+
return {
|
|
7230
|
+
width: input.width,
|
|
7231
|
+
height: input.height,
|
|
7232
|
+
fit: input.fit,
|
|
7233
|
+
kernel: input.kernel
|
|
7234
|
+
};
|
|
6695
7235
|
}
|
|
6696
|
-
|
|
6697
|
-
return
|
|
7236
|
+
scalePreviewParams({ width, height, fit, kernel }, s) {
|
|
7237
|
+
return {
|
|
7238
|
+
width: Math.max(1, Math.round(width * s)),
|
|
7239
|
+
height: Math.max(1, Math.round(height * s)),
|
|
7240
|
+
fit,
|
|
7241
|
+
kernel
|
|
7242
|
+
};
|
|
6698
7243
|
}
|
|
6699
7244
|
}
|
|
6700
7245
|
Workflow27.prototype.imageResize = CreateWorkflow26(ImageResizeTask);
|
|
6701
|
-
// src/task/image/ImageRotateTask.ts
|
|
6702
|
-
import {
|
|
6703
|
-
|
|
6704
|
-
Task as Task53,
|
|
6705
|
-
Workflow as Workflow28
|
|
6706
|
-
} from "@workglow/task-graph";
|
|
6707
|
-
async function rotateImage(input) {
|
|
6708
|
-
const { angle } = input;
|
|
6709
|
-
const image = await produceImageOutput(input.image, (img) => {
|
|
6710
|
-
const { data: src, width: srcW, height: srcH, channels } = img;
|
|
6711
|
-
const swap = angle === 90 || angle === 270;
|
|
6712
|
-
const dstW = swap ? srcH : srcW;
|
|
6713
|
-
const dstH = swap ? srcW : srcH;
|
|
6714
|
-
const dst = new Uint8ClampedArray(dstW * dstH * channels);
|
|
6715
|
-
for (let sy = 0;sy < srcH; sy++) {
|
|
6716
|
-
for (let sx = 0;sx < srcW; sx++) {
|
|
6717
|
-
let dx, dy;
|
|
6718
|
-
if (angle === 90) {
|
|
6719
|
-
dx = srcH - 1 - sy;
|
|
6720
|
-
dy = sx;
|
|
6721
|
-
} else if (angle === 180) {
|
|
6722
|
-
dx = srcW - 1 - sx;
|
|
6723
|
-
dy = srcH - 1 - sy;
|
|
6724
|
-
} else {
|
|
6725
|
-
dx = sy;
|
|
6726
|
-
dy = srcW - 1 - sx;
|
|
6727
|
-
}
|
|
6728
|
-
const srcIdx = (sy * srcW + sx) * channels;
|
|
6729
|
-
const dstIdx = (dy * dstW + dx) * channels;
|
|
6730
|
-
for (let c = 0;c < channels; c++) {
|
|
6731
|
-
dst[dstIdx + c] = src[srcIdx + c];
|
|
6732
|
-
}
|
|
6733
|
-
}
|
|
6734
|
-
}
|
|
6735
|
-
return { data: dst, width: dstW, height: dstH, channels };
|
|
6736
|
-
});
|
|
6737
|
-
return { image };
|
|
6738
|
-
}
|
|
7246
|
+
// src/task/image/rotate/ImageRotateTask.ts
|
|
7247
|
+
import { CreateWorkflow as CreateWorkflow27, Workflow as Workflow28 } from "@workglow/task-graph";
|
|
7248
|
+
import { GpuImageSchema as GpuImageSchema12 } from "@workglow/util/media";
|
|
6739
7249
|
var inputSchema52 = {
|
|
6740
7250
|
type: "object",
|
|
6741
7251
|
properties: {
|
|
6742
|
-
image:
|
|
7252
|
+
image: GpuImageSchema12({ title: "Image", description: "Source image" }),
|
|
6743
7253
|
angle: {
|
|
6744
7254
|
type: "integer",
|
|
6745
7255
|
enum: [90, 180, 270],
|
|
6746
7256
|
title: "Angle",
|
|
6747
7257
|
description: "Rotation angle in degrees (clockwise)"
|
|
7258
|
+
},
|
|
7259
|
+
background: {
|
|
7260
|
+
type: "string",
|
|
7261
|
+
title: "Background",
|
|
7262
|
+
description: "Background color for rotation (hex string)"
|
|
6748
7263
|
}
|
|
6749
7264
|
},
|
|
6750
7265
|
required: ["image", "angle"],
|
|
@@ -6752,14 +7267,12 @@ var inputSchema52 = {
|
|
|
6752
7267
|
};
|
|
6753
7268
|
var outputSchema51 = {
|
|
6754
7269
|
type: "object",
|
|
6755
|
-
properties: {
|
|
6756
|
-
image: ImageBinaryOrDataUriSchema({ title: "Image", description: "Rotated image" })
|
|
6757
|
-
},
|
|
7270
|
+
properties: { image: GpuImageSchema12({ title: "Image", description: "Rotated image" }) },
|
|
6758
7271
|
required: ["image"],
|
|
6759
7272
|
additionalProperties: false
|
|
6760
7273
|
};
|
|
6761
7274
|
|
|
6762
|
-
class ImageRotateTask extends
|
|
7275
|
+
class ImageRotateTask extends ImageFilterTask {
|
|
6763
7276
|
static type = "ImageRotateTask";
|
|
6764
7277
|
static category = "Image";
|
|
6765
7278
|
static title = "Rotate Image";
|
|
@@ -6770,64 +7283,87 @@ class ImageRotateTask extends Task53 {
|
|
|
6770
7283
|
static outputSchema() {
|
|
6771
7284
|
return outputSchema51;
|
|
6772
7285
|
}
|
|
6773
|
-
|
|
6774
|
-
|
|
6775
|
-
|
|
6776
|
-
|
|
6777
|
-
|
|
7286
|
+
filterName = "rotate";
|
|
7287
|
+
opParams(input) {
|
|
7288
|
+
return {
|
|
7289
|
+
angle: input.angle,
|
|
7290
|
+
background: input.background
|
|
7291
|
+
};
|
|
6778
7292
|
}
|
|
6779
7293
|
}
|
|
6780
7294
|
Workflow28.prototype.imageRotate = CreateWorkflow27(ImageRotateTask);
|
|
6781
|
-
// src/task/image/
|
|
6782
|
-
import {
|
|
6783
|
-
|
|
6784
|
-
|
|
6785
|
-
|
|
6786
|
-
}
|
|
6787
|
-
|
|
6788
|
-
|
|
6789
|
-
|
|
6790
|
-
|
|
6791
|
-
|
|
6792
|
-
|
|
6793
|
-
|
|
6794
|
-
|
|
6795
|
-
|
|
6796
|
-
|
|
6797
|
-
|
|
6798
|
-
|
|
6799
|
-
|
|
6800
|
-
|
|
6801
|
-
|
|
6802
|
-
|
|
6803
|
-
|
|
6804
|
-
|
|
6805
|
-
|
|
6806
|
-
|
|
6807
|
-
|
|
7295
|
+
// src/task/image/ImageSchemas.ts
|
|
7296
|
+
import { FromSchemaDefaultOptions } from "@workglow/util/schema";
|
|
7297
|
+
var cssRgbChannelPattern = "(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]?\\d)";
|
|
7298
|
+
var cssRgbAlphaPattern = "(?:0(?:\\.\\d+)?|1(?:\\.0+)?)";
|
|
7299
|
+
var cssRgbColorPattern = `^rgba?\\(\\s*${cssRgbChannelPattern}\\s*,\\s*${cssRgbChannelPattern}\\s*,\\s*` + `${cssRgbChannelPattern}\\s*(?:,\\s*${cssRgbAlphaPattern})?\\s*\\)$`;
|
|
7300
|
+
var ColorSchema = (annotations = {}) => ({
|
|
7301
|
+
type: "object",
|
|
7302
|
+
properties: {
|
|
7303
|
+
r: { type: "integer", minimum: 0, maximum: 255, title: "Red" },
|
|
7304
|
+
g: { type: "integer", minimum: 0, maximum: 255, title: "Green" },
|
|
7305
|
+
b: { type: "integer", minimum: 0, maximum: 255, title: "Blue" },
|
|
7306
|
+
a: { type: "integer", minimum: 0, maximum: 255, title: "Alpha", default: 255 }
|
|
7307
|
+
},
|
|
7308
|
+
required: ["r", "g", "b"],
|
|
7309
|
+
format: "color",
|
|
7310
|
+
additionalProperties: false,
|
|
7311
|
+
...annotations
|
|
7312
|
+
});
|
|
7313
|
+
var HexColorSchema = (annotations = {}) => ({
|
|
7314
|
+
type: "string",
|
|
7315
|
+
format: "color",
|
|
7316
|
+
pattern: "^#([0-9a-fA-F]{3,4}|[0-9a-fA-F]{6}|[0-9a-fA-F]{8})$",
|
|
7317
|
+
title: "Color (hex)",
|
|
7318
|
+
description: "Color as a `#RRGGBB[AA]` or `#RGB[A]` hex string",
|
|
7319
|
+
...annotations
|
|
7320
|
+
});
|
|
7321
|
+
var CssRgbColorSchema = (annotations = {}) => ({
|
|
7322
|
+
type: "string",
|
|
7323
|
+
format: "color",
|
|
7324
|
+
pattern: cssRgbColorPattern,
|
|
7325
|
+
title: "Color (RGB)",
|
|
7326
|
+
description: "Color as a CSS `rgb(r,g,b)` or `rgba(r,g,b,a)` string",
|
|
7327
|
+
...annotations
|
|
7328
|
+
});
|
|
7329
|
+
var ColorValueSchema = (annotations = {}) => ({
|
|
7330
|
+
oneOf: [
|
|
7331
|
+
ColorSchema(),
|
|
7332
|
+
HexColorSchema({
|
|
7333
|
+
title: annotations.title ?? "Color",
|
|
7334
|
+
description: annotations.description ?? "Color as {r,g,b,a} object, `#RRGGBB[AA]` / `#RGB[A]` hex string, or CSS `rgb(...)` / `rgba(...)` string"
|
|
7335
|
+
}),
|
|
7336
|
+
CssRgbColorSchema()
|
|
7337
|
+
],
|
|
7338
|
+
...annotations
|
|
7339
|
+
});
|
|
7340
|
+
var ColorObjectType = null;
|
|
7341
|
+
var ColorFromSchemaOptions = {
|
|
7342
|
+
...FromSchemaDefaultOptions,
|
|
7343
|
+
deserialize: [
|
|
7344
|
+
{
|
|
7345
|
+
pattern: { type: "object", format: "color" },
|
|
7346
|
+
output: ColorObjectType
|
|
6808
7347
|
}
|
|
6809
|
-
|
|
6810
|
-
|
|
6811
|
-
|
|
6812
|
-
}
|
|
7348
|
+
]
|
|
7349
|
+
};
|
|
7350
|
+
// src/task/image/sepia/ImageSepiaTask.ts
|
|
7351
|
+
import { CreateWorkflow as CreateWorkflow28, Workflow as Workflow29 } from "@workglow/task-graph";
|
|
7352
|
+
import { GpuImageSchema as GpuImageSchema13 } from "@workglow/util/media";
|
|
6813
7353
|
var inputSchema53 = {
|
|
6814
7354
|
type: "object",
|
|
6815
|
-
properties: {
|
|
6816
|
-
image: ImageBinaryOrDataUriSchema({ title: "Image", description: "Source image" })
|
|
6817
|
-
},
|
|
7355
|
+
properties: { image: GpuImageSchema13({ title: "Image", description: "Source image" }) },
|
|
6818
7356
|
required: ["image"],
|
|
6819
7357
|
additionalProperties: false
|
|
6820
7358
|
};
|
|
6821
7359
|
var outputSchema52 = {
|
|
6822
7360
|
type: "object",
|
|
6823
|
-
properties: {
|
|
6824
|
-
image: ImageBinaryOrDataUriSchema({ title: "Image", description: "Sepia-toned image" })
|
|
6825
|
-
},
|
|
7361
|
+
properties: { image: GpuImageSchema13({ title: "Image", description: "Sepia-toned image" }) },
|
|
6826
7362
|
required: ["image"],
|
|
6827
7363
|
additionalProperties: false
|
|
6828
7364
|
};
|
|
6829
7365
|
|
|
6830
|
-
class ImageSepiaTask extends
|
|
7366
|
+
class ImageSepiaTask extends ImageFilterTask {
|
|
6831
7367
|
static type = "ImageSepiaTask";
|
|
6832
7368
|
static category = "Image";
|
|
6833
7369
|
static title = "Sepia Tone";
|
|
@@ -6838,21 +7374,24 @@ class ImageSepiaTask extends Task54 {
|
|
|
6838
7374
|
static outputSchema() {
|
|
6839
7375
|
return outputSchema52;
|
|
6840
7376
|
}
|
|
6841
|
-
|
|
6842
|
-
|
|
6843
|
-
|
|
6844
|
-
async executePreview(input, _context) {
|
|
6845
|
-
return await applySepia(input);
|
|
7377
|
+
filterName = "sepia";
|
|
7378
|
+
opParams(_input) {
|
|
7379
|
+
return;
|
|
6846
7380
|
}
|
|
6847
7381
|
}
|
|
6848
7382
|
Workflow29.prototype.imageSepia = CreateWorkflow28(ImageSepiaTask);
|
|
6849
|
-
// src/task/image/ImageTextTask.ts
|
|
7383
|
+
// src/task/image/text/ImageTextTask.ts
|
|
6850
7384
|
import {
|
|
6851
7385
|
CreateWorkflow as CreateWorkflow29,
|
|
6852
|
-
Task as
|
|
7386
|
+
Task as Task43,
|
|
6853
7387
|
Workflow as Workflow30
|
|
6854
7388
|
} from "@workglow/task-graph";
|
|
6855
|
-
import {
|
|
7389
|
+
import {
|
|
7390
|
+
CpuImage as CpuImage18,
|
|
7391
|
+
getPreviewBudget,
|
|
7392
|
+
GpuImageSchema as GpuImageSchema14,
|
|
7393
|
+
resolveColor as resolveColor5
|
|
7394
|
+
} from "@workglow/util/media";
|
|
6856
7395
|
function toRgbaImage(image) {
|
|
6857
7396
|
const { data, width, height, channels } = image;
|
|
6858
7397
|
const rgba = new Uint8ClampedArray(width * height * 4);
|
|
@@ -6927,7 +7466,7 @@ var IMAGE_TEXT_POSITION_LABELS = {
|
|
|
6927
7466
|
"bottom-center": "Bottom center",
|
|
6928
7467
|
"bottom-right": "Bottom right"
|
|
6929
7468
|
};
|
|
6930
|
-
var backgroundImageProperty =
|
|
7469
|
+
var backgroundImageProperty = GpuImageSchema14({
|
|
6931
7470
|
title: "Image",
|
|
6932
7471
|
description: "Background image to render the text onto"
|
|
6933
7472
|
});
|
|
@@ -6992,65 +7531,73 @@ var inputSchema54 = {
|
|
|
6992
7531
|
var outputSchema53 = {
|
|
6993
7532
|
type: "object",
|
|
6994
7533
|
properties: {
|
|
6995
|
-
image:
|
|
7534
|
+
image: GpuImageSchema14({ title: "Image", description: "Raster image with text" })
|
|
6996
7535
|
},
|
|
6997
7536
|
required: ["image"],
|
|
6998
7537
|
additionalProperties: false
|
|
6999
7538
|
};
|
|
7000
|
-
function
|
|
7001
|
-
|
|
7002
|
-
|
|
7003
|
-
|
|
7004
|
-
|
|
7005
|
-
|
|
7006
|
-
|
|
7007
|
-
|
|
7008
|
-
|
|
7539
|
+
function resolveTextParams(input) {
|
|
7540
|
+
return {
|
|
7541
|
+
text: input.text,
|
|
7542
|
+
font: input.font ?? "sans-serif",
|
|
7543
|
+
fontSize: input.fontSize ?? 24,
|
|
7544
|
+
bold: input.bold ?? false,
|
|
7545
|
+
italic: input.italic ?? false,
|
|
7546
|
+
color: resolveColor5(input.color),
|
|
7547
|
+
position: input.position ?? "middle-center"
|
|
7548
|
+
};
|
|
7009
7549
|
}
|
|
7010
|
-
|
|
7011
|
-
|
|
7012
|
-
|
|
7013
|
-
|
|
7014
|
-
|
|
7015
|
-
|
|
7016
|
-
|
|
7550
|
+
function requireStandaloneDims(input) {
|
|
7551
|
+
if (!("width" in input) || !("height" in input) || typeof input.width !== "number" || typeof input.height !== "number") {
|
|
7552
|
+
throw new Error("ImageTextTask: width and height are required when no background image is provided");
|
|
7553
|
+
}
|
|
7554
|
+
return { width: input.width, height: input.height };
|
|
7555
|
+
}
|
|
7556
|
+
async function renderTextOverBackground(params, backgroundImage, previewScale) {
|
|
7557
|
+
const background = await backgroundImage.materialize();
|
|
7558
|
+
const overlay = await renderImageTextToRgba({
|
|
7559
|
+
text: params.text,
|
|
7560
|
+
font: params.font,
|
|
7561
|
+
fontSize: Math.max(1, Math.round(params.fontSize * previewScale)),
|
|
7562
|
+
bold: params.bold,
|
|
7563
|
+
italic: params.italic,
|
|
7564
|
+
color: params.color,
|
|
7565
|
+
width: background.width,
|
|
7566
|
+
height: background.height,
|
|
7567
|
+
position: params.position
|
|
7568
|
+
});
|
|
7569
|
+
const composited = compositeTextOverBackground(background, overlay);
|
|
7570
|
+
return {
|
|
7571
|
+
image: CpuImage18.fromImageBinary(composited, previewScale)
|
|
7572
|
+
};
|
|
7573
|
+
}
|
|
7574
|
+
async function renderTextStandalone(params, width, height, previewScale) {
|
|
7575
|
+
const textBinary = await renderImageTextToRgba({
|
|
7576
|
+
text: params.text,
|
|
7577
|
+
font: params.font,
|
|
7578
|
+
fontSize: Math.max(1, Math.round(params.fontSize * previewScale)),
|
|
7579
|
+
bold: params.bold,
|
|
7580
|
+
italic: params.italic,
|
|
7581
|
+
color: params.color,
|
|
7582
|
+
width: Math.max(1, Math.round(width * previewScale)),
|
|
7583
|
+
height: Math.max(1, Math.round(height * previewScale)),
|
|
7584
|
+
position: params.position
|
|
7585
|
+
});
|
|
7586
|
+
return {
|
|
7587
|
+
image: CpuImage18.fromImageBinary(textBinary, previewScale)
|
|
7588
|
+
};
|
|
7589
|
+
}
|
|
7590
|
+
async function runText(input) {
|
|
7591
|
+
const params = resolveTextParams(input);
|
|
7017
7592
|
const backgroundImage = "image" in input ? input.image : undefined;
|
|
7018
|
-
|
|
7019
|
-
|
|
7020
|
-
image = await produceImageOutput(backgroundImage, async (background) => {
|
|
7021
|
-
const overlay = await renderImageTextToRgba({
|
|
7022
|
-
text: input.text,
|
|
7023
|
-
font,
|
|
7024
|
-
fontSize,
|
|
7025
|
-
bold,
|
|
7026
|
-
italic,
|
|
7027
|
-
color,
|
|
7028
|
-
width: background.width,
|
|
7029
|
-
height: background.height,
|
|
7030
|
-
position
|
|
7031
|
-
});
|
|
7032
|
-
return compositeTextOverBackground(background, overlay);
|
|
7033
|
-
});
|
|
7034
|
-
} else {
|
|
7035
|
-
if (!("width" in input) || !("height" in input) || typeof input.width !== "number" || typeof input.height !== "number") {
|
|
7036
|
-
throw new Error("ImageTextTask: width and height are required when no background image is provided");
|
|
7037
|
-
}
|
|
7038
|
-
image = await renderImageTextToRgba({
|
|
7039
|
-
text: input.text,
|
|
7040
|
-
font,
|
|
7041
|
-
fontSize,
|
|
7042
|
-
bold,
|
|
7043
|
-
italic,
|
|
7044
|
-
color,
|
|
7045
|
-
width: input.width,
|
|
7046
|
-
height: input.height,
|
|
7047
|
-
position
|
|
7048
|
-
});
|
|
7593
|
+
if (backgroundImage != null) {
|
|
7594
|
+
return renderTextOverBackground(params, backgroundImage, 1);
|
|
7049
7595
|
}
|
|
7050
|
-
|
|
7596
|
+
const { width, height } = requireStandaloneDims(input);
|
|
7597
|
+
return renderTextStandalone(params, width, height, 1);
|
|
7051
7598
|
}
|
|
7052
7599
|
|
|
7053
|
-
class ImageTextTask extends
|
|
7600
|
+
class ImageTextTask extends Task43 {
|
|
7054
7601
|
static type = "ImageTextTask";
|
|
7055
7602
|
static category = "Image";
|
|
7056
7603
|
static title = "Render Text to Image";
|
|
@@ -7069,46 +7616,32 @@ class ImageTextTask extends Task55 {
|
|
|
7069
7616
|
return defaults;
|
|
7070
7617
|
}
|
|
7071
7618
|
async execute(input, _context) {
|
|
7072
|
-
return await
|
|
7619
|
+
return await runText(input);
|
|
7073
7620
|
}
|
|
7074
7621
|
async executePreview(input, _context) {
|
|
7075
|
-
|
|
7622
|
+
const params = resolveTextParams(input);
|
|
7623
|
+
const backgroundImage = "image" in input ? input.image : undefined;
|
|
7624
|
+
if (backgroundImage != null) {
|
|
7625
|
+
return await renderTextOverBackground(params, backgroundImage, backgroundImage.previewScale);
|
|
7626
|
+
}
|
|
7627
|
+
const { width, height } = requireStandaloneDims(input);
|
|
7628
|
+
const longEdge = Math.max(width, height);
|
|
7629
|
+
const budget = getPreviewBudget();
|
|
7630
|
+
const s = longEdge > budget ? budget / longEdge : 1;
|
|
7631
|
+
return await renderTextStandalone(params, width, height, s);
|
|
7076
7632
|
}
|
|
7077
7633
|
}
|
|
7078
7634
|
Workflow30.prototype.imageText = CreateWorkflow29(ImageTextTask);
|
|
7079
|
-
// src/task/image/ImageThresholdTask.ts
|
|
7080
|
-
import {
|
|
7081
|
-
|
|
7082
|
-
Task as Task56,
|
|
7083
|
-
Workflow as Workflow31
|
|
7084
|
-
} from "@workglow/task-graph";
|
|
7085
|
-
async function applyThreshold(input) {
|
|
7086
|
-
const threshold = input.threshold ?? 128;
|
|
7087
|
-
const image = await produceImageOutput(input.image, (img) => {
|
|
7088
|
-
const { data: src, width, height, channels } = img;
|
|
7089
|
-
const pixelCount = width * height;
|
|
7090
|
-
const dst = new Uint8ClampedArray(pixelCount);
|
|
7091
|
-
for (let i = 0;i < pixelCount; i++) {
|
|
7092
|
-
const idx = i * channels;
|
|
7093
|
-
let gray;
|
|
7094
|
-
if (channels === 1) {
|
|
7095
|
-
gray = src[idx];
|
|
7096
|
-
} else {
|
|
7097
|
-
gray = src[idx] * 77 + src[idx + 1] * 150 + src[idx + 2] * 29 >> 8;
|
|
7098
|
-
}
|
|
7099
|
-
dst[i] = gray >= threshold ? 255 : 0;
|
|
7100
|
-
}
|
|
7101
|
-
return { data: dst, width, height, channels: 1 };
|
|
7102
|
-
});
|
|
7103
|
-
return { image };
|
|
7104
|
-
}
|
|
7635
|
+
// src/task/image/threshold/ImageThresholdTask.ts
|
|
7636
|
+
import { CreateWorkflow as CreateWorkflow30, Workflow as Workflow31 } from "@workglow/task-graph";
|
|
7637
|
+
import { GpuImageSchema as GpuImageSchema15 } from "@workglow/util/media";
|
|
7105
7638
|
var inputSchema55 = {
|
|
7106
7639
|
type: "object",
|
|
7107
7640
|
properties: {
|
|
7108
|
-
image:
|
|
7109
|
-
|
|
7110
|
-
type: "
|
|
7111
|
-
title: "
|
|
7641
|
+
image: GpuImageSchema15({ title: "Image", description: "Source image" }),
|
|
7642
|
+
value: {
|
|
7643
|
+
type: "number",
|
|
7644
|
+
title: "Value",
|
|
7112
7645
|
description: "Threshold value (0-255)",
|
|
7113
7646
|
minimum: 0,
|
|
7114
7647
|
maximum: 255,
|
|
@@ -7120,77 +7653,35 @@ var inputSchema55 = {
|
|
|
7120
7653
|
};
|
|
7121
7654
|
var outputSchema54 = {
|
|
7122
7655
|
type: "object",
|
|
7123
|
-
properties: {
|
|
7124
|
-
image: ImageBinaryOrDataUriSchema({ title: "Image", description: "Thresholded binary image" })
|
|
7125
|
-
},
|
|
7656
|
+
properties: { image: GpuImageSchema15({ title: "Image", description: "Thresholded image" }) },
|
|
7126
7657
|
required: ["image"],
|
|
7127
7658
|
additionalProperties: false
|
|
7128
7659
|
};
|
|
7129
7660
|
|
|
7130
|
-
class ImageThresholdTask extends
|
|
7661
|
+
class ImageThresholdTask extends ImageFilterTask {
|
|
7131
7662
|
static type = "ImageThresholdTask";
|
|
7132
7663
|
static category = "Image";
|
|
7133
7664
|
static title = "Threshold";
|
|
7134
|
-
static description = "
|
|
7665
|
+
static description = "Applies a binary threshold per channel";
|
|
7135
7666
|
static inputSchema() {
|
|
7136
7667
|
return inputSchema55;
|
|
7137
7668
|
}
|
|
7138
7669
|
static outputSchema() {
|
|
7139
7670
|
return outputSchema54;
|
|
7140
7671
|
}
|
|
7141
|
-
|
|
7142
|
-
|
|
7143
|
-
|
|
7144
|
-
async executePreview(input, _context) {
|
|
7145
|
-
return await applyThreshold(input);
|
|
7672
|
+
filterName = "threshold";
|
|
7673
|
+
opParams(input) {
|
|
7674
|
+
return { value: input.value ?? 128 };
|
|
7146
7675
|
}
|
|
7147
7676
|
}
|
|
7148
7677
|
Workflow31.prototype.imageThreshold = CreateWorkflow30(ImageThresholdTask);
|
|
7149
|
-
// src/task/image/ImageTintTask.ts
|
|
7150
|
-
import {
|
|
7151
|
-
|
|
7152
|
-
Task as Task57,
|
|
7153
|
-
Workflow as Workflow32
|
|
7154
|
-
} from "@workglow/task-graph";
|
|
7155
|
-
import { resolveColor as resolveColor3 } from "@workglow/util/media";
|
|
7156
|
-
async function applyTint(input) {
|
|
7157
|
-
const { r: tr, g: tg, b: tb } = resolveColor3(input.color);
|
|
7158
|
-
const amount = input.amount ?? 0.5;
|
|
7159
|
-
const invAmount = 1 - amount;
|
|
7160
|
-
const tintR = tr * amount;
|
|
7161
|
-
const tintG = tg * amount;
|
|
7162
|
-
const tintB = tb * amount;
|
|
7163
|
-
const image = await produceImageOutput(input.image, (img) => {
|
|
7164
|
-
const { data: src, width, height, channels } = img;
|
|
7165
|
-
const pixelCount = width * height;
|
|
7166
|
-
if (channels === 1) {
|
|
7167
|
-
const dst2 = new Uint8ClampedArray(pixelCount * 3);
|
|
7168
|
-
for (let i = 0;i < pixelCount; i++) {
|
|
7169
|
-
const gray = src[i];
|
|
7170
|
-
dst2[i * 3] = gray * invAmount + tintR;
|
|
7171
|
-
dst2[i * 3 + 1] = gray * invAmount + tintG;
|
|
7172
|
-
dst2[i * 3 + 2] = gray * invAmount + tintB;
|
|
7173
|
-
}
|
|
7174
|
-
return { data: dst2, width, height, channels: 3 };
|
|
7175
|
-
}
|
|
7176
|
-
const dst = new Uint8ClampedArray(src.length);
|
|
7177
|
-
for (let i = 0;i < pixelCount; i++) {
|
|
7178
|
-
const idx = i * channels;
|
|
7179
|
-
dst[idx] = src[idx] * invAmount + tintR;
|
|
7180
|
-
dst[idx + 1] = src[idx + 1] * invAmount + tintG;
|
|
7181
|
-
dst[idx + 2] = src[idx + 2] * invAmount + tintB;
|
|
7182
|
-
if (channels === 4) {
|
|
7183
|
-
dst[idx + 3] = src[idx + 3];
|
|
7184
|
-
}
|
|
7185
|
-
}
|
|
7186
|
-
return { data: dst, width, height, channels };
|
|
7187
|
-
});
|
|
7188
|
-
return { image };
|
|
7189
|
-
}
|
|
7678
|
+
// src/task/image/tint/ImageTintTask.ts
|
|
7679
|
+
import { CreateWorkflow as CreateWorkflow31, Workflow as Workflow32 } from "@workglow/task-graph";
|
|
7680
|
+
import { GpuImageSchema as GpuImageSchema16 } from "@workglow/util/media";
|
|
7190
7681
|
var inputSchema56 = {
|
|
7191
7682
|
type: "object",
|
|
7192
7683
|
properties: {
|
|
7193
|
-
image:
|
|
7684
|
+
image: GpuImageSchema16({ title: "Image", description: "Source image" }),
|
|
7194
7685
|
color: ColorValueSchema({ title: "Color", description: "Tint color" }),
|
|
7195
7686
|
amount: {
|
|
7196
7687
|
type: "number",
|
|
@@ -7206,14 +7697,12 @@ var inputSchema56 = {
|
|
|
7206
7697
|
};
|
|
7207
7698
|
var outputSchema55 = {
|
|
7208
7699
|
type: "object",
|
|
7209
|
-
properties: {
|
|
7210
|
-
image: ImageBinaryOrDataUriSchema({ title: "Image", description: "Tinted image" })
|
|
7211
|
-
},
|
|
7700
|
+
properties: { image: GpuImageSchema16({ title: "Image", description: "Tinted image" }) },
|
|
7212
7701
|
required: ["image"],
|
|
7213
7702
|
additionalProperties: false
|
|
7214
7703
|
};
|
|
7215
7704
|
|
|
7216
|
-
class ImageTintTask extends
|
|
7705
|
+
class ImageTintTask extends ImageFilterTask {
|
|
7217
7706
|
static type = "ImageTintTask";
|
|
7218
7707
|
static category = "Image";
|
|
7219
7708
|
static title = "Tint Image";
|
|
@@ -7224,68 +7713,42 @@ class ImageTintTask extends Task57 {
|
|
|
7224
7713
|
static outputSchema() {
|
|
7225
7714
|
return outputSchema55;
|
|
7226
7715
|
}
|
|
7227
|
-
|
|
7228
|
-
|
|
7229
|
-
|
|
7230
|
-
|
|
7231
|
-
|
|
7716
|
+
filterName = "tint";
|
|
7717
|
+
opParams(input) {
|
|
7718
|
+
return {
|
|
7719
|
+
color: input.color,
|
|
7720
|
+
amount: input.amount ?? 0.5
|
|
7721
|
+
};
|
|
7232
7722
|
}
|
|
7233
7723
|
}
|
|
7234
7724
|
Workflow32.prototype.imageTint = CreateWorkflow31(ImageTintTask);
|
|
7235
|
-
// src/task/image/ImageTransparencyTask.ts
|
|
7236
|
-
import {
|
|
7237
|
-
|
|
7238
|
-
Task as Task58,
|
|
7239
|
-
Workflow as Workflow33
|
|
7240
|
-
} from "@workglow/task-graph";
|
|
7241
|
-
async function applyTransparency(input) {
|
|
7242
|
-
const { opacity } = input;
|
|
7243
|
-
const image = await produceImageOutput(input.image, (img) => {
|
|
7244
|
-
const { data: src, width, height, channels: srcCh } = img;
|
|
7245
|
-
const pixelCount = width * height;
|
|
7246
|
-
const dst = new Uint8ClampedArray(pixelCount * 4);
|
|
7247
|
-
const alphaScale = Math.round(opacity * 255);
|
|
7248
|
-
for (let i = 0;i < pixelCount; i++) {
|
|
7249
|
-
const srcIdx = i * srcCh;
|
|
7250
|
-
const dstIdx = i * 4;
|
|
7251
|
-
dst[dstIdx] = src[srcIdx];
|
|
7252
|
-
dst[dstIdx + 1] = srcCh >= 3 ? src[srcIdx + 1] : src[srcIdx];
|
|
7253
|
-
dst[dstIdx + 2] = srcCh >= 3 ? src[srcIdx + 2] : src[srcIdx];
|
|
7254
|
-
const srcAlpha = srcCh === 4 ? src[srcIdx + 3] : 255;
|
|
7255
|
-
dst[dstIdx + 3] = (srcAlpha * alphaScale + 127) / 255;
|
|
7256
|
-
}
|
|
7257
|
-
return { data: dst, width, height, channels: 4 };
|
|
7258
|
-
});
|
|
7259
|
-
return { image };
|
|
7260
|
-
}
|
|
7725
|
+
// src/task/image/transparency/ImageTransparencyTask.ts
|
|
7726
|
+
import { CreateWorkflow as CreateWorkflow32, Workflow as Workflow33 } from "@workglow/task-graph";
|
|
7727
|
+
import { GpuImageSchema as GpuImageSchema17 } from "@workglow/util/media";
|
|
7261
7728
|
var inputSchema57 = {
|
|
7262
7729
|
type: "object",
|
|
7263
7730
|
properties: {
|
|
7264
|
-
image:
|
|
7265
|
-
|
|
7731
|
+
image: GpuImageSchema17({ title: "Image", description: "Source image" }),
|
|
7732
|
+
amount: {
|
|
7266
7733
|
type: "number",
|
|
7267
|
-
title: "
|
|
7734
|
+
title: "Amount",
|
|
7268
7735
|
description: "Opacity level (0.0 = fully transparent, 1.0 = fully opaque)",
|
|
7269
7736
|
minimum: 0,
|
|
7270
|
-
maximum: 1
|
|
7737
|
+
maximum: 1,
|
|
7738
|
+
default: 1
|
|
7271
7739
|
}
|
|
7272
7740
|
},
|
|
7273
|
-
required: ["image"
|
|
7741
|
+
required: ["image"],
|
|
7274
7742
|
additionalProperties: false
|
|
7275
7743
|
};
|
|
7276
7744
|
var outputSchema56 = {
|
|
7277
7745
|
type: "object",
|
|
7278
|
-
properties: {
|
|
7279
|
-
image: ImageBinaryOrDataUriSchema({
|
|
7280
|
-
title: "Image",
|
|
7281
|
-
description: "Image with adjusted transparency"
|
|
7282
|
-
})
|
|
7283
|
-
},
|
|
7746
|
+
properties: { image: GpuImageSchema17({ title: "Image", description: "Image with adjusted transparency" }) },
|
|
7284
7747
|
required: ["image"],
|
|
7285
7748
|
additionalProperties: false
|
|
7286
7749
|
};
|
|
7287
7750
|
|
|
7288
|
-
class ImageTransparencyTask extends
|
|
7751
|
+
class ImageTransparencyTask extends ImageFilterTask {
|
|
7289
7752
|
static type = "ImageTransparencyTask";
|
|
7290
7753
|
static category = "Image";
|
|
7291
7754
|
static title = "Set Transparency";
|
|
@@ -7296,133 +7759,19 @@ class ImageTransparencyTask extends Task58 {
|
|
|
7296
7759
|
static outputSchema() {
|
|
7297
7760
|
return outputSchema56;
|
|
7298
7761
|
}
|
|
7299
|
-
|
|
7300
|
-
|
|
7301
|
-
|
|
7302
|
-
async executePreview(input, _context) {
|
|
7303
|
-
return await applyTransparency(input);
|
|
7762
|
+
filterName = "transparency";
|
|
7763
|
+
opParams(input) {
|
|
7764
|
+
return { amount: input.amount ?? 1 };
|
|
7304
7765
|
}
|
|
7305
7766
|
}
|
|
7306
7767
|
Workflow33.prototype.imageTransparency = CreateWorkflow32(ImageTransparencyTask);
|
|
7307
|
-
// src/task/image/ImageWatermarkTask.ts
|
|
7308
|
-
import {
|
|
7309
|
-
CreateWorkflow as CreateWorkflow33,
|
|
7310
|
-
Task as Task59,
|
|
7311
|
-
Workflow as Workflow34
|
|
7312
|
-
} from "@workglow/task-graph";
|
|
7313
|
-
async function applyWatermark(input) {
|
|
7314
|
-
const { spacing = 64, opacity = 0.3, pattern = "diagonal-lines" } = input;
|
|
7315
|
-
const image = await produceImageOutput(input.image, (img) => {
|
|
7316
|
-
const { data: src, width, height, channels: srcCh } = img;
|
|
7317
|
-
const outCh = 4;
|
|
7318
|
-
const dst = new Uint8ClampedArray(width * height * outCh);
|
|
7319
|
-
const lineWidth = 2;
|
|
7320
|
-
const dotRadius = Math.max(2, spacing >> 3);
|
|
7321
|
-
const dotRadiusSq = dotRadius * dotRadius;
|
|
7322
|
-
const half = spacing >> 1;
|
|
7323
|
-
const alpha = Math.round(opacity * 255);
|
|
7324
|
-
for (let y = 0;y < height; y++) {
|
|
7325
|
-
for (let x = 0;x < width; x++) {
|
|
7326
|
-
const srcIdx = (y * width + x) * srcCh;
|
|
7327
|
-
const dstIdx = (y * width + x) * outCh;
|
|
7328
|
-
const sr = src[srcIdx];
|
|
7329
|
-
const sg = srcCh >= 3 ? src[srcIdx + 1] : sr;
|
|
7330
|
-
const sb = srcCh >= 3 ? src[srcIdx + 2] : sr;
|
|
7331
|
-
const sa = srcCh === 4 ? src[srcIdx + 3] : 255;
|
|
7332
|
-
let isPattern = false;
|
|
7333
|
-
if (pattern === "diagonal-lines") {
|
|
7334
|
-
isPattern = (x + y) % spacing < lineWidth;
|
|
7335
|
-
} else if (pattern === "grid") {
|
|
7336
|
-
isPattern = x % spacing < lineWidth || y % spacing < lineWidth;
|
|
7337
|
-
} else {
|
|
7338
|
-
const dx = x % spacing - half;
|
|
7339
|
-
const dy = y % spacing - half;
|
|
7340
|
-
isPattern = dx * dx + dy * dy < dotRadiusSq;
|
|
7341
|
-
}
|
|
7342
|
-
if (isPattern) {
|
|
7343
|
-
const blend = alpha;
|
|
7344
|
-
const invBlend = 255 - blend;
|
|
7345
|
-
dst[dstIdx] = (sr * invBlend + 255 * blend + 127) / 255;
|
|
7346
|
-
dst[dstIdx + 1] = (sg * invBlend + 255 * blend + 127) / 255;
|
|
7347
|
-
dst[dstIdx + 2] = (sb * invBlend + 255 * blend + 127) / 255;
|
|
7348
|
-
dst[dstIdx + 3] = sa;
|
|
7349
|
-
} else {
|
|
7350
|
-
dst[dstIdx] = sr;
|
|
7351
|
-
dst[dstIdx + 1] = sg;
|
|
7352
|
-
dst[dstIdx + 2] = sb;
|
|
7353
|
-
dst[dstIdx + 3] = sa;
|
|
7354
|
-
}
|
|
7355
|
-
}
|
|
7356
|
-
}
|
|
7357
|
-
return { data: dst, width, height, channels: outCh };
|
|
7358
|
-
});
|
|
7359
|
-
return { image };
|
|
7360
|
-
}
|
|
7361
|
-
var inputSchema58 = {
|
|
7362
|
-
type: "object",
|
|
7363
|
-
properties: {
|
|
7364
|
-
image: ImageBinaryOrDataUriSchema({ title: "Image", description: "Source image" }),
|
|
7365
|
-
spacing: {
|
|
7366
|
-
type: "integer",
|
|
7367
|
-
title: "Spacing",
|
|
7368
|
-
description: "Pattern spacing in pixels",
|
|
7369
|
-
minimum: 8,
|
|
7370
|
-
default: 64
|
|
7371
|
-
},
|
|
7372
|
-
opacity: {
|
|
7373
|
-
type: "number",
|
|
7374
|
-
title: "Opacity",
|
|
7375
|
-
description: "Watermark opacity (0.0-1.0)",
|
|
7376
|
-
minimum: 0,
|
|
7377
|
-
maximum: 1,
|
|
7378
|
-
default: 0.3
|
|
7379
|
-
},
|
|
7380
|
-
pattern: {
|
|
7381
|
-
type: "string",
|
|
7382
|
-
enum: ["diagonal-lines", "grid", "dots"],
|
|
7383
|
-
title: "Pattern",
|
|
7384
|
-
description: "Watermark pattern type",
|
|
7385
|
-
default: "diagonal-lines"
|
|
7386
|
-
}
|
|
7387
|
-
},
|
|
7388
|
-
required: ["image"],
|
|
7389
|
-
additionalProperties: false
|
|
7390
|
-
};
|
|
7391
|
-
var outputSchema57 = {
|
|
7392
|
-
type: "object",
|
|
7393
|
-
properties: {
|
|
7394
|
-
image: ImageBinaryOrDataUriSchema({ title: "Image", description: "Watermarked image" })
|
|
7395
|
-
},
|
|
7396
|
-
required: ["image"],
|
|
7397
|
-
additionalProperties: false
|
|
7398
|
-
};
|
|
7399
|
-
|
|
7400
|
-
class ImageWatermarkTask extends Task59 {
|
|
7401
|
-
static type = "ImageWatermarkTask";
|
|
7402
|
-
static category = "Image";
|
|
7403
|
-
static title = "Add Watermark";
|
|
7404
|
-
static description = "Adds a repeating pattern watermark to an image";
|
|
7405
|
-
static inputSchema() {
|
|
7406
|
-
return inputSchema58;
|
|
7407
|
-
}
|
|
7408
|
-
static outputSchema() {
|
|
7409
|
-
return outputSchema57;
|
|
7410
|
-
}
|
|
7411
|
-
async execute(input, _context) {
|
|
7412
|
-
return await applyWatermark(input);
|
|
7413
|
-
}
|
|
7414
|
-
async executePreview(input, _context) {
|
|
7415
|
-
return await applyWatermark(input);
|
|
7416
|
-
}
|
|
7417
|
-
}
|
|
7418
|
-
Workflow34.prototype.imageWatermark = CreateWorkflow33(ImageWatermarkTask);
|
|
7419
7768
|
// src/task/InputTask.ts
|
|
7420
|
-
import { CreateWorkflow as
|
|
7769
|
+
import { CreateWorkflow as CreateWorkflow33, Task as Task44, Workflow as Workflow34 } from "@workglow/task-graph";
|
|
7421
7770
|
function passthroughInput(input) {
|
|
7422
7771
|
return input;
|
|
7423
7772
|
}
|
|
7424
7773
|
|
|
7425
|
-
class InputTask extends
|
|
7774
|
+
class InputTask extends Task44 {
|
|
7426
7775
|
static type = "InputTask";
|
|
7427
7776
|
static category = "Flow Control";
|
|
7428
7777
|
static title = "Input";
|
|
@@ -7477,15 +7826,15 @@ class InputTask extends Task60 {
|
|
|
7477
7826
|
yield { type: "finish", data: input };
|
|
7478
7827
|
}
|
|
7479
7828
|
}
|
|
7480
|
-
|
|
7829
|
+
Workflow34.prototype.input = CreateWorkflow33(InputTask);
|
|
7481
7830
|
// src/task/JavaScriptTask.ts
|
|
7482
7831
|
import {
|
|
7483
|
-
CreateWorkflow as
|
|
7832
|
+
CreateWorkflow as CreateWorkflow34,
|
|
7484
7833
|
Entitlements as Entitlements7,
|
|
7485
|
-
Task as
|
|
7834
|
+
Task as Task45,
|
|
7486
7835
|
TaskConfigSchema as TaskConfigSchema32,
|
|
7487
7836
|
TaskInvalidInputError as TaskInvalidInputError3,
|
|
7488
|
-
Workflow as
|
|
7837
|
+
Workflow as Workflow35
|
|
7489
7838
|
} from "@workglow/task-graph";
|
|
7490
7839
|
|
|
7491
7840
|
// src/util/acorn.js
|
|
@@ -11981,7 +12330,7 @@ var configSchema = {
|
|
|
11981
12330
|
},
|
|
11982
12331
|
additionalProperties: false
|
|
11983
12332
|
};
|
|
11984
|
-
var
|
|
12333
|
+
var inputSchema58 = {
|
|
11985
12334
|
type: "object",
|
|
11986
12335
|
properties: {
|
|
11987
12336
|
javascript_code: {
|
|
@@ -11995,7 +12344,7 @@ var inputSchema59 = {
|
|
|
11995
12344
|
required: ["javascript_code"],
|
|
11996
12345
|
additionalProperties: true
|
|
11997
12346
|
};
|
|
11998
|
-
var
|
|
12347
|
+
var outputSchema57 = {
|
|
11999
12348
|
type: "object",
|
|
12000
12349
|
properties: {
|
|
12001
12350
|
output: {
|
|
@@ -12007,7 +12356,7 @@ var outputSchema58 = {
|
|
|
12007
12356
|
additionalProperties: false
|
|
12008
12357
|
};
|
|
12009
12358
|
|
|
12010
|
-
class JavaScriptTask extends
|
|
12359
|
+
class JavaScriptTask extends Task45 {
|
|
12011
12360
|
static type = "JavaScriptTask";
|
|
12012
12361
|
static category = "Utility";
|
|
12013
12362
|
static title = "JavaScript Interpreter";
|
|
@@ -12028,10 +12377,10 @@ class JavaScriptTask extends Task61 {
|
|
|
12028
12377
|
return configSchema;
|
|
12029
12378
|
}
|
|
12030
12379
|
static inputSchema() {
|
|
12031
|
-
return
|
|
12380
|
+
return inputSchema58;
|
|
12032
12381
|
}
|
|
12033
12382
|
static outputSchema() {
|
|
12034
|
-
return
|
|
12383
|
+
return outputSchema57;
|
|
12035
12384
|
}
|
|
12036
12385
|
inputSchema() {
|
|
12037
12386
|
if (this.config?.javascript_code) {
|
|
@@ -12044,7 +12393,7 @@ class JavaScriptTask extends Task61 {
|
|
|
12044
12393
|
additionalProperties: true
|
|
12045
12394
|
};
|
|
12046
12395
|
}
|
|
12047
|
-
return
|
|
12396
|
+
return inputSchema58;
|
|
12048
12397
|
}
|
|
12049
12398
|
async execute(input2) {
|
|
12050
12399
|
return runJavaScript(input2, this.config.javascript_code);
|
|
@@ -12056,14 +12405,14 @@ class JavaScriptTask extends Task61 {
|
|
|
12056
12405
|
var javaScript = (input2, config = {}) => {
|
|
12057
12406
|
return new JavaScriptTask(config).run(input2);
|
|
12058
12407
|
};
|
|
12059
|
-
|
|
12408
|
+
Workflow35.prototype.javaScript = CreateWorkflow34(JavaScriptTask);
|
|
12060
12409
|
// src/task/JsonPathTask.ts
|
|
12061
12410
|
import {
|
|
12062
|
-
CreateWorkflow as
|
|
12063
|
-
Task as
|
|
12064
|
-
Workflow as
|
|
12411
|
+
CreateWorkflow as CreateWorkflow35,
|
|
12412
|
+
Task as Task46,
|
|
12413
|
+
Workflow as Workflow36
|
|
12065
12414
|
} from "@workglow/task-graph";
|
|
12066
|
-
var
|
|
12415
|
+
var inputSchema59 = {
|
|
12067
12416
|
type: "object",
|
|
12068
12417
|
properties: {
|
|
12069
12418
|
value: {
|
|
@@ -12079,7 +12428,7 @@ var inputSchema60 = {
|
|
|
12079
12428
|
required: ["value", "path"],
|
|
12080
12429
|
additionalProperties: false
|
|
12081
12430
|
};
|
|
12082
|
-
var
|
|
12431
|
+
var outputSchema58 = {
|
|
12083
12432
|
type: "object",
|
|
12084
12433
|
properties: {
|
|
12085
12434
|
result: {
|
|
@@ -12116,16 +12465,16 @@ function extractJsonPath(value, path) {
|
|
|
12116
12465
|
return resolvePath(value, segments);
|
|
12117
12466
|
}
|
|
12118
12467
|
|
|
12119
|
-
class JsonPathTask extends
|
|
12468
|
+
class JsonPathTask extends Task46 {
|
|
12120
12469
|
static type = "JsonPathTask";
|
|
12121
12470
|
static category = "Utility";
|
|
12122
12471
|
static title = "JSON Path";
|
|
12123
12472
|
static description = "Extracts a value from an object using a dot-notation path";
|
|
12124
12473
|
static inputSchema() {
|
|
12125
|
-
return
|
|
12474
|
+
return inputSchema59;
|
|
12126
12475
|
}
|
|
12127
12476
|
static outputSchema() {
|
|
12128
|
-
return
|
|
12477
|
+
return outputSchema58;
|
|
12129
12478
|
}
|
|
12130
12479
|
async execute(input2, _context) {
|
|
12131
12480
|
return { result: extractJsonPath(input2.value, input2.path) };
|
|
@@ -12134,18 +12483,18 @@ class JsonPathTask extends Task62 {
|
|
|
12134
12483
|
return { result: extractJsonPath(input2.value, input2.path) };
|
|
12135
12484
|
}
|
|
12136
12485
|
}
|
|
12137
|
-
|
|
12486
|
+
Workflow36.prototype.jsonPath = CreateWorkflow35(JsonPathTask);
|
|
12138
12487
|
// src/task/JsonTask.ts
|
|
12139
12488
|
import {
|
|
12140
12489
|
createGraphFromDependencyJSON,
|
|
12141
12490
|
createGraphFromGraphJSON,
|
|
12142
|
-
CreateWorkflow as
|
|
12491
|
+
CreateWorkflow as CreateWorkflow36,
|
|
12143
12492
|
Dataflow,
|
|
12144
12493
|
GraphAsTask as GraphAsTask2,
|
|
12145
12494
|
TaskConfigurationError as TaskConfigurationError3,
|
|
12146
|
-
Workflow as
|
|
12495
|
+
Workflow as Workflow37
|
|
12147
12496
|
} from "@workglow/task-graph";
|
|
12148
|
-
var
|
|
12497
|
+
var inputSchema60 = {
|
|
12149
12498
|
type: "object",
|
|
12150
12499
|
properties: {
|
|
12151
12500
|
json: {
|
|
@@ -12156,7 +12505,7 @@ var inputSchema61 = {
|
|
|
12156
12505
|
},
|
|
12157
12506
|
additionalProperties: false
|
|
12158
12507
|
};
|
|
12159
|
-
var
|
|
12508
|
+
var outputSchema59 = {
|
|
12160
12509
|
type: "object",
|
|
12161
12510
|
properties: {
|
|
12162
12511
|
output: {
|
|
@@ -12173,10 +12522,10 @@ class JsonTask extends GraphAsTask2 {
|
|
|
12173
12522
|
static title = "JSON Task";
|
|
12174
12523
|
static description = "A task that creates and manages task graphs from JSON configurations";
|
|
12175
12524
|
static inputSchema() {
|
|
12176
|
-
return
|
|
12525
|
+
return inputSchema60;
|
|
12177
12526
|
}
|
|
12178
12527
|
static outputSchema() {
|
|
12179
|
-
return
|
|
12528
|
+
return outputSchema59;
|
|
12180
12529
|
}
|
|
12181
12530
|
regenerateGraph() {
|
|
12182
12531
|
if (!this.runInputData.json)
|
|
@@ -12210,15 +12559,15 @@ class JsonTask extends GraphAsTask2 {
|
|
|
12210
12559
|
var json = (input2, config = {}) => {
|
|
12211
12560
|
return new JsonTask(config).run(input2);
|
|
12212
12561
|
};
|
|
12213
|
-
|
|
12562
|
+
Workflow37.prototype.json = CreateWorkflow36(JsonTask);
|
|
12214
12563
|
// src/task/LambdaTask.ts
|
|
12215
12564
|
import {
|
|
12216
|
-
CreateWorkflow as
|
|
12565
|
+
CreateWorkflow as CreateWorkflow37,
|
|
12217
12566
|
DATAFLOW_ALL_PORTS,
|
|
12218
|
-
Task as
|
|
12567
|
+
Task as Task47,
|
|
12219
12568
|
TaskConfigSchema as TaskConfigSchema33,
|
|
12220
12569
|
TaskConfigurationError as TaskConfigurationError4,
|
|
12221
|
-
Workflow as
|
|
12570
|
+
Workflow as Workflow38
|
|
12222
12571
|
} from "@workglow/task-graph";
|
|
12223
12572
|
var lambdaTaskConfigSchema = {
|
|
12224
12573
|
type: "object",
|
|
@@ -12229,7 +12578,7 @@ var lambdaTaskConfigSchema = {
|
|
|
12229
12578
|
},
|
|
12230
12579
|
additionalProperties: false
|
|
12231
12580
|
};
|
|
12232
|
-
var
|
|
12581
|
+
var inputSchema61 = {
|
|
12233
12582
|
type: "object",
|
|
12234
12583
|
properties: {
|
|
12235
12584
|
[DATAFLOW_ALL_PORTS]: {
|
|
@@ -12239,7 +12588,7 @@ var inputSchema62 = {
|
|
|
12239
12588
|
},
|
|
12240
12589
|
additionalProperties: true
|
|
12241
12590
|
};
|
|
12242
|
-
var
|
|
12591
|
+
var outputSchema60 = {
|
|
12243
12592
|
type: "object",
|
|
12244
12593
|
properties: {
|
|
12245
12594
|
[DATAFLOW_ALL_PORTS]: {
|
|
@@ -12250,7 +12599,7 @@ var outputSchema61 = {
|
|
|
12250
12599
|
additionalProperties: true
|
|
12251
12600
|
};
|
|
12252
12601
|
|
|
12253
|
-
class LambdaTask extends
|
|
12602
|
+
class LambdaTask extends Task47 {
|
|
12254
12603
|
static type = "LambdaTask";
|
|
12255
12604
|
static title = "Lambda Task";
|
|
12256
12605
|
static description = "A task that wraps a provided function and its input";
|
|
@@ -12260,10 +12609,10 @@ class LambdaTask extends Task63 {
|
|
|
12260
12609
|
return lambdaTaskConfigSchema;
|
|
12261
12610
|
}
|
|
12262
12611
|
static inputSchema() {
|
|
12263
|
-
return
|
|
12612
|
+
return inputSchema61;
|
|
12264
12613
|
}
|
|
12265
12614
|
static outputSchema() {
|
|
12266
|
-
return
|
|
12615
|
+
return outputSchema60;
|
|
12267
12616
|
}
|
|
12268
12617
|
canSerializeConfig() {
|
|
12269
12618
|
return false;
|
|
@@ -12297,14 +12646,14 @@ function lambda(input2, config) {
|
|
|
12297
12646
|
const task = new LambdaTask({ ...config, defaults: input2 });
|
|
12298
12647
|
return task.run();
|
|
12299
12648
|
}
|
|
12300
|
-
|
|
12649
|
+
Workflow38.prototype.lambda = CreateWorkflow37(LambdaTask);
|
|
12301
12650
|
// src/task/mcp/McpListTask.ts
|
|
12302
12651
|
import {
|
|
12303
|
-
CreateWorkflow as
|
|
12652
|
+
CreateWorkflow as CreateWorkflow38,
|
|
12304
12653
|
Entitlements as Entitlements8,
|
|
12305
12654
|
mergeEntitlements as mergeEntitlements4,
|
|
12306
|
-
Task as
|
|
12307
|
-
Workflow as
|
|
12655
|
+
Task as Task48,
|
|
12656
|
+
Workflow as Workflow39
|
|
12308
12657
|
} from "@workglow/task-graph";
|
|
12309
12658
|
|
|
12310
12659
|
// src/util/getMcpServerTransport.ts
|
|
@@ -12478,7 +12827,7 @@ var outputSchemaAll = {
|
|
|
12478
12827
|
additionalProperties: false
|
|
12479
12828
|
};
|
|
12480
12829
|
|
|
12481
|
-
class McpListTask extends
|
|
12830
|
+
class McpListTask extends Task48 {
|
|
12482
12831
|
static type = "McpListTask";
|
|
12483
12832
|
static category = "MCP";
|
|
12484
12833
|
static title = "MCP List";
|
|
@@ -12585,15 +12934,15 @@ class McpListTask extends Task64 {
|
|
|
12585
12934
|
var mcpList = async (input2, config = {}) => {
|
|
12586
12935
|
return new McpListTask(config).run(input2);
|
|
12587
12936
|
};
|
|
12588
|
-
|
|
12937
|
+
Workflow39.prototype.mcpList = CreateWorkflow38(McpListTask);
|
|
12589
12938
|
// src/task/mcp/McpPromptGetTask.ts
|
|
12590
12939
|
import {
|
|
12591
|
-
CreateWorkflow as
|
|
12940
|
+
CreateWorkflow as CreateWorkflow39,
|
|
12592
12941
|
Entitlements as Entitlements9,
|
|
12593
12942
|
mergeEntitlements as mergeEntitlements5,
|
|
12594
|
-
Task as
|
|
12943
|
+
Task as Task49,
|
|
12595
12944
|
TaskConfigSchema as TaskConfigSchema34,
|
|
12596
|
-
Workflow as
|
|
12945
|
+
Workflow as Workflow40
|
|
12597
12946
|
} from "@workglow/task-graph";
|
|
12598
12947
|
var annotationsSchema = {
|
|
12599
12948
|
type: "object",
|
|
@@ -12728,7 +13077,7 @@ var fallbackInputSchema = {
|
|
|
12728
13077
|
additionalProperties: false
|
|
12729
13078
|
};
|
|
12730
13079
|
|
|
12731
|
-
class McpPromptGetTask extends
|
|
13080
|
+
class McpPromptGetTask extends Task49 {
|
|
12732
13081
|
static type = "McpPromptGetTask";
|
|
12733
13082
|
static category = "MCP";
|
|
12734
13083
|
static title = "MCP Get Prompt";
|
|
@@ -12851,15 +13200,15 @@ class McpPromptGetTask extends Task65 {
|
|
|
12851
13200
|
var mcpPromptGet = async (input2, config) => {
|
|
12852
13201
|
return new McpPromptGetTask(config).run(input2);
|
|
12853
13202
|
};
|
|
12854
|
-
|
|
13203
|
+
Workflow40.prototype.mcpPromptGet = CreateWorkflow39(McpPromptGetTask);
|
|
12855
13204
|
// src/task/mcp/McpResourceReadTask.ts
|
|
12856
13205
|
import {
|
|
12857
|
-
CreateWorkflow as
|
|
13206
|
+
CreateWorkflow as CreateWorkflow40,
|
|
12858
13207
|
Entitlements as Entitlements10,
|
|
12859
13208
|
mergeEntitlements as mergeEntitlements6,
|
|
12860
|
-
Task as
|
|
13209
|
+
Task as Task50,
|
|
12861
13210
|
TaskConfigSchema as TaskConfigSchema35,
|
|
12862
|
-
Workflow as
|
|
13211
|
+
Workflow as Workflow41
|
|
12863
13212
|
} from "@workglow/task-graph";
|
|
12864
13213
|
var contentItemSchema = {
|
|
12865
13214
|
anyOf: [
|
|
@@ -12887,12 +13236,12 @@ var contentItemSchema = {
|
|
|
12887
13236
|
}
|
|
12888
13237
|
]
|
|
12889
13238
|
};
|
|
12890
|
-
var
|
|
13239
|
+
var inputSchema62 = {
|
|
12891
13240
|
type: "object",
|
|
12892
13241
|
properties: {},
|
|
12893
13242
|
additionalProperties: false
|
|
12894
13243
|
};
|
|
12895
|
-
var
|
|
13244
|
+
var outputSchema61 = {
|
|
12896
13245
|
type: "object",
|
|
12897
13246
|
properties: {
|
|
12898
13247
|
contents: {
|
|
@@ -12906,7 +13255,7 @@ var outputSchema62 = {
|
|
|
12906
13255
|
additionalProperties: false
|
|
12907
13256
|
};
|
|
12908
13257
|
|
|
12909
|
-
class McpResourceReadTask extends
|
|
13258
|
+
class McpResourceReadTask extends Task50 {
|
|
12910
13259
|
static type = "McpResourceReadTask";
|
|
12911
13260
|
static category = "MCP";
|
|
12912
13261
|
static title = "MCP Read Resource";
|
|
@@ -12939,10 +13288,10 @@ class McpResourceReadTask extends Task66 {
|
|
|
12939
13288
|
});
|
|
12940
13289
|
}
|
|
12941
13290
|
static inputSchema() {
|
|
12942
|
-
return
|
|
13291
|
+
return inputSchema62;
|
|
12943
13292
|
}
|
|
12944
13293
|
static outputSchema() {
|
|
12945
|
-
return
|
|
13294
|
+
return outputSchema61;
|
|
12946
13295
|
}
|
|
12947
13296
|
static configSchema() {
|
|
12948
13297
|
const { mcpServerConfigSchema: mcpServerConfigSchema2 } = getMcpTaskDeps();
|
|
@@ -12979,13 +13328,13 @@ class McpResourceReadTask extends Task66 {
|
|
|
12979
13328
|
var mcpResourceRead = async (config) => {
|
|
12980
13329
|
return new McpResourceReadTask(config).run({});
|
|
12981
13330
|
};
|
|
12982
|
-
|
|
13331
|
+
Workflow41.prototype.mcpResourceRead = CreateWorkflow40(McpResourceReadTask);
|
|
12983
13332
|
// src/task/mcp/McpSearchTask.ts
|
|
12984
13333
|
import {
|
|
12985
|
-
CreateWorkflow as
|
|
13334
|
+
CreateWorkflow as CreateWorkflow41,
|
|
12986
13335
|
Entitlements as Entitlements11,
|
|
12987
|
-
Task as
|
|
12988
|
-
Workflow as
|
|
13336
|
+
Task as Task51,
|
|
13337
|
+
Workflow as Workflow42
|
|
12989
13338
|
} from "@workglow/task-graph";
|
|
12990
13339
|
var MCP_REGISTRY_BASE = "https://registry.modelcontextprotocol.io/v0.1";
|
|
12991
13340
|
var McpSearchInputSchema = {
|
|
@@ -13125,7 +13474,7 @@ async function searchMcpRegistry(query, signal) {
|
|
|
13125
13474
|
return page.results;
|
|
13126
13475
|
}
|
|
13127
13476
|
|
|
13128
|
-
class McpSearchTask extends
|
|
13477
|
+
class McpSearchTask extends Task51 {
|
|
13129
13478
|
static type = "McpSearchTask";
|
|
13130
13479
|
static category = "MCP";
|
|
13131
13480
|
static title = "MCP Search";
|
|
@@ -13152,15 +13501,15 @@ class McpSearchTask extends Task67 {
|
|
|
13152
13501
|
var mcpSearch = (input2, config) => {
|
|
13153
13502
|
return new McpSearchTask(config).run(input2);
|
|
13154
13503
|
};
|
|
13155
|
-
|
|
13504
|
+
Workflow42.prototype.mcpSearch = CreateWorkflow41(McpSearchTask);
|
|
13156
13505
|
// src/task/mcp/McpToolCallTask.ts
|
|
13157
13506
|
import {
|
|
13158
|
-
CreateWorkflow as
|
|
13507
|
+
CreateWorkflow as CreateWorkflow42,
|
|
13159
13508
|
Entitlements as Entitlements12,
|
|
13160
13509
|
mergeEntitlements as mergeEntitlements7,
|
|
13161
|
-
Task as
|
|
13510
|
+
Task as Task52,
|
|
13162
13511
|
TaskConfigSchema as TaskConfigSchema36,
|
|
13163
|
-
Workflow as
|
|
13512
|
+
Workflow as Workflow43
|
|
13164
13513
|
} from "@workglow/task-graph";
|
|
13165
13514
|
var annotationsSchema2 = {
|
|
13166
13515
|
type: "object",
|
|
@@ -13287,7 +13636,7 @@ var fallbackInputSchema2 = {
|
|
|
13287
13636
|
additionalProperties: true
|
|
13288
13637
|
};
|
|
13289
13638
|
|
|
13290
|
-
class McpToolCallTask extends
|
|
13639
|
+
class McpToolCallTask extends Task52 {
|
|
13291
13640
|
static type = "McpToolCallTask";
|
|
13292
13641
|
static category = "MCP";
|
|
13293
13642
|
static title = "MCP Call Tool";
|
|
@@ -13425,7 +13774,7 @@ class McpToolCallTask extends Task68 {
|
|
|
13425
13774
|
var mcpToolCall = async (input2, config) => {
|
|
13426
13775
|
return new McpToolCallTask(config).run(input2);
|
|
13427
13776
|
};
|
|
13428
|
-
|
|
13777
|
+
Workflow43.prototype.mcpToolCall = CreateWorkflow42(McpToolCallTask);
|
|
13429
13778
|
// src/task/McpElicitationConnector.ts
|
|
13430
13779
|
function defaultAbortError() {
|
|
13431
13780
|
const err = new Error("The operation was aborted");
|
|
@@ -13519,13 +13868,13 @@ class McpElicitationConnector {
|
|
|
13519
13868
|
}
|
|
13520
13869
|
}
|
|
13521
13870
|
// src/task/MergeTask.ts
|
|
13522
|
-
import { CreateWorkflow as
|
|
13523
|
-
var
|
|
13871
|
+
import { CreateWorkflow as CreateWorkflow43, Task as Task53, Workflow as Workflow44 } from "@workglow/task-graph";
|
|
13872
|
+
var inputSchema63 = {
|
|
13524
13873
|
type: "object",
|
|
13525
13874
|
properties: {},
|
|
13526
13875
|
additionalProperties: true
|
|
13527
13876
|
};
|
|
13528
|
-
var
|
|
13877
|
+
var outputSchema62 = {
|
|
13529
13878
|
type: "object",
|
|
13530
13879
|
properties: {
|
|
13531
13880
|
output: {
|
|
@@ -13537,17 +13886,17 @@ var outputSchema63 = {
|
|
|
13537
13886
|
additionalProperties: false
|
|
13538
13887
|
};
|
|
13539
13888
|
|
|
13540
|
-
class MergeTask extends
|
|
13889
|
+
class MergeTask extends Task53 {
|
|
13541
13890
|
static type = "MergeTask";
|
|
13542
13891
|
static category = "Utility";
|
|
13543
13892
|
static title = "Merge";
|
|
13544
13893
|
static description = "Merges multiple inputs into a single array output";
|
|
13545
13894
|
static cacheable = true;
|
|
13546
13895
|
static inputSchema() {
|
|
13547
|
-
return
|
|
13896
|
+
return inputSchema63;
|
|
13548
13897
|
}
|
|
13549
13898
|
static outputSchema() {
|
|
13550
|
-
return
|
|
13899
|
+
return outputSchema62;
|
|
13551
13900
|
}
|
|
13552
13901
|
async execute(input2, _context) {
|
|
13553
13902
|
const keys = Object.keys(input2).sort((a, b) => a.localeCompare(b, undefined, { numeric: true }));
|
|
@@ -13561,14 +13910,14 @@ var merge = (input2, config = {}) => {
|
|
|
13561
13910
|
const task = new MergeTask(config);
|
|
13562
13911
|
return task.run(input2);
|
|
13563
13912
|
};
|
|
13564
|
-
|
|
13913
|
+
Workflow44.prototype.merge = CreateWorkflow43(MergeTask);
|
|
13565
13914
|
// src/task/OutputTask.ts
|
|
13566
|
-
import { CreateWorkflow as
|
|
13915
|
+
import { CreateWorkflow as CreateWorkflow44, Task as Task54, Workflow as Workflow45 } from "@workglow/task-graph";
|
|
13567
13916
|
function passthroughInput2(input2) {
|
|
13568
13917
|
return input2;
|
|
13569
13918
|
}
|
|
13570
13919
|
|
|
13571
|
-
class OutputTask extends
|
|
13920
|
+
class OutputTask extends Task54 {
|
|
13572
13921
|
static type = "OutputTask";
|
|
13573
13922
|
static category = "Flow Control";
|
|
13574
13923
|
static title = "Output";
|
|
@@ -13624,13 +13973,13 @@ class OutputTask extends Task70 {
|
|
|
13624
13973
|
yield { type: "finish", data: input2 };
|
|
13625
13974
|
}
|
|
13626
13975
|
}
|
|
13627
|
-
|
|
13976
|
+
Workflow45.prototype.output = CreateWorkflow44(OutputTask);
|
|
13628
13977
|
// src/task/RegexTask.ts
|
|
13629
13978
|
import {
|
|
13630
|
-
CreateWorkflow as
|
|
13631
|
-
Task as
|
|
13979
|
+
CreateWorkflow as CreateWorkflow45,
|
|
13980
|
+
Task as Task55,
|
|
13632
13981
|
TaskInvalidInputError as TaskInvalidInputError4,
|
|
13633
|
-
Workflow as
|
|
13982
|
+
Workflow as Workflow46
|
|
13634
13983
|
} from "@workglow/task-graph";
|
|
13635
13984
|
var MAX_BRACKET_COUNT = 100;
|
|
13636
13985
|
function hasNestedQuantifiers(pattern) {
|
|
@@ -13663,7 +14012,7 @@ function executeRegex(input2) {
|
|
|
13663
14012
|
matches: result.slice(0)
|
|
13664
14013
|
};
|
|
13665
14014
|
}
|
|
13666
|
-
var
|
|
14015
|
+
var inputSchema64 = {
|
|
13667
14016
|
type: "object",
|
|
13668
14017
|
properties: {
|
|
13669
14018
|
value: {
|
|
@@ -13686,7 +14035,7 @@ var inputSchema65 = {
|
|
|
13686
14035
|
required: ["value", "pattern"],
|
|
13687
14036
|
additionalProperties: false
|
|
13688
14037
|
};
|
|
13689
|
-
var
|
|
14038
|
+
var outputSchema63 = {
|
|
13690
14039
|
type: "object",
|
|
13691
14040
|
properties: {
|
|
13692
14041
|
match: {
|
|
@@ -13705,16 +14054,16 @@ var outputSchema64 = {
|
|
|
13705
14054
|
additionalProperties: false
|
|
13706
14055
|
};
|
|
13707
14056
|
|
|
13708
|
-
class RegexTask extends
|
|
14057
|
+
class RegexTask extends Task55 {
|
|
13709
14058
|
static type = "RegexTask";
|
|
13710
14059
|
static category = "String";
|
|
13711
14060
|
static title = "Regex";
|
|
13712
14061
|
static description = "Matches a string against a regular expression pattern";
|
|
13713
14062
|
static inputSchema() {
|
|
13714
|
-
return
|
|
14063
|
+
return inputSchema64;
|
|
13715
14064
|
}
|
|
13716
14065
|
static outputSchema() {
|
|
13717
|
-
return
|
|
14066
|
+
return outputSchema63;
|
|
13718
14067
|
}
|
|
13719
14068
|
async execute(input2, _context) {
|
|
13720
14069
|
return executeRegex(input2);
|
|
@@ -13723,10 +14072,10 @@ class RegexTask extends Task71 {
|
|
|
13723
14072
|
return executeRegex(input2);
|
|
13724
14073
|
}
|
|
13725
14074
|
}
|
|
13726
|
-
|
|
14075
|
+
Workflow46.prototype.regex = CreateWorkflow45(RegexTask);
|
|
13727
14076
|
// src/task/scalar/ScalarAbsTask.ts
|
|
13728
|
-
import { CreateWorkflow as
|
|
13729
|
-
var
|
|
14077
|
+
import { CreateWorkflow as CreateWorkflow46, Task as Task56, Workflow as Workflow47 } from "@workglow/task-graph";
|
|
14078
|
+
var inputSchema65 = {
|
|
13730
14079
|
type: "object",
|
|
13731
14080
|
properties: {
|
|
13732
14081
|
value: {
|
|
@@ -13738,7 +14087,7 @@ var inputSchema66 = {
|
|
|
13738
14087
|
required: ["value"],
|
|
13739
14088
|
additionalProperties: false
|
|
13740
14089
|
};
|
|
13741
|
-
var
|
|
14090
|
+
var outputSchema64 = {
|
|
13742
14091
|
type: "object",
|
|
13743
14092
|
properties: {
|
|
13744
14093
|
result: {
|
|
@@ -13751,25 +14100,25 @@ var outputSchema65 = {
|
|
|
13751
14100
|
additionalProperties: false
|
|
13752
14101
|
};
|
|
13753
14102
|
|
|
13754
|
-
class ScalarAbsTask extends
|
|
14103
|
+
class ScalarAbsTask extends Task56 {
|
|
13755
14104
|
static type = "ScalarAbsTask";
|
|
13756
14105
|
static category = "Math";
|
|
13757
14106
|
static title = "Abs";
|
|
13758
14107
|
static description = "Returns the absolute value of a number";
|
|
13759
14108
|
static inputSchema() {
|
|
13760
|
-
return
|
|
14109
|
+
return inputSchema65;
|
|
13761
14110
|
}
|
|
13762
14111
|
static outputSchema() {
|
|
13763
|
-
return
|
|
14112
|
+
return outputSchema64;
|
|
13764
14113
|
}
|
|
13765
14114
|
async execute(input2, _context) {
|
|
13766
14115
|
return { result: Math.abs(input2.value) };
|
|
13767
14116
|
}
|
|
13768
14117
|
}
|
|
13769
|
-
|
|
14118
|
+
Workflow47.prototype.scalarAbs = CreateWorkflow46(ScalarAbsTask);
|
|
13770
14119
|
// src/task/scalar/ScalarCeilTask.ts
|
|
13771
|
-
import { CreateWorkflow as
|
|
13772
|
-
var
|
|
14120
|
+
import { CreateWorkflow as CreateWorkflow47, Task as Task57, Workflow as Workflow48 } from "@workglow/task-graph";
|
|
14121
|
+
var inputSchema66 = {
|
|
13773
14122
|
type: "object",
|
|
13774
14123
|
properties: {
|
|
13775
14124
|
value: {
|
|
@@ -13781,7 +14130,7 @@ var inputSchema67 = {
|
|
|
13781
14130
|
required: ["value"],
|
|
13782
14131
|
additionalProperties: false
|
|
13783
14132
|
};
|
|
13784
|
-
var
|
|
14133
|
+
var outputSchema65 = {
|
|
13785
14134
|
type: "object",
|
|
13786
14135
|
properties: {
|
|
13787
14136
|
result: {
|
|
@@ -13794,25 +14143,25 @@ var outputSchema66 = {
|
|
|
13794
14143
|
additionalProperties: false
|
|
13795
14144
|
};
|
|
13796
14145
|
|
|
13797
|
-
class ScalarCeilTask extends
|
|
14146
|
+
class ScalarCeilTask extends Task57 {
|
|
13798
14147
|
static type = "ScalarCeilTask";
|
|
13799
14148
|
static category = "Math";
|
|
13800
14149
|
static title = "Ceil";
|
|
13801
14150
|
static description = "Returns the smallest integer greater than or equal to a number";
|
|
13802
14151
|
static inputSchema() {
|
|
13803
|
-
return
|
|
14152
|
+
return inputSchema66;
|
|
13804
14153
|
}
|
|
13805
14154
|
static outputSchema() {
|
|
13806
|
-
return
|
|
14155
|
+
return outputSchema65;
|
|
13807
14156
|
}
|
|
13808
14157
|
async execute(input2, _context) {
|
|
13809
14158
|
return { result: Math.ceil(input2.value) };
|
|
13810
14159
|
}
|
|
13811
14160
|
}
|
|
13812
|
-
|
|
14161
|
+
Workflow48.prototype.scalarCeil = CreateWorkflow47(ScalarCeilTask);
|
|
13813
14162
|
// src/task/scalar/ScalarFloorTask.ts
|
|
13814
|
-
import { CreateWorkflow as
|
|
13815
|
-
var
|
|
14163
|
+
import { CreateWorkflow as CreateWorkflow48, Task as Task58, Workflow as Workflow49 } from "@workglow/task-graph";
|
|
14164
|
+
var inputSchema67 = {
|
|
13816
14165
|
type: "object",
|
|
13817
14166
|
properties: {
|
|
13818
14167
|
value: {
|
|
@@ -13824,7 +14173,7 @@ var inputSchema68 = {
|
|
|
13824
14173
|
required: ["value"],
|
|
13825
14174
|
additionalProperties: false
|
|
13826
14175
|
};
|
|
13827
|
-
var
|
|
14176
|
+
var outputSchema66 = {
|
|
13828
14177
|
type: "object",
|
|
13829
14178
|
properties: {
|
|
13830
14179
|
result: {
|
|
@@ -13837,25 +14186,25 @@ var outputSchema67 = {
|
|
|
13837
14186
|
additionalProperties: false
|
|
13838
14187
|
};
|
|
13839
14188
|
|
|
13840
|
-
class ScalarFloorTask extends
|
|
14189
|
+
class ScalarFloorTask extends Task58 {
|
|
13841
14190
|
static type = "ScalarFloorTask";
|
|
13842
14191
|
static category = "Math";
|
|
13843
14192
|
static title = "Floor";
|
|
13844
14193
|
static description = "Returns the largest integer less than or equal to a number";
|
|
13845
14194
|
static inputSchema() {
|
|
13846
|
-
return
|
|
14195
|
+
return inputSchema67;
|
|
13847
14196
|
}
|
|
13848
14197
|
static outputSchema() {
|
|
13849
|
-
return
|
|
14198
|
+
return outputSchema66;
|
|
13850
14199
|
}
|
|
13851
14200
|
async execute(input2, _context) {
|
|
13852
14201
|
return { result: Math.floor(input2.value) };
|
|
13853
14202
|
}
|
|
13854
14203
|
}
|
|
13855
|
-
|
|
14204
|
+
Workflow49.prototype.scalarFloor = CreateWorkflow48(ScalarFloorTask);
|
|
13856
14205
|
// src/task/scalar/ScalarMaxTask.ts
|
|
13857
|
-
import { CreateWorkflow as
|
|
13858
|
-
var
|
|
14206
|
+
import { CreateWorkflow as CreateWorkflow49, Task as Task59, Workflow as Workflow50 } from "@workglow/task-graph";
|
|
14207
|
+
var inputSchema68 = {
|
|
13859
14208
|
type: "object",
|
|
13860
14209
|
properties: {
|
|
13861
14210
|
values: {
|
|
@@ -13868,7 +14217,7 @@ var inputSchema69 = {
|
|
|
13868
14217
|
required: ["values"],
|
|
13869
14218
|
additionalProperties: false
|
|
13870
14219
|
};
|
|
13871
|
-
var
|
|
14220
|
+
var outputSchema67 = {
|
|
13872
14221
|
type: "object",
|
|
13873
14222
|
properties: {
|
|
13874
14223
|
result: {
|
|
@@ -13881,25 +14230,25 @@ var outputSchema68 = {
|
|
|
13881
14230
|
additionalProperties: false
|
|
13882
14231
|
};
|
|
13883
14232
|
|
|
13884
|
-
class ScalarMaxTask extends
|
|
14233
|
+
class ScalarMaxTask extends Task59 {
|
|
13885
14234
|
static type = "ScalarMaxTask";
|
|
13886
14235
|
static category = "Math";
|
|
13887
14236
|
static title = "Max";
|
|
13888
14237
|
static description = "Returns the largest of the given numbers";
|
|
13889
14238
|
static inputSchema() {
|
|
13890
|
-
return
|
|
14239
|
+
return inputSchema68;
|
|
13891
14240
|
}
|
|
13892
14241
|
static outputSchema() {
|
|
13893
|
-
return
|
|
14242
|
+
return outputSchema67;
|
|
13894
14243
|
}
|
|
13895
14244
|
async execute(input2, _context) {
|
|
13896
14245
|
return { result: Math.max(...input2.values) };
|
|
13897
14246
|
}
|
|
13898
14247
|
}
|
|
13899
|
-
|
|
14248
|
+
Workflow50.prototype.scalarMax = CreateWorkflow49(ScalarMaxTask);
|
|
13900
14249
|
// src/task/scalar/ScalarMinTask.ts
|
|
13901
|
-
import { CreateWorkflow as
|
|
13902
|
-
var
|
|
14250
|
+
import { CreateWorkflow as CreateWorkflow50, Task as Task60, Workflow as Workflow51 } from "@workglow/task-graph";
|
|
14251
|
+
var inputSchema69 = {
|
|
13903
14252
|
type: "object",
|
|
13904
14253
|
properties: {
|
|
13905
14254
|
values: {
|
|
@@ -13912,7 +14261,7 @@ var inputSchema70 = {
|
|
|
13912
14261
|
required: ["values"],
|
|
13913
14262
|
additionalProperties: false
|
|
13914
14263
|
};
|
|
13915
|
-
var
|
|
14264
|
+
var outputSchema68 = {
|
|
13916
14265
|
type: "object",
|
|
13917
14266
|
properties: {
|
|
13918
14267
|
result: {
|
|
@@ -13925,25 +14274,25 @@ var outputSchema69 = {
|
|
|
13925
14274
|
additionalProperties: false
|
|
13926
14275
|
};
|
|
13927
14276
|
|
|
13928
|
-
class ScalarMinTask extends
|
|
14277
|
+
class ScalarMinTask extends Task60 {
|
|
13929
14278
|
static type = "ScalarMinTask";
|
|
13930
14279
|
static category = "Math";
|
|
13931
14280
|
static title = "Min";
|
|
13932
14281
|
static description = "Returns the smallest of the given numbers";
|
|
13933
14282
|
static inputSchema() {
|
|
13934
|
-
return
|
|
14283
|
+
return inputSchema69;
|
|
13935
14284
|
}
|
|
13936
14285
|
static outputSchema() {
|
|
13937
|
-
return
|
|
14286
|
+
return outputSchema68;
|
|
13938
14287
|
}
|
|
13939
14288
|
async execute(input2, _context) {
|
|
13940
14289
|
return { result: Math.min(...input2.values) };
|
|
13941
14290
|
}
|
|
13942
14291
|
}
|
|
13943
|
-
|
|
14292
|
+
Workflow51.prototype.scalarMin = CreateWorkflow50(ScalarMinTask);
|
|
13944
14293
|
// src/task/scalar/ScalarRoundTask.ts
|
|
13945
|
-
import { CreateWorkflow as
|
|
13946
|
-
var
|
|
14294
|
+
import { CreateWorkflow as CreateWorkflow51, Task as Task61, Workflow as Workflow52 } from "@workglow/task-graph";
|
|
14295
|
+
var inputSchema70 = {
|
|
13947
14296
|
type: "object",
|
|
13948
14297
|
properties: {
|
|
13949
14298
|
value: {
|
|
@@ -13955,7 +14304,7 @@ var inputSchema71 = {
|
|
|
13955
14304
|
required: ["value"],
|
|
13956
14305
|
additionalProperties: false
|
|
13957
14306
|
};
|
|
13958
|
-
var
|
|
14307
|
+
var outputSchema69 = {
|
|
13959
14308
|
type: "object",
|
|
13960
14309
|
properties: {
|
|
13961
14310
|
result: {
|
|
@@ -13968,25 +14317,25 @@ var outputSchema70 = {
|
|
|
13968
14317
|
additionalProperties: false
|
|
13969
14318
|
};
|
|
13970
14319
|
|
|
13971
|
-
class ScalarRoundTask extends
|
|
14320
|
+
class ScalarRoundTask extends Task61 {
|
|
13972
14321
|
static type = "ScalarRoundTask";
|
|
13973
14322
|
static category = "Math";
|
|
13974
14323
|
static title = "Round";
|
|
13975
14324
|
static description = "Returns the value of a number rounded to the nearest integer";
|
|
13976
14325
|
static inputSchema() {
|
|
13977
|
-
return
|
|
14326
|
+
return inputSchema70;
|
|
13978
14327
|
}
|
|
13979
14328
|
static outputSchema() {
|
|
13980
|
-
return
|
|
14329
|
+
return outputSchema69;
|
|
13981
14330
|
}
|
|
13982
14331
|
async execute(input2, _context) {
|
|
13983
14332
|
return { result: Math.round(input2.value) };
|
|
13984
14333
|
}
|
|
13985
14334
|
}
|
|
13986
|
-
|
|
14335
|
+
Workflow52.prototype.scalarRound = CreateWorkflow51(ScalarRoundTask);
|
|
13987
14336
|
// src/task/scalar/ScalarTruncTask.ts
|
|
13988
|
-
import { CreateWorkflow as
|
|
13989
|
-
var
|
|
14337
|
+
import { CreateWorkflow as CreateWorkflow52, Task as Task62, Workflow as Workflow53 } from "@workglow/task-graph";
|
|
14338
|
+
var inputSchema71 = {
|
|
13990
14339
|
type: "object",
|
|
13991
14340
|
properties: {
|
|
13992
14341
|
value: {
|
|
@@ -13998,7 +14347,7 @@ var inputSchema72 = {
|
|
|
13998
14347
|
required: ["value"],
|
|
13999
14348
|
additionalProperties: false
|
|
14000
14349
|
};
|
|
14001
|
-
var
|
|
14350
|
+
var outputSchema70 = {
|
|
14002
14351
|
type: "object",
|
|
14003
14352
|
properties: {
|
|
14004
14353
|
result: {
|
|
@@ -14011,25 +14360,25 @@ var outputSchema71 = {
|
|
|
14011
14360
|
additionalProperties: false
|
|
14012
14361
|
};
|
|
14013
14362
|
|
|
14014
|
-
class ScalarTruncTask extends
|
|
14363
|
+
class ScalarTruncTask extends Task62 {
|
|
14015
14364
|
static type = "ScalarTruncTask";
|
|
14016
14365
|
static category = "Math";
|
|
14017
14366
|
static title = "Truncate";
|
|
14018
14367
|
static description = "Returns the integer part of a number by removing fractional digits";
|
|
14019
14368
|
static inputSchema() {
|
|
14020
|
-
return
|
|
14369
|
+
return inputSchema71;
|
|
14021
14370
|
}
|
|
14022
14371
|
static outputSchema() {
|
|
14023
|
-
return
|
|
14372
|
+
return outputSchema70;
|
|
14024
14373
|
}
|
|
14025
14374
|
async execute(input2, _context) {
|
|
14026
14375
|
return { result: Math.trunc(input2.value) };
|
|
14027
14376
|
}
|
|
14028
14377
|
}
|
|
14029
|
-
|
|
14378
|
+
Workflow53.prototype.scalarTrunc = CreateWorkflow52(ScalarTruncTask);
|
|
14030
14379
|
// src/task/SplitTask.ts
|
|
14031
|
-
import { CreateWorkflow as
|
|
14032
|
-
var
|
|
14380
|
+
import { CreateWorkflow as CreateWorkflow53, Task as Task63, Workflow as Workflow54 } from "@workglow/task-graph";
|
|
14381
|
+
var inputSchema72 = {
|
|
14033
14382
|
type: "object",
|
|
14034
14383
|
properties: {
|
|
14035
14384
|
input: {
|
|
@@ -14039,7 +14388,7 @@ var inputSchema73 = {
|
|
|
14039
14388
|
},
|
|
14040
14389
|
additionalProperties: false
|
|
14041
14390
|
};
|
|
14042
|
-
var
|
|
14391
|
+
var outputSchema71 = {
|
|
14043
14392
|
type: "object",
|
|
14044
14393
|
properties: {},
|
|
14045
14394
|
additionalProperties: true
|
|
@@ -14056,17 +14405,17 @@ function fanoutToIndexedOutputs(inputValue) {
|
|
|
14056
14405
|
return output;
|
|
14057
14406
|
}
|
|
14058
14407
|
|
|
14059
|
-
class SplitTask extends
|
|
14408
|
+
class SplitTask extends Task63 {
|
|
14060
14409
|
static type = "SplitTask";
|
|
14061
14410
|
static category = "Utility";
|
|
14062
14411
|
static title = "Split";
|
|
14063
14412
|
static description = "Splits an array into individual outputs, creating one output per element";
|
|
14064
14413
|
static cacheable = false;
|
|
14065
14414
|
static inputSchema() {
|
|
14066
|
-
return
|
|
14415
|
+
return inputSchema72;
|
|
14067
14416
|
}
|
|
14068
14417
|
static outputSchema() {
|
|
14069
|
-
return
|
|
14418
|
+
return outputSchema71;
|
|
14070
14419
|
}
|
|
14071
14420
|
async execute(input2, _context) {
|
|
14072
14421
|
return fanoutToIndexedOutputs(input2.input);
|
|
@@ -14079,22 +14428,22 @@ var split = (input2, config = {}) => {
|
|
|
14079
14428
|
const task = new SplitTask(config);
|
|
14080
14429
|
return task.run(input2);
|
|
14081
14430
|
};
|
|
14082
|
-
|
|
14431
|
+
Workflow54.prototype.split = CreateWorkflow53(SplitTask);
|
|
14083
14432
|
// src/task/string/StringConcatTask.ts
|
|
14084
14433
|
import {
|
|
14085
|
-
CreateWorkflow as
|
|
14086
|
-
Task as
|
|
14087
|
-
Workflow as
|
|
14434
|
+
CreateWorkflow as CreateWorkflow54,
|
|
14435
|
+
Task as Task64,
|
|
14436
|
+
Workflow as Workflow55
|
|
14088
14437
|
} from "@workglow/task-graph";
|
|
14089
14438
|
function concatStrings(input2) {
|
|
14090
14439
|
return Object.values(input2).join("");
|
|
14091
14440
|
}
|
|
14092
|
-
var
|
|
14441
|
+
var inputSchema73 = {
|
|
14093
14442
|
type: "object",
|
|
14094
14443
|
properties: {},
|
|
14095
14444
|
additionalProperties: { type: "string" }
|
|
14096
14445
|
};
|
|
14097
|
-
var
|
|
14446
|
+
var outputSchema72 = {
|
|
14098
14447
|
type: "object",
|
|
14099
14448
|
properties: {
|
|
14100
14449
|
text: {
|
|
@@ -14107,16 +14456,16 @@ var outputSchema73 = {
|
|
|
14107
14456
|
additionalProperties: false
|
|
14108
14457
|
};
|
|
14109
14458
|
|
|
14110
|
-
class StringConcatTask extends
|
|
14459
|
+
class StringConcatTask extends Task64 {
|
|
14111
14460
|
static type = "StringConcatTask";
|
|
14112
14461
|
static category = "String";
|
|
14113
14462
|
static title = "Concat";
|
|
14114
14463
|
static description = "Concatenates all input strings";
|
|
14115
14464
|
static inputSchema() {
|
|
14116
|
-
return
|
|
14465
|
+
return inputSchema73;
|
|
14117
14466
|
}
|
|
14118
14467
|
static outputSchema() {
|
|
14119
|
-
return
|
|
14468
|
+
return outputSchema72;
|
|
14120
14469
|
}
|
|
14121
14470
|
async execute(input2, _context) {
|
|
14122
14471
|
return { text: concatStrings(input2) };
|
|
@@ -14125,17 +14474,17 @@ class StringConcatTask extends Task80 {
|
|
|
14125
14474
|
return { text: concatStrings(input2) };
|
|
14126
14475
|
}
|
|
14127
14476
|
}
|
|
14128
|
-
|
|
14477
|
+
Workflow55.prototype.stringConcat = CreateWorkflow54(StringConcatTask);
|
|
14129
14478
|
// src/task/string/StringIncludesTask.ts
|
|
14130
14479
|
import {
|
|
14131
|
-
CreateWorkflow as
|
|
14132
|
-
Task as
|
|
14133
|
-
Workflow as
|
|
14480
|
+
CreateWorkflow as CreateWorkflow55,
|
|
14481
|
+
Task as Task65,
|
|
14482
|
+
Workflow as Workflow56
|
|
14134
14483
|
} from "@workglow/task-graph";
|
|
14135
14484
|
function stringIncludes(text, search) {
|
|
14136
14485
|
return text.includes(search);
|
|
14137
14486
|
}
|
|
14138
|
-
var
|
|
14487
|
+
var inputSchema74 = {
|
|
14139
14488
|
type: "object",
|
|
14140
14489
|
properties: {
|
|
14141
14490
|
text: {
|
|
@@ -14152,7 +14501,7 @@ var inputSchema75 = {
|
|
|
14152
14501
|
required: ["text", "search"],
|
|
14153
14502
|
additionalProperties: false
|
|
14154
14503
|
};
|
|
14155
|
-
var
|
|
14504
|
+
var outputSchema73 = {
|
|
14156
14505
|
type: "object",
|
|
14157
14506
|
properties: {
|
|
14158
14507
|
included: {
|
|
@@ -14165,16 +14514,16 @@ var outputSchema74 = {
|
|
|
14165
14514
|
additionalProperties: false
|
|
14166
14515
|
};
|
|
14167
14516
|
|
|
14168
|
-
class StringIncludesTask extends
|
|
14517
|
+
class StringIncludesTask extends Task65 {
|
|
14169
14518
|
static type = "StringIncludesTask";
|
|
14170
14519
|
static category = "String";
|
|
14171
14520
|
static title = "Includes";
|
|
14172
14521
|
static description = "Checks if a string contains a substring";
|
|
14173
14522
|
static inputSchema() {
|
|
14174
|
-
return
|
|
14523
|
+
return inputSchema74;
|
|
14175
14524
|
}
|
|
14176
14525
|
static outputSchema() {
|
|
14177
|
-
return
|
|
14526
|
+
return outputSchema73;
|
|
14178
14527
|
}
|
|
14179
14528
|
async execute(input2, _context) {
|
|
14180
14529
|
return { included: stringIncludes(input2.text, input2.search) };
|
|
@@ -14183,17 +14532,17 @@ class StringIncludesTask extends Task81 {
|
|
|
14183
14532
|
return { included: stringIncludes(input2.text, input2.search) };
|
|
14184
14533
|
}
|
|
14185
14534
|
}
|
|
14186
|
-
|
|
14535
|
+
Workflow56.prototype.stringIncludes = CreateWorkflow55(StringIncludesTask);
|
|
14187
14536
|
// src/task/string/StringJoinTask.ts
|
|
14188
14537
|
import {
|
|
14189
|
-
CreateWorkflow as
|
|
14190
|
-
Task as
|
|
14191
|
-
Workflow as
|
|
14538
|
+
CreateWorkflow as CreateWorkflow56,
|
|
14539
|
+
Task as Task66,
|
|
14540
|
+
Workflow as Workflow57
|
|
14192
14541
|
} from "@workglow/task-graph";
|
|
14193
14542
|
function joinStrings(texts, separator) {
|
|
14194
14543
|
return texts.join(separator ?? "");
|
|
14195
14544
|
}
|
|
14196
|
-
var
|
|
14545
|
+
var inputSchema75 = {
|
|
14197
14546
|
type: "object",
|
|
14198
14547
|
properties: {
|
|
14199
14548
|
texts: {
|
|
@@ -14212,7 +14561,7 @@ var inputSchema76 = {
|
|
|
14212
14561
|
required: ["texts"],
|
|
14213
14562
|
additionalProperties: false
|
|
14214
14563
|
};
|
|
14215
|
-
var
|
|
14564
|
+
var outputSchema74 = {
|
|
14216
14565
|
type: "object",
|
|
14217
14566
|
properties: {
|
|
14218
14567
|
text: {
|
|
@@ -14225,16 +14574,16 @@ var outputSchema75 = {
|
|
|
14225
14574
|
additionalProperties: false
|
|
14226
14575
|
};
|
|
14227
14576
|
|
|
14228
|
-
class StringJoinTask extends
|
|
14577
|
+
class StringJoinTask extends Task66 {
|
|
14229
14578
|
static type = "StringJoinTask";
|
|
14230
14579
|
static category = "String";
|
|
14231
14580
|
static title = "Join";
|
|
14232
14581
|
static description = "Joins an array of strings with a separator";
|
|
14233
14582
|
static inputSchema() {
|
|
14234
|
-
return
|
|
14583
|
+
return inputSchema75;
|
|
14235
14584
|
}
|
|
14236
14585
|
static outputSchema() {
|
|
14237
|
-
return
|
|
14586
|
+
return outputSchema74;
|
|
14238
14587
|
}
|
|
14239
14588
|
async execute(input2, _context) {
|
|
14240
14589
|
return { text: joinStrings(input2.texts, input2.separator) };
|
|
@@ -14243,17 +14592,17 @@ class StringJoinTask extends Task82 {
|
|
|
14243
14592
|
return { text: joinStrings(input2.texts, input2.separator) };
|
|
14244
14593
|
}
|
|
14245
14594
|
}
|
|
14246
|
-
|
|
14595
|
+
Workflow57.prototype.stringJoin = CreateWorkflow56(StringJoinTask);
|
|
14247
14596
|
// src/task/string/StringLengthTask.ts
|
|
14248
14597
|
import {
|
|
14249
|
-
CreateWorkflow as
|
|
14250
|
-
Task as
|
|
14251
|
-
Workflow as
|
|
14598
|
+
CreateWorkflow as CreateWorkflow57,
|
|
14599
|
+
Task as Task67,
|
|
14600
|
+
Workflow as Workflow58
|
|
14252
14601
|
} from "@workglow/task-graph";
|
|
14253
14602
|
function stringLength(text) {
|
|
14254
14603
|
return text.length;
|
|
14255
14604
|
}
|
|
14256
|
-
var
|
|
14605
|
+
var inputSchema76 = {
|
|
14257
14606
|
type: "object",
|
|
14258
14607
|
properties: {
|
|
14259
14608
|
text: {
|
|
@@ -14265,7 +14614,7 @@ var inputSchema77 = {
|
|
|
14265
14614
|
required: ["text"],
|
|
14266
14615
|
additionalProperties: false
|
|
14267
14616
|
};
|
|
14268
|
-
var
|
|
14617
|
+
var outputSchema75 = {
|
|
14269
14618
|
type: "object",
|
|
14270
14619
|
properties: {
|
|
14271
14620
|
length: {
|
|
@@ -14278,16 +14627,16 @@ var outputSchema76 = {
|
|
|
14278
14627
|
additionalProperties: false
|
|
14279
14628
|
};
|
|
14280
14629
|
|
|
14281
|
-
class StringLengthTask extends
|
|
14630
|
+
class StringLengthTask extends Task67 {
|
|
14282
14631
|
static type = "StringLengthTask";
|
|
14283
14632
|
static category = "String";
|
|
14284
14633
|
static title = "Length";
|
|
14285
14634
|
static description = "Returns the length of a string";
|
|
14286
14635
|
static inputSchema() {
|
|
14287
|
-
return
|
|
14636
|
+
return inputSchema76;
|
|
14288
14637
|
}
|
|
14289
14638
|
static outputSchema() {
|
|
14290
|
-
return
|
|
14639
|
+
return outputSchema75;
|
|
14291
14640
|
}
|
|
14292
14641
|
async execute(input2, _context) {
|
|
14293
14642
|
return { length: stringLength(input2.text) };
|
|
@@ -14296,17 +14645,17 @@ class StringLengthTask extends Task83 {
|
|
|
14296
14645
|
return { length: stringLength(input2.text) };
|
|
14297
14646
|
}
|
|
14298
14647
|
}
|
|
14299
|
-
|
|
14648
|
+
Workflow58.prototype.stringLength = CreateWorkflow57(StringLengthTask);
|
|
14300
14649
|
// src/task/string/StringLowerCaseTask.ts
|
|
14301
14650
|
import {
|
|
14302
|
-
CreateWorkflow as
|
|
14303
|
-
Task as
|
|
14304
|
-
Workflow as
|
|
14651
|
+
CreateWorkflow as CreateWorkflow58,
|
|
14652
|
+
Task as Task68,
|
|
14653
|
+
Workflow as Workflow59
|
|
14305
14654
|
} from "@workglow/task-graph";
|
|
14306
14655
|
function toLowerCase(text) {
|
|
14307
14656
|
return text.toLowerCase();
|
|
14308
14657
|
}
|
|
14309
|
-
var
|
|
14658
|
+
var inputSchema77 = {
|
|
14310
14659
|
type: "object",
|
|
14311
14660
|
properties: {
|
|
14312
14661
|
text: {
|
|
@@ -14318,7 +14667,7 @@ var inputSchema78 = {
|
|
|
14318
14667
|
required: ["text"],
|
|
14319
14668
|
additionalProperties: false
|
|
14320
14669
|
};
|
|
14321
|
-
var
|
|
14670
|
+
var outputSchema76 = {
|
|
14322
14671
|
type: "object",
|
|
14323
14672
|
properties: {
|
|
14324
14673
|
text: {
|
|
@@ -14331,16 +14680,16 @@ var outputSchema77 = {
|
|
|
14331
14680
|
additionalProperties: false
|
|
14332
14681
|
};
|
|
14333
14682
|
|
|
14334
|
-
class StringLowerCaseTask extends
|
|
14683
|
+
class StringLowerCaseTask extends Task68 {
|
|
14335
14684
|
static type = "StringLowerCaseTask";
|
|
14336
14685
|
static category = "String";
|
|
14337
14686
|
static title = "Lower Case";
|
|
14338
14687
|
static description = "Converts a string to lower case";
|
|
14339
14688
|
static inputSchema() {
|
|
14340
|
-
return
|
|
14689
|
+
return inputSchema77;
|
|
14341
14690
|
}
|
|
14342
14691
|
static outputSchema() {
|
|
14343
|
-
return
|
|
14692
|
+
return outputSchema76;
|
|
14344
14693
|
}
|
|
14345
14694
|
async execute(input2, _context) {
|
|
14346
14695
|
return { text: toLowerCase(input2.text) };
|
|
@@ -14349,17 +14698,17 @@ class StringLowerCaseTask extends Task84 {
|
|
|
14349
14698
|
return { text: toLowerCase(input2.text) };
|
|
14350
14699
|
}
|
|
14351
14700
|
}
|
|
14352
|
-
|
|
14701
|
+
Workflow59.prototype.stringLowerCase = CreateWorkflow58(StringLowerCaseTask);
|
|
14353
14702
|
// src/task/string/StringReplaceTask.ts
|
|
14354
14703
|
import {
|
|
14355
|
-
CreateWorkflow as
|
|
14356
|
-
Task as
|
|
14357
|
-
Workflow as
|
|
14704
|
+
CreateWorkflow as CreateWorkflow59,
|
|
14705
|
+
Task as Task69,
|
|
14706
|
+
Workflow as Workflow60
|
|
14358
14707
|
} from "@workglow/task-graph";
|
|
14359
14708
|
function replaceString(text, search, replace) {
|
|
14360
14709
|
return text.replaceAll(search, replace);
|
|
14361
14710
|
}
|
|
14362
|
-
var
|
|
14711
|
+
var inputSchema78 = {
|
|
14363
14712
|
type: "object",
|
|
14364
14713
|
properties: {
|
|
14365
14714
|
text: {
|
|
@@ -14381,7 +14730,7 @@ var inputSchema79 = {
|
|
|
14381
14730
|
required: ["text", "search", "replace"],
|
|
14382
14731
|
additionalProperties: false
|
|
14383
14732
|
};
|
|
14384
|
-
var
|
|
14733
|
+
var outputSchema77 = {
|
|
14385
14734
|
type: "object",
|
|
14386
14735
|
properties: {
|
|
14387
14736
|
text: {
|
|
@@ -14394,16 +14743,16 @@ var outputSchema78 = {
|
|
|
14394
14743
|
additionalProperties: false
|
|
14395
14744
|
};
|
|
14396
14745
|
|
|
14397
|
-
class StringReplaceTask extends
|
|
14746
|
+
class StringReplaceTask extends Task69 {
|
|
14398
14747
|
static type = "StringReplaceTask";
|
|
14399
14748
|
static category = "String";
|
|
14400
14749
|
static title = "Replace";
|
|
14401
14750
|
static description = "Replaces all occurrences of a substring";
|
|
14402
14751
|
static inputSchema() {
|
|
14403
|
-
return
|
|
14752
|
+
return inputSchema78;
|
|
14404
14753
|
}
|
|
14405
14754
|
static outputSchema() {
|
|
14406
|
-
return
|
|
14755
|
+
return outputSchema77;
|
|
14407
14756
|
}
|
|
14408
14757
|
async execute(input2, _context) {
|
|
14409
14758
|
return { text: replaceString(input2.text, input2.search, input2.replace) };
|
|
@@ -14412,17 +14761,17 @@ class StringReplaceTask extends Task85 {
|
|
|
14412
14761
|
return { text: replaceString(input2.text, input2.search, input2.replace) };
|
|
14413
14762
|
}
|
|
14414
14763
|
}
|
|
14415
|
-
|
|
14764
|
+
Workflow60.prototype.stringReplace = CreateWorkflow59(StringReplaceTask);
|
|
14416
14765
|
// src/task/string/StringSliceTask.ts
|
|
14417
14766
|
import {
|
|
14418
|
-
CreateWorkflow as
|
|
14419
|
-
Task as
|
|
14420
|
-
Workflow as
|
|
14767
|
+
CreateWorkflow as CreateWorkflow60,
|
|
14768
|
+
Task as Task70,
|
|
14769
|
+
Workflow as Workflow61
|
|
14421
14770
|
} from "@workglow/task-graph";
|
|
14422
14771
|
function sliceString(text, start, end) {
|
|
14423
14772
|
return text.slice(start, end);
|
|
14424
14773
|
}
|
|
14425
|
-
var
|
|
14774
|
+
var inputSchema79 = {
|
|
14426
14775
|
type: "object",
|
|
14427
14776
|
properties: {
|
|
14428
14777
|
text: {
|
|
@@ -14444,7 +14793,7 @@ var inputSchema80 = {
|
|
|
14444
14793
|
required: ["text", "start"],
|
|
14445
14794
|
additionalProperties: false
|
|
14446
14795
|
};
|
|
14447
|
-
var
|
|
14796
|
+
var outputSchema78 = {
|
|
14448
14797
|
type: "object",
|
|
14449
14798
|
properties: {
|
|
14450
14799
|
text: {
|
|
@@ -14457,16 +14806,16 @@ var outputSchema79 = {
|
|
|
14457
14806
|
additionalProperties: false
|
|
14458
14807
|
};
|
|
14459
14808
|
|
|
14460
|
-
class StringSliceTask extends
|
|
14809
|
+
class StringSliceTask extends Task70 {
|
|
14461
14810
|
static type = "StringSliceTask";
|
|
14462
14811
|
static category = "String";
|
|
14463
14812
|
static title = "Slice";
|
|
14464
14813
|
static description = "Extracts a substring by start and optional end index";
|
|
14465
14814
|
static inputSchema() {
|
|
14466
|
-
return
|
|
14815
|
+
return inputSchema79;
|
|
14467
14816
|
}
|
|
14468
14817
|
static outputSchema() {
|
|
14469
|
-
return
|
|
14818
|
+
return outputSchema78;
|
|
14470
14819
|
}
|
|
14471
14820
|
async execute(input2, _context) {
|
|
14472
14821
|
return { text: sliceString(input2.text, input2.start, input2.end) };
|
|
@@ -14475,12 +14824,12 @@ class StringSliceTask extends Task86 {
|
|
|
14475
14824
|
return { text: sliceString(input2.text, input2.start, input2.end) };
|
|
14476
14825
|
}
|
|
14477
14826
|
}
|
|
14478
|
-
|
|
14827
|
+
Workflow61.prototype.stringSlice = CreateWorkflow60(StringSliceTask);
|
|
14479
14828
|
// src/task/string/StringTemplateTask.ts
|
|
14480
14829
|
import {
|
|
14481
|
-
CreateWorkflow as
|
|
14482
|
-
Task as
|
|
14483
|
-
Workflow as
|
|
14830
|
+
CreateWorkflow as CreateWorkflow61,
|
|
14831
|
+
Task as Task71,
|
|
14832
|
+
Workflow as Workflow62
|
|
14484
14833
|
} from "@workglow/task-graph";
|
|
14485
14834
|
function renderStringTemplate(template, values) {
|
|
14486
14835
|
let text = template;
|
|
@@ -14489,7 +14838,7 @@ function renderStringTemplate(template, values) {
|
|
|
14489
14838
|
}
|
|
14490
14839
|
return text;
|
|
14491
14840
|
}
|
|
14492
|
-
var
|
|
14841
|
+
var inputSchema80 = {
|
|
14493
14842
|
type: "object",
|
|
14494
14843
|
properties: {
|
|
14495
14844
|
template: {
|
|
@@ -14507,7 +14856,7 @@ var inputSchema81 = {
|
|
|
14507
14856
|
required: ["template", "values"],
|
|
14508
14857
|
additionalProperties: false
|
|
14509
14858
|
};
|
|
14510
|
-
var
|
|
14859
|
+
var outputSchema79 = {
|
|
14511
14860
|
type: "object",
|
|
14512
14861
|
properties: {
|
|
14513
14862
|
text: {
|
|
@@ -14520,16 +14869,16 @@ var outputSchema80 = {
|
|
|
14520
14869
|
additionalProperties: false
|
|
14521
14870
|
};
|
|
14522
14871
|
|
|
14523
|
-
class StringTemplateTask extends
|
|
14872
|
+
class StringTemplateTask extends Task71 {
|
|
14524
14873
|
static type = "StringTemplateTask";
|
|
14525
14874
|
static category = "String";
|
|
14526
14875
|
static title = "Template";
|
|
14527
14876
|
static description = "Replaces {{key}} placeholders in a template string with values";
|
|
14528
14877
|
static inputSchema() {
|
|
14529
|
-
return
|
|
14878
|
+
return inputSchema80;
|
|
14530
14879
|
}
|
|
14531
14880
|
static outputSchema() {
|
|
14532
|
-
return
|
|
14881
|
+
return outputSchema79;
|
|
14533
14882
|
}
|
|
14534
14883
|
async execute(input2, _context) {
|
|
14535
14884
|
return { text: renderStringTemplate(input2.template, input2.values) };
|
|
@@ -14538,17 +14887,17 @@ class StringTemplateTask extends Task87 {
|
|
|
14538
14887
|
return { text: renderStringTemplate(input2.template, input2.values) };
|
|
14539
14888
|
}
|
|
14540
14889
|
}
|
|
14541
|
-
|
|
14890
|
+
Workflow62.prototype.stringTemplate = CreateWorkflow61(StringTemplateTask);
|
|
14542
14891
|
// src/task/string/StringTrimTask.ts
|
|
14543
14892
|
import {
|
|
14544
|
-
CreateWorkflow as
|
|
14545
|
-
Task as
|
|
14546
|
-
Workflow as
|
|
14893
|
+
CreateWorkflow as CreateWorkflow62,
|
|
14894
|
+
Task as Task72,
|
|
14895
|
+
Workflow as Workflow63
|
|
14547
14896
|
} from "@workglow/task-graph";
|
|
14548
14897
|
function trimString(text) {
|
|
14549
14898
|
return text.trim();
|
|
14550
14899
|
}
|
|
14551
|
-
var
|
|
14900
|
+
var inputSchema81 = {
|
|
14552
14901
|
type: "object",
|
|
14553
14902
|
properties: {
|
|
14554
14903
|
text: {
|
|
@@ -14560,7 +14909,7 @@ var inputSchema82 = {
|
|
|
14560
14909
|
required: ["text"],
|
|
14561
14910
|
additionalProperties: false
|
|
14562
14911
|
};
|
|
14563
|
-
var
|
|
14912
|
+
var outputSchema80 = {
|
|
14564
14913
|
type: "object",
|
|
14565
14914
|
properties: {
|
|
14566
14915
|
text: {
|
|
@@ -14573,16 +14922,16 @@ var outputSchema81 = {
|
|
|
14573
14922
|
additionalProperties: false
|
|
14574
14923
|
};
|
|
14575
14924
|
|
|
14576
|
-
class StringTrimTask extends
|
|
14925
|
+
class StringTrimTask extends Task72 {
|
|
14577
14926
|
static type = "StringTrimTask";
|
|
14578
14927
|
static category = "String";
|
|
14579
14928
|
static title = "Trim";
|
|
14580
14929
|
static description = "Removes leading and trailing whitespace from a string";
|
|
14581
14930
|
static inputSchema() {
|
|
14582
|
-
return
|
|
14931
|
+
return inputSchema81;
|
|
14583
14932
|
}
|
|
14584
14933
|
static outputSchema() {
|
|
14585
|
-
return
|
|
14934
|
+
return outputSchema80;
|
|
14586
14935
|
}
|
|
14587
14936
|
async execute(input2, _context) {
|
|
14588
14937
|
return { text: trimString(input2.text) };
|
|
@@ -14591,17 +14940,17 @@ class StringTrimTask extends Task88 {
|
|
|
14591
14940
|
return { text: trimString(input2.text) };
|
|
14592
14941
|
}
|
|
14593
14942
|
}
|
|
14594
|
-
|
|
14943
|
+
Workflow63.prototype.stringTrim = CreateWorkflow62(StringTrimTask);
|
|
14595
14944
|
// src/task/string/StringUpperCaseTask.ts
|
|
14596
14945
|
import {
|
|
14597
|
-
CreateWorkflow as
|
|
14598
|
-
Task as
|
|
14599
|
-
Workflow as
|
|
14946
|
+
CreateWorkflow as CreateWorkflow63,
|
|
14947
|
+
Task as Task73,
|
|
14948
|
+
Workflow as Workflow64
|
|
14600
14949
|
} from "@workglow/task-graph";
|
|
14601
14950
|
function toUpperCase(text) {
|
|
14602
14951
|
return text.toUpperCase();
|
|
14603
14952
|
}
|
|
14604
|
-
var
|
|
14953
|
+
var inputSchema82 = {
|
|
14605
14954
|
type: "object",
|
|
14606
14955
|
properties: {
|
|
14607
14956
|
text: {
|
|
@@ -14613,7 +14962,7 @@ var inputSchema83 = {
|
|
|
14613
14962
|
required: ["text"],
|
|
14614
14963
|
additionalProperties: false
|
|
14615
14964
|
};
|
|
14616
|
-
var
|
|
14965
|
+
var outputSchema81 = {
|
|
14617
14966
|
type: "object",
|
|
14618
14967
|
properties: {
|
|
14619
14968
|
text: {
|
|
@@ -14626,16 +14975,16 @@ var outputSchema82 = {
|
|
|
14626
14975
|
additionalProperties: false
|
|
14627
14976
|
};
|
|
14628
14977
|
|
|
14629
|
-
class StringUpperCaseTask extends
|
|
14978
|
+
class StringUpperCaseTask extends Task73 {
|
|
14630
14979
|
static type = "StringUpperCaseTask";
|
|
14631
14980
|
static category = "String";
|
|
14632
14981
|
static title = "Upper Case";
|
|
14633
14982
|
static description = "Converts a string to upper case";
|
|
14634
14983
|
static inputSchema() {
|
|
14635
|
-
return
|
|
14984
|
+
return inputSchema82;
|
|
14636
14985
|
}
|
|
14637
14986
|
static outputSchema() {
|
|
14638
|
-
return
|
|
14987
|
+
return outputSchema81;
|
|
14639
14988
|
}
|
|
14640
14989
|
async execute(input2, _context) {
|
|
14641
14990
|
return { text: toUpperCase(input2.text) };
|
|
@@ -14644,12 +14993,12 @@ class StringUpperCaseTask extends Task89 {
|
|
|
14644
14993
|
return { text: toUpperCase(input2.text) };
|
|
14645
14994
|
}
|
|
14646
14995
|
}
|
|
14647
|
-
|
|
14996
|
+
Workflow64.prototype.stringUpperCase = CreateWorkflow63(StringUpperCaseTask);
|
|
14648
14997
|
// src/task/TemplateTask.ts
|
|
14649
14998
|
import {
|
|
14650
|
-
CreateWorkflow as
|
|
14651
|
-
Task as
|
|
14652
|
-
Workflow as
|
|
14999
|
+
CreateWorkflow as CreateWorkflow64,
|
|
15000
|
+
Task as Task74,
|
|
15001
|
+
Workflow as Workflow65
|
|
14653
15002
|
} from "@workglow/task-graph";
|
|
14654
15003
|
function renderTemplate(template, values) {
|
|
14655
15004
|
return template.replace(/\{\{([^{}]+)\}\}/g, (_match, expr) => {
|
|
@@ -14669,7 +15018,7 @@ function renderTemplate(template, values) {
|
|
|
14669
15018
|
return defaultValue !== undefined ? defaultValue : "";
|
|
14670
15019
|
});
|
|
14671
15020
|
}
|
|
14672
|
-
var
|
|
15021
|
+
var inputSchema83 = {
|
|
14673
15022
|
type: "object",
|
|
14674
15023
|
properties: {
|
|
14675
15024
|
template: {
|
|
@@ -14687,7 +15036,7 @@ var inputSchema84 = {
|
|
|
14687
15036
|
required: ["template", "values"],
|
|
14688
15037
|
additionalProperties: false
|
|
14689
15038
|
};
|
|
14690
|
-
var
|
|
15039
|
+
var outputSchema82 = {
|
|
14691
15040
|
type: "object",
|
|
14692
15041
|
properties: {
|
|
14693
15042
|
result: {
|
|
@@ -14700,16 +15049,16 @@ var outputSchema83 = {
|
|
|
14700
15049
|
additionalProperties: false
|
|
14701
15050
|
};
|
|
14702
15051
|
|
|
14703
|
-
class TemplateTask extends
|
|
15052
|
+
class TemplateTask extends Task74 {
|
|
14704
15053
|
static type = "TemplateTask";
|
|
14705
15054
|
static category = "Utility";
|
|
14706
15055
|
static title = "Template";
|
|
14707
15056
|
static description = "Renders a template string with {{key}} placeholders and optional defaults";
|
|
14708
15057
|
static inputSchema() {
|
|
14709
|
-
return
|
|
15058
|
+
return inputSchema83;
|
|
14710
15059
|
}
|
|
14711
15060
|
static outputSchema() {
|
|
14712
|
-
return
|
|
15061
|
+
return outputSchema82;
|
|
14713
15062
|
}
|
|
14714
15063
|
async execute(input2, _context) {
|
|
14715
15064
|
return { result: renderTemplate(input2.template, input2.values) };
|
|
@@ -14718,13 +15067,13 @@ class TemplateTask extends Task90 {
|
|
|
14718
15067
|
return { result: renderTemplate(input2.template, input2.values) };
|
|
14719
15068
|
}
|
|
14720
15069
|
}
|
|
14721
|
-
|
|
15070
|
+
Workflow65.prototype.template = CreateWorkflow64(TemplateTask);
|
|
14722
15071
|
// src/task/vector/VectorDistanceTask.ts
|
|
14723
|
-
import { CreateWorkflow as
|
|
15072
|
+
import { CreateWorkflow as CreateWorkflow65, Task as Task75, Workflow as Workflow66 } from "@workglow/task-graph";
|
|
14724
15073
|
import {
|
|
14725
15074
|
TypedArraySchema as TypedArraySchema5
|
|
14726
15075
|
} from "@workglow/util/schema";
|
|
14727
|
-
var
|
|
15076
|
+
var inputSchema84 = {
|
|
14728
15077
|
type: "object",
|
|
14729
15078
|
properties: {
|
|
14730
15079
|
vectors: {
|
|
@@ -14740,7 +15089,7 @@ var inputSchema85 = {
|
|
|
14740
15089
|
required: ["vectors"],
|
|
14741
15090
|
additionalProperties: false
|
|
14742
15091
|
};
|
|
14743
|
-
var
|
|
15092
|
+
var outputSchema83 = {
|
|
14744
15093
|
type: "object",
|
|
14745
15094
|
properties: {
|
|
14746
15095
|
result: {
|
|
@@ -14753,16 +15102,16 @@ var outputSchema84 = {
|
|
|
14753
15102
|
additionalProperties: false
|
|
14754
15103
|
};
|
|
14755
15104
|
|
|
14756
|
-
class VectorDistanceTask extends
|
|
15105
|
+
class VectorDistanceTask extends Task75 {
|
|
14757
15106
|
static type = "VectorDistanceTask";
|
|
14758
15107
|
static category = "Vector";
|
|
14759
15108
|
static title = "Distance";
|
|
14760
15109
|
static description = "Returns the Euclidean distance between the first two vectors";
|
|
14761
15110
|
static inputSchema() {
|
|
14762
|
-
return
|
|
15111
|
+
return inputSchema84;
|
|
14763
15112
|
}
|
|
14764
15113
|
static outputSchema() {
|
|
14765
|
-
return
|
|
15114
|
+
return outputSchema83;
|
|
14766
15115
|
}
|
|
14767
15116
|
async execute(input2, _context) {
|
|
14768
15117
|
const { vectors } = input2;
|
|
@@ -14780,13 +15129,13 @@ class VectorDistanceTask extends Task91 {
|
|
|
14780
15129
|
return { result: Math.sqrt(sumPrecise(diffs)) };
|
|
14781
15130
|
}
|
|
14782
15131
|
}
|
|
14783
|
-
|
|
15132
|
+
Workflow66.prototype.vectorDistance = CreateWorkflow65(VectorDistanceTask);
|
|
14784
15133
|
// src/task/vector/VectorDotProductTask.ts
|
|
14785
|
-
import { CreateWorkflow as
|
|
15134
|
+
import { CreateWorkflow as CreateWorkflow66, Task as Task76, Workflow as Workflow67 } from "@workglow/task-graph";
|
|
14786
15135
|
import {
|
|
14787
15136
|
TypedArraySchema as TypedArraySchema6
|
|
14788
15137
|
} from "@workglow/util/schema";
|
|
14789
|
-
var
|
|
15138
|
+
var inputSchema85 = {
|
|
14790
15139
|
type: "object",
|
|
14791
15140
|
properties: {
|
|
14792
15141
|
vectors: {
|
|
@@ -14802,7 +15151,7 @@ var inputSchema86 = {
|
|
|
14802
15151
|
required: ["vectors"],
|
|
14803
15152
|
additionalProperties: false
|
|
14804
15153
|
};
|
|
14805
|
-
var
|
|
15154
|
+
var outputSchema84 = {
|
|
14806
15155
|
type: "object",
|
|
14807
15156
|
properties: {
|
|
14808
15157
|
result: {
|
|
@@ -14815,16 +15164,16 @@ var outputSchema85 = {
|
|
|
14815
15164
|
additionalProperties: false
|
|
14816
15165
|
};
|
|
14817
15166
|
|
|
14818
|
-
class VectorDotProductTask extends
|
|
15167
|
+
class VectorDotProductTask extends Task76 {
|
|
14819
15168
|
static type = "VectorDotProductTask";
|
|
14820
15169
|
static category = "Vector";
|
|
14821
15170
|
static title = "Dot Product";
|
|
14822
15171
|
static description = "Returns the dot (inner) product of the first two vectors";
|
|
14823
15172
|
static inputSchema() {
|
|
14824
|
-
return
|
|
15173
|
+
return inputSchema85;
|
|
14825
15174
|
}
|
|
14826
15175
|
static outputSchema() {
|
|
14827
|
-
return
|
|
15176
|
+
return outputSchema84;
|
|
14828
15177
|
}
|
|
14829
15178
|
async execute(input2, _context) {
|
|
14830
15179
|
const { vectors } = input2;
|
|
@@ -14839,14 +15188,14 @@ class VectorDotProductTask extends Task92 {
|
|
|
14839
15188
|
return { result: sumPrecise(products) };
|
|
14840
15189
|
}
|
|
14841
15190
|
}
|
|
14842
|
-
|
|
15191
|
+
Workflow67.prototype.vectorDotProduct = CreateWorkflow66(VectorDotProductTask);
|
|
14843
15192
|
// src/task/vector/VectorNormalizeTask.ts
|
|
14844
|
-
import { CreateWorkflow as
|
|
15193
|
+
import { CreateWorkflow as CreateWorkflow67, Task as Task77, Workflow as Workflow68 } from "@workglow/task-graph";
|
|
14845
15194
|
import {
|
|
14846
15195
|
TypedArraySchema as TypedArraySchema7,
|
|
14847
15196
|
normalize
|
|
14848
15197
|
} from "@workglow/util/schema";
|
|
14849
|
-
var
|
|
15198
|
+
var inputSchema86 = {
|
|
14850
15199
|
type: "object",
|
|
14851
15200
|
properties: {
|
|
14852
15201
|
vector: TypedArraySchema7({
|
|
@@ -14857,7 +15206,7 @@ var inputSchema87 = {
|
|
|
14857
15206
|
required: ["vector"],
|
|
14858
15207
|
additionalProperties: false
|
|
14859
15208
|
};
|
|
14860
|
-
var
|
|
15209
|
+
var outputSchema85 = {
|
|
14861
15210
|
type: "object",
|
|
14862
15211
|
properties: {
|
|
14863
15212
|
result: TypedArraySchema7({
|
|
@@ -14869,29 +15218,29 @@ var outputSchema86 = {
|
|
|
14869
15218
|
additionalProperties: false
|
|
14870
15219
|
};
|
|
14871
15220
|
|
|
14872
|
-
class VectorNormalizeTask extends
|
|
15221
|
+
class VectorNormalizeTask extends Task77 {
|
|
14873
15222
|
static type = "VectorNormalizeTask";
|
|
14874
15223
|
static category = "Vector";
|
|
14875
15224
|
static title = "Normalize";
|
|
14876
15225
|
static description = "Returns the L2-normalized (unit length) vector";
|
|
14877
15226
|
static inputSchema() {
|
|
14878
|
-
return
|
|
15227
|
+
return inputSchema86;
|
|
14879
15228
|
}
|
|
14880
15229
|
static outputSchema() {
|
|
14881
|
-
return
|
|
15230
|
+
return outputSchema85;
|
|
14882
15231
|
}
|
|
14883
15232
|
async execute(input2, _context) {
|
|
14884
15233
|
return { result: normalize(input2.vector) };
|
|
14885
15234
|
}
|
|
14886
15235
|
}
|
|
14887
|
-
|
|
15236
|
+
Workflow68.prototype.vectorNormalize = CreateWorkflow67(VectorNormalizeTask);
|
|
14888
15237
|
// src/task/vector/VectorScaleTask.ts
|
|
14889
|
-
import { CreateWorkflow as
|
|
15238
|
+
import { CreateWorkflow as CreateWorkflow68, Task as Task78, Workflow as Workflow69 } from "@workglow/task-graph";
|
|
14890
15239
|
import {
|
|
14891
15240
|
createTypedArrayFrom as createTypedArrayFrom5,
|
|
14892
15241
|
TypedArraySchema as TypedArraySchema8
|
|
14893
15242
|
} from "@workglow/util/schema";
|
|
14894
|
-
var
|
|
15243
|
+
var inputSchema87 = {
|
|
14895
15244
|
type: "object",
|
|
14896
15245
|
properties: {
|
|
14897
15246
|
vector: TypedArraySchema8({
|
|
@@ -14907,7 +15256,7 @@ var inputSchema88 = {
|
|
|
14907
15256
|
required: ["vector", "scalar"],
|
|
14908
15257
|
additionalProperties: false
|
|
14909
15258
|
};
|
|
14910
|
-
var
|
|
15259
|
+
var outputSchema86 = {
|
|
14911
15260
|
type: "object",
|
|
14912
15261
|
properties: {
|
|
14913
15262
|
result: TypedArraySchema8({
|
|
@@ -14919,16 +15268,16 @@ var outputSchema87 = {
|
|
|
14919
15268
|
additionalProperties: false
|
|
14920
15269
|
};
|
|
14921
15270
|
|
|
14922
|
-
class VectorScaleTask extends
|
|
15271
|
+
class VectorScaleTask extends Task78 {
|
|
14923
15272
|
static type = "VectorScaleTask";
|
|
14924
15273
|
static category = "Vector";
|
|
14925
15274
|
static title = "Scale";
|
|
14926
15275
|
static description = "Multiplies each element of a vector by a scalar";
|
|
14927
15276
|
static inputSchema() {
|
|
14928
|
-
return
|
|
15277
|
+
return inputSchema87;
|
|
14929
15278
|
}
|
|
14930
15279
|
static outputSchema() {
|
|
14931
|
-
return
|
|
15280
|
+
return outputSchema86;
|
|
14932
15281
|
}
|
|
14933
15282
|
async execute(input2, _context) {
|
|
14934
15283
|
const { vector, scalar } = input2;
|
|
@@ -14936,7 +15285,7 @@ class VectorScaleTask extends Task94 {
|
|
|
14936
15285
|
return { result: createTypedArrayFrom5([vector], values) };
|
|
14937
15286
|
}
|
|
14938
15287
|
}
|
|
14939
|
-
|
|
15288
|
+
Workflow69.prototype.vectorScale = CreateWorkflow68(VectorScaleTask);
|
|
14940
15289
|
|
|
14941
15290
|
// src/common.ts
|
|
14942
15291
|
import { TaskRegistry as TaskRegistry2 } from "@workglow/task-graph";
|
|
@@ -15037,7 +15386,6 @@ var registerCommonTasks = () => {
|
|
|
15037
15386
|
ImageBorderTask,
|
|
15038
15387
|
ImageTransparencyTask,
|
|
15039
15388
|
ImageBlurTask,
|
|
15040
|
-
ImageWatermarkTask,
|
|
15041
15389
|
ImagePixelateTask,
|
|
15042
15390
|
ImageInvertTask,
|
|
15043
15391
|
ImageBrightnessTask,
|
|
@@ -15054,10 +15402,10 @@ var registerCommonTasks = () => {
|
|
|
15054
15402
|
};
|
|
15055
15403
|
// src/task/FileLoaderTask.ts
|
|
15056
15404
|
import {
|
|
15057
|
-
CreateWorkflow as
|
|
15058
|
-
Task as
|
|
15405
|
+
CreateWorkflow as CreateWorkflow69,
|
|
15406
|
+
Task as Task79,
|
|
15059
15407
|
TaskAbortedError as TaskAbortedError4,
|
|
15060
|
-
Workflow as
|
|
15408
|
+
Workflow as Workflow70
|
|
15061
15409
|
} from "@workglow/task-graph";
|
|
15062
15410
|
var _papaParse;
|
|
15063
15411
|
async function getPapaParse() {
|
|
@@ -15071,7 +15419,7 @@ async function getPapaParse() {
|
|
|
15071
15419
|
}
|
|
15072
15420
|
return _papaParse;
|
|
15073
15421
|
}
|
|
15074
|
-
var
|
|
15422
|
+
var inputSchema88 = {
|
|
15075
15423
|
type: "object",
|
|
15076
15424
|
properties: {
|
|
15077
15425
|
url: {
|
|
@@ -15091,7 +15439,7 @@ var inputSchema89 = {
|
|
|
15091
15439
|
required: ["url"],
|
|
15092
15440
|
additionalProperties: false
|
|
15093
15441
|
};
|
|
15094
|
-
var
|
|
15442
|
+
var outputSchema87 = {
|
|
15095
15443
|
type: "object",
|
|
15096
15444
|
properties: {
|
|
15097
15445
|
text: {
|
|
@@ -15142,17 +15490,17 @@ var outputSchema88 = {
|
|
|
15142
15490
|
additionalProperties: false
|
|
15143
15491
|
};
|
|
15144
15492
|
|
|
15145
|
-
class FileLoaderTask extends
|
|
15493
|
+
class FileLoaderTask extends Task79 {
|
|
15146
15494
|
static type = "FileLoaderTask";
|
|
15147
15495
|
static category = "Document";
|
|
15148
15496
|
static title = "File Loader";
|
|
15149
15497
|
static description = "Load documents from URLs (http://, https://)";
|
|
15150
15498
|
static cacheable = true;
|
|
15151
15499
|
static inputSchema() {
|
|
15152
|
-
return
|
|
15500
|
+
return inputSchema88;
|
|
15153
15501
|
}
|
|
15154
15502
|
static outputSchema() {
|
|
15155
|
-
return
|
|
15503
|
+
return outputSchema87;
|
|
15156
15504
|
}
|
|
15157
15505
|
async execute(input2, context) {
|
|
15158
15506
|
const { url, format = "auto" } = input2;
|
|
@@ -15508,7 +15856,7 @@ class FileLoaderTask extends Task95 {
|
|
|
15508
15856
|
var fileLoader = (input2, config) => {
|
|
15509
15857
|
return new FileLoaderTask(config).run(input2);
|
|
15510
15858
|
};
|
|
15511
|
-
|
|
15859
|
+
Workflow70.prototype.fileLoader = CreateWorkflow69(FileLoaderTask);
|
|
15512
15860
|
// src/browser.ts
|
|
15513
15861
|
import { TaskRegistry as TaskRegistry3 } from "@workglow/task-graph";
|
|
15514
15862
|
registerMcpTaskDeps({
|
|
@@ -15550,9 +15898,9 @@ export {
|
|
|
15550
15898
|
registerMcpTaskDeps,
|
|
15551
15899
|
registerMcpServer,
|
|
15552
15900
|
registerImageRasterCodec2 as registerImageRasterCodec,
|
|
15901
|
+
registerFilterOp33 as registerFilterOp,
|
|
15553
15902
|
registerCommonTasks2 as registerCommonTasks,
|
|
15554
15903
|
registerBrowserDeps,
|
|
15555
|
-
produceImageOutput,
|
|
15556
15904
|
normalizeOutputMimeType,
|
|
15557
15905
|
merge,
|
|
15558
15906
|
mcpTransportTypes,
|
|
@@ -15570,11 +15918,12 @@ export {
|
|
|
15570
15918
|
lambda,
|
|
15571
15919
|
json,
|
|
15572
15920
|
javaScript,
|
|
15921
|
+
hasFilterOp2 as hasFilterOp,
|
|
15573
15922
|
getSafeFetchImpl,
|
|
15574
15923
|
getMcpTaskDeps,
|
|
15575
15924
|
getMcpServerConfig,
|
|
15576
15925
|
getMcpServer,
|
|
15577
|
-
|
|
15926
|
+
getImageRasterCodec,
|
|
15578
15927
|
getGlobalMcpServers,
|
|
15579
15928
|
getGlobalMcpServerRepository,
|
|
15580
15929
|
getBrowserDeps,
|
|
@@ -15591,6 +15940,8 @@ export {
|
|
|
15591
15940
|
assertWithinPixelBudget,
|
|
15592
15941
|
assertWithinByteBudget,
|
|
15593
15942
|
assertIsDataUri,
|
|
15943
|
+
applyFilter3 as applyFilter,
|
|
15944
|
+
_resetFilterRegistryForTests,
|
|
15594
15945
|
VectorSumTask,
|
|
15595
15946
|
VectorSubtractTask,
|
|
15596
15947
|
VectorScaleTask,
|
|
@@ -15651,7 +16002,6 @@ export {
|
|
|
15651
16002
|
JavaScriptTask,
|
|
15652
16003
|
InputTask,
|
|
15653
16004
|
InMemoryMcpServerRepository,
|
|
15654
|
-
ImageWatermarkTask,
|
|
15655
16005
|
ImageTransparencyTask,
|
|
15656
16006
|
ImageTintTask,
|
|
15657
16007
|
ImageThresholdTask,
|
|
@@ -15664,14 +16014,12 @@ export {
|
|
|
15664
16014
|
ImageInvertTask,
|
|
15665
16015
|
ImageGrayscaleTask,
|
|
15666
16016
|
ImageFlipTask,
|
|
16017
|
+
ImageFilterTask,
|
|
15667
16018
|
ImageCropTask,
|
|
15668
16019
|
ImageContrastTask,
|
|
15669
16020
|
ImageBrightnessTask,
|
|
15670
16021
|
ImageBorderTask,
|
|
15671
16022
|
ImageBlurTask,
|
|
15672
|
-
ImageBinarySchemaOptions,
|
|
15673
|
-
ImageBinarySchema,
|
|
15674
|
-
ImageBinaryOrDataUriSchema,
|
|
15675
16023
|
HumanInputTask,
|
|
15676
16024
|
HumanApprovalTask,
|
|
15677
16025
|
HexColorSchema,
|
|
@@ -15681,6 +16029,7 @@ export {
|
|
|
15681
16029
|
DelayTask,
|
|
15682
16030
|
DebugLogTask,
|
|
15683
16031
|
DateFormatTask,
|
|
16032
|
+
CssRgbColorSchema,
|
|
15684
16033
|
CredentialStoreOAuthProvider,
|
|
15685
16034
|
ColorValueSchema,
|
|
15686
16035
|
ColorSchema,
|
|
@@ -15718,4 +16067,4 @@ export {
|
|
|
15718
16067
|
ArrayTask
|
|
15719
16068
|
};
|
|
15720
16069
|
|
|
15721
|
-
//# debugId=
|
|
16070
|
+
//# debugId=D3AD9302546FF7B664756E2164756E21
|