@uploadista/core 0.2.0 → 1.0.0-beta.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{checksum-BjP9nb5b.mjs → checksum-BRjFmTRk.mjs} +2 -2
- package/dist/{checksum-BjP9nb5b.mjs.map → checksum-BRjFmTRk.mjs.map} +1 -1
- package/dist/{checksum-B7RDiO7V.cjs → checksum-BrjQ8GJL.cjs} +1 -1
- package/dist/errors/index.cjs +1 -1
- package/dist/errors/index.d.cts +1 -1
- package/dist/errors/index.d.mts +1 -1
- package/dist/errors/index.mjs +1 -1
- package/dist/flow/index.cjs +1 -1
- package/dist/flow/index.d.cts +3 -2
- package/dist/flow/index.d.mts +8 -5
- package/dist/flow/index.mjs +1 -1
- package/dist/generate-id-BAMRQzMr.d.cts +34 -0
- package/dist/generate-id-BAMRQzMr.d.cts.map +1 -0
- package/dist/generate-id-DuZwLm4m.d.mts +34 -0
- package/dist/generate-id-DuZwLm4m.d.mts.map +1 -0
- package/dist/index.cjs +1 -1
- package/dist/index.d.cts +8 -5
- package/dist/index.d.mts +8 -5
- package/dist/index.mjs +1 -1
- package/dist/middleware-BghazxzH.d.cts +4129 -0
- package/dist/middleware-BghazxzH.d.cts.map +1 -0
- package/dist/middleware-CYizzAhP.d.mts +4129 -0
- package/dist/middleware-CYizzAhP.d.mts.map +1 -0
- package/dist/resolve-upload-metadata-CYl2PHIs.d.mts +4542 -0
- package/dist/resolve-upload-metadata-CYl2PHIs.d.mts.map +1 -0
- package/dist/resolve-upload-metadata-D0qFuyWc.d.cts +4542 -0
- package/dist/resolve-upload-metadata-D0qFuyWc.d.cts.map +1 -0
- package/dist/run-args-CM14Vtzu.cjs +1 -0
- package/dist/run-args-DSKHoSWs.mjs +2 -0
- package/dist/run-args-DSKHoSWs.mjs.map +1 -0
- package/dist/{stream-limiter-BCFULdAM.d.cts → stream-limiter-7wkBVLWT.d.mts} +2 -2
- package/dist/{stream-limiter-BCFULdAM.d.cts.map → stream-limiter-7wkBVLWT.d.mts.map} +1 -1
- package/dist/{stream-limiter-DZ22uIqf.cjs → stream-limiter-B-Y0DTgA.cjs} +1 -1
- package/dist/{stream-limiter-CTJPEJqE.mjs → stream-limiter-CvDuNIyd.mjs} +2 -2
- package/dist/{stream-limiter-CTJPEJqE.mjs.map → stream-limiter-CvDuNIyd.mjs.map} +1 -1
- package/dist/{stream-limiter-Bi7OTbRp.d.mts → stream-limiter-D1KC-6pK.d.cts} +2 -2
- package/dist/{stream-limiter-Bi7OTbRp.d.mts.map → stream-limiter-D1KC-6pK.d.cts.map} +1 -1
- package/dist/streams/index.cjs +1 -1
- package/dist/streams/index.d.cts +1 -1
- package/dist/streams/index.d.mts +2 -2
- package/dist/streams/index.mjs +1 -1
- package/dist/testing/index.cjs +1 -1
- package/dist/testing/index.d.cts +2 -1
- package/dist/testing/index.d.cts.map +1 -1
- package/dist/testing/index.d.mts +7 -4
- package/dist/testing/index.d.mts.map +1 -1
- package/dist/testing/index.mjs +1 -1
- package/dist/{throttle-Da0OA8JT.d.cts → throttle-3FRcr7MU.d.mts} +4 -34
- package/dist/throttle-3FRcr7MU.d.mts.map +1 -0
- package/dist/{throttle-ibiT6E4U.d.mts → throttle-BlH27EGu.d.cts} +4 -34
- package/dist/throttle-BlH27EGu.d.cts.map +1 -0
- package/dist/{throttle-KnkRgZPi.cjs → throttle-Dp59f37i.cjs} +1 -1
- package/dist/{throttle-CnDa3v1k.mjs → throttle-TFY-V41R.mjs} +2 -2
- package/dist/{throttle-CnDa3v1k.mjs.map → throttle-TFY-V41R.mjs.map} +1 -1
- package/dist/types/index.cjs +1 -1
- package/dist/types/index.d.cts +2 -2
- package/dist/types/index.d.mts +3 -5
- package/dist/types/index.mjs +1 -1
- package/dist/upload/index.cjs +1 -1
- package/dist/upload/index.d.cts +1 -1
- package/dist/upload/index.d.mts +4 -4
- package/dist/upload/index.mjs +1 -1
- package/dist/upload-strategy-negotiator-0-dpNIce.d.cts +455 -0
- package/dist/upload-strategy-negotiator-0-dpNIce.d.cts.map +1 -0
- package/dist/upload-strategy-negotiator-BR_o1Ez8.cjs +1 -0
- package/dist/upload-strategy-negotiator-C9MeoOnW.mjs +2 -0
- package/dist/upload-strategy-negotiator-C9MeoOnW.mjs.map +1 -0
- package/dist/upload-strategy-negotiator-CEnlfVgJ.d.mts +455 -0
- package/dist/upload-strategy-negotiator-CEnlfVgJ.d.mts.map +1 -0
- package/dist/{uploadista-error-B-geDgi8.cjs → uploadista-error-CZx1JU_L.cjs} +3 -1
- package/dist/{uploadista-error-Fsfvr2Bb.mjs → uploadista-error-DQ7V1FlX.mjs} +3 -1
- package/dist/uploadista-error-DQ7V1FlX.mjs.map +1 -0
- package/dist/{uploadista-error-BragVhIs.d.mts → uploadista-error-LtiZn-R_.d.mts} +2 -2
- package/dist/{uploadista-error-BragVhIs.d.mts.map → uploadista-error-LtiZn-R_.d.mts.map} +1 -1
- package/dist/{uploadista-error-Cj_pAFck.d.cts → uploadista-error-eZtG4iyf.d.cts} +2 -2
- package/dist/{uploadista-error-Cj_pAFck.d.cts.map → uploadista-error-eZtG4iyf.d.cts.map} +1 -1
- package/dist/utils/index.cjs +1 -1
- package/dist/utils/index.d.cts +2 -1
- package/dist/utils/index.d.mts +3 -2
- package/dist/utils/index.mjs +1 -1
- package/dist/websocket-Br0ijEZA.cjs +1 -0
- package/dist/websocket-DftnHFfN.mjs +2 -0
- package/dist/websocket-DftnHFfN.mjs.map +1 -0
- package/package.json +3 -3
- package/src/errors/uploadista-error.ts +11 -1
- package/src/flow/README.md +115 -0
- package/src/flow/flow-engine.ts +36 -2
- package/src/flow/flow-queue-store.ts +155 -0
- package/src/flow/flow-queue.ts +640 -0
- package/src/flow/index.ts +4 -0
- package/src/flow/types/flow-queue-item.ts +154 -0
- package/src/types/data-store.ts +3 -3
- package/src/types/kv-store.ts +31 -1
- package/src/upload/write-to-store.ts +24 -29
- package/tests/flow-queue-store.test.ts +150 -0
- package/tests/flow-queue.test.ts +308 -0
- package/dist/resolve-upload-metadata-BUVl1LoS.d.cts +0 -8723
- package/dist/resolve-upload-metadata-BUVl1LoS.d.cts.map +0 -1
- package/dist/resolve-upload-metadata-MPDmDfOZ.d.mts +0 -8723
- package/dist/resolve-upload-metadata-MPDmDfOZ.d.mts.map +0 -1
- package/dist/run-args-WD1otVrz.mjs +0 -2
- package/dist/run-args-WD1otVrz.mjs.map +0 -1
- package/dist/run-args-g74p8pEZ.cjs +0 -1
- package/dist/throttle-Da0OA8JT.d.cts.map +0 -1
- package/dist/throttle-ibiT6E4U.d.mts.map +0 -1
- package/dist/upload-strategy-negotiator-BuxPf1sa.mjs +0 -2
- package/dist/upload-strategy-negotiator-BuxPf1sa.mjs.map +0 -1
- package/dist/upload-strategy-negotiator-DfiQ0Fy0.cjs +0 -1
- package/dist/uploadista-error-Fsfvr2Bb.mjs.map +0 -1
- package/dist/websocket-Avz4T8YB.cjs +0 -1
- package/dist/websocket-CdgVhVJs.mjs +0 -2
- package/dist/websocket-CdgVhVJs.mjs.map +0 -1
|
@@ -0,0 +1,4542 @@
|
|
|
1
|
+
import { n as UploadistaError } from "./uploadista-error-LtiZn-R_.mjs";
|
|
2
|
+
import { $ as BaseKvStoreService, Bt as TypedOutput, Dn as NodeType, Dt as FlowCircuitBreakerConfig, G as StreamingConfig, In as DeadLetterCleanupOptions, Ln as DeadLetterCleanupResult, Lt as NodeExecutionResult, Mt as FlowNode, Nt as FlowNodeData, O as WebSocketConnection, Pt as NamingContext, Q as BaseKvStore, Qt as RetryPolicy, S as FlowEventEmitter, Tt as FileNamingConfig, Un as DeadLetterQueueStats, Vn as DeadLetterListOptions, Xn as CircuitBreakerStoreService, Yn as CircuitBreakerStore, et as DeadLetterQueueKVStore, ft as FlowQueueConfig, gt as FlowJob, ht as FlowQueueStats, in as FlowEvent, jn as UploadFile, jt as FlowEdge$1, kt as FlowConfig, mt as FlowQueueItemStatus, nt as FlowQueueKVStore, pt as FlowQueueItem, q as UploadFileDataStores, qn as CircuitBreakerStateValue, rt as KvStore, tt as FlowJobKVStore, zn as DeadLetterItem, zt as TypeCompatibilityChecker } from "./middleware-CYizzAhP.mjs";
|
|
3
|
+
import { i as UploadEngine } from "./upload-strategy-negotiator-CEnlfVgJ.mjs";
|
|
4
|
+
import { Context, Effect, Layer, Option, Stream } from "effect";
|
|
5
|
+
import * as zod from "zod";
|
|
6
|
+
import { z } from "zod";
|
|
7
|
+
import * as zod_v4_core0 from "zod/v4/core";
|
|
8
|
+
|
|
9
|
+
//#region src/flow/circuit-breaker.d.ts
|
|
10
|
+
/**
|
|
11
|
+
* Circuit breaker state machine states.
|
|
12
|
+
*
|
|
13
|
+
* - `closed`: Normal operation, tracking failures in sliding window
|
|
14
|
+
* - `open`: Rejecting all requests immediately, waiting for reset timeout
|
|
15
|
+
* - `half-open`: Allowing limited test requests to probe service health
|
|
16
|
+
*/
|
|
17
|
+
type CircuitBreakerState = "closed" | "open" | "half-open";
|
|
18
|
+
/**
|
|
19
|
+
* Configuration for a circuit breaker.
|
|
20
|
+
*
|
|
21
|
+
* @property enabled - Whether circuit breaker is active (default: false for backward compatibility)
|
|
22
|
+
* @property failureThreshold - Number of failures within window to trip circuit (default: 5)
|
|
23
|
+
* @property resetTimeout - Milliseconds to wait in open state before half-open (default: 30000)
|
|
24
|
+
* @property halfOpenRequests - Number of successful requests in half-open to close (default: 3)
|
|
25
|
+
* @property windowDuration - Sliding window duration in milliseconds (default: 60000)
|
|
26
|
+
* @property fallback - Behavior when circuit is open
|
|
27
|
+
*/
|
|
28
|
+
interface CircuitBreakerConfig {
|
|
29
|
+
/** Whether circuit breaker is active (default: false) */
|
|
30
|
+
enabled?: boolean;
|
|
31
|
+
/** Number of failures within window to trip circuit (default: 5) */
|
|
32
|
+
failureThreshold?: number;
|
|
33
|
+
/** Milliseconds to wait in open state before half-open (default: 30000) */
|
|
34
|
+
resetTimeout?: number;
|
|
35
|
+
/** Number of successful requests in half-open to close (default: 3) */
|
|
36
|
+
halfOpenRequests?: number;
|
|
37
|
+
/** Sliding window duration in milliseconds (default: 60000) */
|
|
38
|
+
windowDuration?: number;
|
|
39
|
+
/** Behavior when circuit is open */
|
|
40
|
+
fallback?: CircuitBreakerFallback;
|
|
41
|
+
}
|
|
42
|
+
/**
|
|
43
|
+
* Fallback behavior when circuit is open.
|
|
44
|
+
*
|
|
45
|
+
* - `fail`: Fail immediately with CIRCUIT_BREAKER_OPEN error (default)
|
|
46
|
+
* - `skip`: Skip node, pass input through as output
|
|
47
|
+
* - `default`: Return a configured default value
|
|
48
|
+
*/
|
|
49
|
+
type CircuitBreakerFallback = {
|
|
50
|
+
type: "fail";
|
|
51
|
+
} | {
|
|
52
|
+
type: "skip";
|
|
53
|
+
passThrough: true;
|
|
54
|
+
} | {
|
|
55
|
+
type: "default";
|
|
56
|
+
value: unknown;
|
|
57
|
+
};
|
|
58
|
+
/**
|
|
59
|
+
* Event emitted when circuit state changes.
|
|
60
|
+
*/
|
|
61
|
+
interface CircuitBreakerEvent {
|
|
62
|
+
nodeType: string;
|
|
63
|
+
previousState: CircuitBreakerState;
|
|
64
|
+
newState: CircuitBreakerState;
|
|
65
|
+
timestamp: number;
|
|
66
|
+
failureCount?: number;
|
|
67
|
+
}
|
|
68
|
+
/**
|
|
69
|
+
* Callback type for circuit state change events.
|
|
70
|
+
*/
|
|
71
|
+
type CircuitBreakerEventHandler = (event: CircuitBreakerEvent) => Effect.Effect<void, never, never>;
|
|
72
|
+
/**
|
|
73
|
+
* Default circuit breaker configuration values.
|
|
74
|
+
*/
|
|
75
|
+
declare const DEFAULT_CIRCUIT_BREAKER_CONFIG: Required<Omit<CircuitBreakerConfig, "fallback">> & {
|
|
76
|
+
fallback: CircuitBreakerFallback;
|
|
77
|
+
};
|
|
78
|
+
//#endregion
|
|
79
|
+
//#region src/flow/circuit-breaker-store.d.ts
|
|
80
|
+
/**
|
|
81
|
+
* Creates a CircuitBreakerStore backed by any BaseKvStore.
|
|
82
|
+
*
|
|
83
|
+
* This adapter wraps a generic KV store to provide circuit breaker state
|
|
84
|
+
* storage. It handles:
|
|
85
|
+
* - JSON serialization of state data
|
|
86
|
+
* - Sliding window expiry (checked on read/increment)
|
|
87
|
+
* - Read-modify-write for increment operations
|
|
88
|
+
*
|
|
89
|
+
* Note: This implementation uses read-modify-write for increments, which
|
|
90
|
+
* may have race conditions under high concurrency. This is acceptable for
|
|
91
|
+
* circuit breakers as they tolerate eventual consistency.
|
|
92
|
+
*
|
|
93
|
+
* @param baseStore - The underlying KV store
|
|
94
|
+
* @returns A CircuitBreakerStore implementation
|
|
95
|
+
*
|
|
96
|
+
* @example
|
|
97
|
+
* ```typescript
|
|
98
|
+
* const baseStore = makeRedisBaseKvStore({ redis: redisClient });
|
|
99
|
+
* const cbStore = makeKvCircuitBreakerStore(baseStore);
|
|
100
|
+
*
|
|
101
|
+
* // Use the store
|
|
102
|
+
* yield* cbStore.incrementFailures("describe-image", 60000);
|
|
103
|
+
* ```
|
|
104
|
+
*/
|
|
105
|
+
declare function makeKvCircuitBreakerStore(baseStore: BaseKvStore): CircuitBreakerStore;
|
|
106
|
+
/**
|
|
107
|
+
* Creates an in-memory CircuitBreakerStore.
|
|
108
|
+
*
|
|
109
|
+
* This implementation keeps all state in memory and is suitable for:
|
|
110
|
+
* - Single-instance deployments
|
|
111
|
+
* - Development and testing
|
|
112
|
+
* - Serverless functions (where state is ephemeral anyway)
|
|
113
|
+
*
|
|
114
|
+
* @returns A CircuitBreakerStore backed by in-memory Map
|
|
115
|
+
*
|
|
116
|
+
* @example
|
|
117
|
+
* ```typescript
|
|
118
|
+
* const cbStore = makeMemoryCircuitBreakerStore();
|
|
119
|
+
*
|
|
120
|
+
* // Use for testing
|
|
121
|
+
* yield* cbStore.incrementFailures("test-node", 60000);
|
|
122
|
+
* const state = yield* cbStore.getState("test-node");
|
|
123
|
+
* ```
|
|
124
|
+
*/
|
|
125
|
+
declare function makeMemoryCircuitBreakerStore(): CircuitBreakerStore;
|
|
126
|
+
/**
|
|
127
|
+
* Effect Layer that provides a CircuitBreakerStore backed by the BaseKvStore.
|
|
128
|
+
*
|
|
129
|
+
* Use this layer when you want circuit breaker state to be distributed
|
|
130
|
+
* across multiple instances (e.g., in a cluster).
|
|
131
|
+
*
|
|
132
|
+
* @example
|
|
133
|
+
* ```typescript
|
|
134
|
+
* const program = Effect.gen(function* () {
|
|
135
|
+
* const cbStore = yield* CircuitBreakerStoreService;
|
|
136
|
+
* // ...
|
|
137
|
+
* }).pipe(
|
|
138
|
+
* Effect.provide(kvCircuitBreakerStoreLayer),
|
|
139
|
+
* Effect.provide(redisKvStore({ redis: redisClient }))
|
|
140
|
+
* );
|
|
141
|
+
* ```
|
|
142
|
+
*/
|
|
143
|
+
declare const kvCircuitBreakerStoreLayer: Layer.Layer<CircuitBreakerStoreService, never, BaseKvStoreService>;
|
|
144
|
+
/**
|
|
145
|
+
* Effect Layer that provides an in-memory CircuitBreakerStore.
|
|
146
|
+
*
|
|
147
|
+
* Use this layer for single-instance deployments, development, or testing.
|
|
148
|
+
*
|
|
149
|
+
* @example
|
|
150
|
+
* ```typescript
|
|
151
|
+
* const program = Effect.gen(function* () {
|
|
152
|
+
* const cbStore = yield* CircuitBreakerStoreService;
|
|
153
|
+
* // ...
|
|
154
|
+
* }).pipe(
|
|
155
|
+
* Effect.provide(memoryCircuitBreakerStoreLayer)
|
|
156
|
+
* );
|
|
157
|
+
* ```
|
|
158
|
+
*/
|
|
159
|
+
declare const memoryCircuitBreakerStoreLayer: Layer.Layer<CircuitBreakerStoreService, never, never>;
|
|
160
|
+
//#endregion
|
|
161
|
+
//#region src/flow/distributed-circuit-breaker.d.ts
|
|
162
|
+
/**
|
|
163
|
+
* Result of checking if a request is allowed.
|
|
164
|
+
*/
|
|
165
|
+
interface AllowRequestResult {
|
|
166
|
+
allowed: boolean;
|
|
167
|
+
state: CircuitBreakerStateValue;
|
|
168
|
+
failureCount: number;
|
|
169
|
+
}
|
|
170
|
+
/**
|
|
171
|
+
* Distributed circuit breaker that uses a store for state persistence.
|
|
172
|
+
*
|
|
173
|
+
* Unlike the in-memory CircuitBreaker, this implementation stores all state
|
|
174
|
+
* in a CircuitBreakerStore, allowing multiple instances to share circuit state.
|
|
175
|
+
*
|
|
176
|
+
* All operations are Effect-based since they may involve I/O.
|
|
177
|
+
*
|
|
178
|
+
* @example
|
|
179
|
+
* ```typescript
|
|
180
|
+
* const breaker = new DistributedCircuitBreaker(
|
|
181
|
+
* "describe-image",
|
|
182
|
+
* { enabled: true, failureThreshold: 5 },
|
|
183
|
+
* store
|
|
184
|
+
* );
|
|
185
|
+
*
|
|
186
|
+
* // Check if request is allowed
|
|
187
|
+
* const { allowed, state } = yield* breaker.allowRequest();
|
|
188
|
+
* if (!allowed) {
|
|
189
|
+
* // Handle circuit open
|
|
190
|
+
* }
|
|
191
|
+
*
|
|
192
|
+
* // Record result
|
|
193
|
+
* try {
|
|
194
|
+
* const result = yield* executeNode();
|
|
195
|
+
* yield* breaker.recordSuccess();
|
|
196
|
+
* return result;
|
|
197
|
+
* } catch (error) {
|
|
198
|
+
* yield* breaker.recordFailure(error.message);
|
|
199
|
+
* throw error;
|
|
200
|
+
* }
|
|
201
|
+
* ```
|
|
202
|
+
*/
|
|
203
|
+
declare class DistributedCircuitBreaker {
|
|
204
|
+
private eventHandler?;
|
|
205
|
+
readonly nodeType: string;
|
|
206
|
+
readonly config: Required<Omit<CircuitBreakerConfig, "fallback">> & {
|
|
207
|
+
fallback: CircuitBreakerFallback;
|
|
208
|
+
};
|
|
209
|
+
readonly store: CircuitBreakerStore;
|
|
210
|
+
constructor(nodeType: string, config: CircuitBreakerConfig, store: CircuitBreakerStore);
|
|
211
|
+
/**
|
|
212
|
+
* Sets the event handler for state change notifications.
|
|
213
|
+
*/
|
|
214
|
+
setEventHandler(handler: CircuitBreakerEventHandler): void;
|
|
215
|
+
/**
|
|
216
|
+
* Checks if a request is allowed through the circuit.
|
|
217
|
+
*
|
|
218
|
+
* This method reads state from the store, checks for time-based transitions,
|
|
219
|
+
* and returns whether the request should proceed.
|
|
220
|
+
*/
|
|
221
|
+
allowRequest(): Effect.Effect<AllowRequestResult, UploadistaError>;
|
|
222
|
+
/**
|
|
223
|
+
* Gets the current circuit state from the store.
|
|
224
|
+
*/
|
|
225
|
+
getState(): Effect.Effect<CircuitBreakerStateValue, UploadistaError>;
|
|
226
|
+
/**
|
|
227
|
+
* Gets the current failure count from the store.
|
|
228
|
+
*/
|
|
229
|
+
getFailureCount(): Effect.Effect<number, UploadistaError>;
|
|
230
|
+
/**
|
|
231
|
+
* Records a successful execution.
|
|
232
|
+
*
|
|
233
|
+
* In half-open state, tracks successes toward closing the circuit.
|
|
234
|
+
* In closed state, resets the failure count.
|
|
235
|
+
*/
|
|
236
|
+
recordSuccess(): Effect.Effect<void, UploadistaError>;
|
|
237
|
+
/**
|
|
238
|
+
* Records a failed execution.
|
|
239
|
+
*
|
|
240
|
+
* In closed state, increments failure count and may trip the circuit.
|
|
241
|
+
* In half-open state, immediately reopens the circuit.
|
|
242
|
+
*/
|
|
243
|
+
recordFailure(_errorMessage: string): Effect.Effect<void, UploadistaError>;
|
|
244
|
+
/**
|
|
245
|
+
* Gets the fallback configuration.
|
|
246
|
+
*/
|
|
247
|
+
getFallback(): CircuitBreakerFallback;
|
|
248
|
+
/**
|
|
249
|
+
* Resets the circuit breaker to closed state.
|
|
250
|
+
*/
|
|
251
|
+
reset(): Effect.Effect<void, UploadistaError>;
|
|
252
|
+
/**
|
|
253
|
+
* Transitions to a new state.
|
|
254
|
+
*/
|
|
255
|
+
private transitionTo;
|
|
256
|
+
/**
|
|
257
|
+
* Emits a state change event if handler is set.
|
|
258
|
+
*/
|
|
259
|
+
private emitEvent;
|
|
260
|
+
}
|
|
261
|
+
/**
|
|
262
|
+
* Registry for managing distributed circuit breakers.
|
|
263
|
+
*
|
|
264
|
+
* Unlike the in-memory CircuitBreakerRegistry, this registry creates
|
|
265
|
+
* DistributedCircuitBreaker instances that share state via a store.
|
|
266
|
+
*
|
|
267
|
+
* @example
|
|
268
|
+
* ```typescript
|
|
269
|
+
* const store = makeKvCircuitBreakerStore(baseKvStore);
|
|
270
|
+
* const registry = new DistributedCircuitBreakerRegistry(store);
|
|
271
|
+
*
|
|
272
|
+
* const breaker = registry.getOrCreate("describe-image", {
|
|
273
|
+
* enabled: true,
|
|
274
|
+
* failureThreshold: 5
|
|
275
|
+
* });
|
|
276
|
+
* ```
|
|
277
|
+
*/
|
|
278
|
+
declare class DistributedCircuitBreakerRegistry {
|
|
279
|
+
readonly store: CircuitBreakerStore;
|
|
280
|
+
private breakers;
|
|
281
|
+
private eventHandler?;
|
|
282
|
+
constructor(store: CircuitBreakerStore);
|
|
283
|
+
/**
|
|
284
|
+
* Sets a global event handler for all circuit breakers.
|
|
285
|
+
*/
|
|
286
|
+
setEventHandler(handler: CircuitBreakerEventHandler): void;
|
|
287
|
+
/**
|
|
288
|
+
* Gets an existing circuit breaker or creates a new one.
|
|
289
|
+
*/
|
|
290
|
+
getOrCreate(nodeType: string, config: CircuitBreakerConfig): DistributedCircuitBreaker;
|
|
291
|
+
/**
|
|
292
|
+
* Gets an existing circuit breaker if it exists.
|
|
293
|
+
*/
|
|
294
|
+
get(nodeType: string): DistributedCircuitBreaker | undefined;
|
|
295
|
+
/**
|
|
296
|
+
* Gets statistics for all circuit breakers from the store.
|
|
297
|
+
*/
|
|
298
|
+
getAllStats(): Effect.Effect<Map<string, {
|
|
299
|
+
state: CircuitBreakerStateValue;
|
|
300
|
+
failureCount: number;
|
|
301
|
+
}>, UploadistaError>;
|
|
302
|
+
/**
|
|
303
|
+
* Resets all circuit breakers.
|
|
304
|
+
*/
|
|
305
|
+
resetAll(): Effect.Effect<void, UploadistaError>;
|
|
306
|
+
/**
|
|
307
|
+
* Clears all circuit breakers from the local cache.
|
|
308
|
+
* Note: This does not clear state from the store.
|
|
309
|
+
*/
|
|
310
|
+
clear(): void;
|
|
311
|
+
}
|
|
312
|
+
//#endregion
|
|
313
|
+
//#region src/flow/edge.d.ts
|
|
314
|
+
/**
|
|
315
|
+
* Represents a connection between two nodes in a flow, defining the data flow direction.
|
|
316
|
+
*
|
|
317
|
+
* Edges connect the output of a source node to the input of a target node,
|
|
318
|
+
* enabling data to flow through the processing pipeline in a directed acyclic graph (DAG).
|
|
319
|
+
*/
|
|
320
|
+
type FlowEdge = FlowEdge$1;
|
|
321
|
+
/**
|
|
322
|
+
* Creates a flow edge connecting two nodes in a processing pipeline.
|
|
323
|
+
*
|
|
324
|
+
* Edges define how data flows between nodes. The data output from the source node
|
|
325
|
+
* becomes the input for the target node. For nodes with multiple inputs/outputs,
|
|
326
|
+
* ports can be specified to route data to specific connections.
|
|
327
|
+
*
|
|
328
|
+
* @param config - Edge configuration
|
|
329
|
+
* @param config.source - ID of the source node (data originates here)
|
|
330
|
+
* @param config.target - ID of the target node (data flows to here)
|
|
331
|
+
* @param config.sourcePort - Optional port name on the source node for multi-output nodes
|
|
332
|
+
* @param config.targetPort - Optional port name on the target node for multi-input nodes
|
|
333
|
+
*
|
|
334
|
+
* @returns A FlowEdge object representing the connection
|
|
335
|
+
*
|
|
336
|
+
* @example
|
|
337
|
+
* ```typescript
|
|
338
|
+
* // Simple edge connecting two nodes
|
|
339
|
+
* const edge = createFlowEdge({
|
|
340
|
+
* source: "input-1",
|
|
341
|
+
* target: "process-1"
|
|
342
|
+
* });
|
|
343
|
+
*
|
|
344
|
+
* // Edge with ports for multi-input/output nodes
|
|
345
|
+
* const portEdge = createFlowEdge({
|
|
346
|
+
* source: "multiplex-1",
|
|
347
|
+
* target: "merge-1",
|
|
348
|
+
* sourcePort: "out-a",
|
|
349
|
+
* targetPort: "in-1"
|
|
350
|
+
* });
|
|
351
|
+
* ```
|
|
352
|
+
*/
|
|
353
|
+
declare function createFlowEdge({
|
|
354
|
+
source,
|
|
355
|
+
target,
|
|
356
|
+
sourcePort,
|
|
357
|
+
targetPort
|
|
358
|
+
}: {
|
|
359
|
+
source: string;
|
|
360
|
+
target: string;
|
|
361
|
+
sourcePort?: string;
|
|
362
|
+
targetPort?: string;
|
|
363
|
+
}): FlowEdge;
|
|
364
|
+
//#endregion
|
|
365
|
+
//#region src/flow/flow.d.ts
|
|
366
|
+
/**
|
|
367
|
+
* Serialized flow data for storage and transport.
|
|
368
|
+
* Contains the minimal information needed to reconstruct a flow.
|
|
369
|
+
*
|
|
370
|
+
* @property id - Unique flow identifier
|
|
371
|
+
* @property name - Human-readable flow name
|
|
372
|
+
* @property nodes - Array of node data (without execution logic)
|
|
373
|
+
* @property edges - Connections between nodes defining data flow
|
|
374
|
+
*/
|
|
375
|
+
type FlowData = {
|
|
376
|
+
id: string;
|
|
377
|
+
name: string;
|
|
378
|
+
nodes: FlowNodeData[];
|
|
379
|
+
edges: FlowEdge[];
|
|
380
|
+
};
|
|
381
|
+
/**
|
|
382
|
+
* Extracts serializable flow data from a Flow instance.
|
|
383
|
+
* Useful for storing flow definitions or sending them over the network.
|
|
384
|
+
*
|
|
385
|
+
* @template TRequirements - Effect requirements for the flow
|
|
386
|
+
* @param flow - Flow instance to extract data from
|
|
387
|
+
* @returns Serializable flow data without execution logic
|
|
388
|
+
*
|
|
389
|
+
* @example
|
|
390
|
+
* ```typescript
|
|
391
|
+
* const flowData = getFlowData(myFlow);
|
|
392
|
+
* // Store in database or send to client
|
|
393
|
+
* await db.flows.save(flowData);
|
|
394
|
+
* ```
|
|
395
|
+
*/
|
|
396
|
+
declare const getFlowData: <TRequirements>(flow: Flow<any, any, TRequirements>) => FlowData;
|
|
397
|
+
/**
|
|
398
|
+
* Result of a flow execution - either completed or paused.
|
|
399
|
+
*
|
|
400
|
+
* @template TOutput - Type of the flow's output data
|
|
401
|
+
*
|
|
402
|
+
* @remarks
|
|
403
|
+
* Flows can pause when a node needs additional data (e.g., waiting for user input
|
|
404
|
+
* or external service). The execution state allows resuming from where it paused.
|
|
405
|
+
*
|
|
406
|
+
* @example
|
|
407
|
+
* ```typescript
|
|
408
|
+
* const result = await Effect.runPromise(flow.run({ inputs, storageId, jobId }));
|
|
409
|
+
*
|
|
410
|
+
* if (result.type === "completed") {
|
|
411
|
+
* console.log("Flow completed:", result.result);
|
|
412
|
+
* } else {
|
|
413
|
+
* console.log("Flow paused at node:", result.nodeId);
|
|
414
|
+
* // Can resume later with: flow.resume({ jobId, executionState: result.executionState, ... })
|
|
415
|
+
* }
|
|
416
|
+
* ```
|
|
417
|
+
*/
|
|
418
|
+
type FlowExecutionResult<TOutput> = {
|
|
419
|
+
type: "completed";
|
|
420
|
+
result: TOutput;
|
|
421
|
+
outputs?: TypedOutput[];
|
|
422
|
+
} | {
|
|
423
|
+
type: "paused";
|
|
424
|
+
nodeId: string;
|
|
425
|
+
executionState: {
|
|
426
|
+
executionOrder: string[];
|
|
427
|
+
currentIndex: number;
|
|
428
|
+
inputs: Record<string, unknown>;
|
|
429
|
+
};
|
|
430
|
+
};
|
|
431
|
+
/**
|
|
432
|
+
* A Flow represents a directed acyclic graph (DAG) of processing nodes.
|
|
433
|
+
*
|
|
434
|
+
* Flows execute nodes in topological order, passing data between nodes through edges.
|
|
435
|
+
* They support conditional execution, retry logic, pausable nodes, and event emission.
|
|
436
|
+
*
|
|
437
|
+
* @template TFlowInputSchema - Zod schema defining the shape of input data
|
|
438
|
+
* @template TFlowOutputSchema - Zod schema defining the shape of output data
|
|
439
|
+
* @template TRequirements - Effect requirements (services/contexts) needed by nodes
|
|
440
|
+
*
|
|
441
|
+
* @property id - Unique flow identifier
|
|
442
|
+
* @property name - Human-readable flow name
|
|
443
|
+
* @property nodes - Array of nodes in the flow
|
|
444
|
+
* @property edges - Connections between nodes
|
|
445
|
+
* @property inputSchema - Zod schema for validating flow inputs
|
|
446
|
+
* @property outputSchema - Zod schema for validating flow outputs
|
|
447
|
+
* @property onEvent - Optional callback for flow execution events
|
|
448
|
+
* @property run - Executes the flow from the beginning
|
|
449
|
+
* @property resume - Resumes a paused flow execution
|
|
450
|
+
* @property validateTypes - Validates node type compatibility
|
|
451
|
+
* @property validateInputs - Validates input data against schema
|
|
452
|
+
* @property validateOutputs - Validates output data against schema
|
|
453
|
+
*
|
|
454
|
+
* @remarks
|
|
455
|
+
* Flows are created using {@link createFlowWithSchema}. The Effect-based design
|
|
456
|
+
* allows for composable error handling, resource management, and dependency injection.
|
|
457
|
+
*
|
|
458
|
+
* @example
|
|
459
|
+
* ```typescript
|
|
460
|
+
* const flow = yield* createFlowWithSchema({
|
|
461
|
+
* flowId: "image-pipeline",
|
|
462
|
+
* name: "Image Processing Pipeline",
|
|
463
|
+
* nodes: [inputNode, resizeNode, optimizeNode, storageNode],
|
|
464
|
+
* edges: [
|
|
465
|
+
* { source: "input", target: "resize" },
|
|
466
|
+
* { source: "resize", target: "optimize" },
|
|
467
|
+
* { source: "optimize", target: "storage" }
|
|
468
|
+
* ],
|
|
469
|
+
* inputSchema: z.object({ file: z.instanceof(File) }),
|
|
470
|
+
* outputSchema: uploadFileSchema
|
|
471
|
+
* });
|
|
472
|
+
*
|
|
473
|
+
* const result = yield* flow.run({
|
|
474
|
+
* inputs: { input: { file: myFile } },
|
|
475
|
+
* storageId: "storage-1",
|
|
476
|
+
* jobId: "job-123"
|
|
477
|
+
* });
|
|
478
|
+
* ```
|
|
479
|
+
*/
|
|
480
|
+
type Flow<TFlowInputSchema extends z.ZodSchema<any>, TFlowOutputSchema extends z.ZodSchema<any>, TRequirements> = {
|
|
481
|
+
id: string;
|
|
482
|
+
name: string;
|
|
483
|
+
nodes: FlowNode<any, any, UploadistaError>[];
|
|
484
|
+
edges: FlowEdge[];
|
|
485
|
+
inputSchema: TFlowInputSchema;
|
|
486
|
+
outputSchema: TFlowOutputSchema;
|
|
487
|
+
onEvent?: FlowConfig<TFlowInputSchema, TFlowOutputSchema, TRequirements>["onEvent"];
|
|
488
|
+
checkJobStatus?: FlowConfig<TFlowInputSchema, TFlowOutputSchema, TRequirements>["checkJobStatus"];
|
|
489
|
+
hooks?: FlowConfig<TFlowInputSchema, TFlowOutputSchema, TRequirements>["hooks"];
|
|
490
|
+
run: (args: {
|
|
491
|
+
inputs?: Record<string, z.infer<TFlowInputSchema>>;
|
|
492
|
+
storageId: string;
|
|
493
|
+
jobId: string;
|
|
494
|
+
clientId: string | null;
|
|
495
|
+
}) => Effect.Effect<FlowExecutionResult<Record<string, z.infer<TFlowOutputSchema>>>, UploadistaError, TRequirements | UploadFileDataStores>;
|
|
496
|
+
resume: (args: {
|
|
497
|
+
jobId: string;
|
|
498
|
+
storageId: string;
|
|
499
|
+
nodeResults: Record<string, unknown>;
|
|
500
|
+
executionState: {
|
|
501
|
+
executionOrder: string[];
|
|
502
|
+
currentIndex: number;
|
|
503
|
+
inputs: Record<string, z.infer<TFlowInputSchema>>;
|
|
504
|
+
};
|
|
505
|
+
clientId: string | null;
|
|
506
|
+
}) => Effect.Effect<FlowExecutionResult<Record<string, z.infer<TFlowOutputSchema>>>, UploadistaError, TRequirements | UploadFileDataStores>;
|
|
507
|
+
validateTypes: () => {
|
|
508
|
+
isValid: boolean;
|
|
509
|
+
errors: string[];
|
|
510
|
+
};
|
|
511
|
+
validateInputs: (inputs: unknown) => {
|
|
512
|
+
isValid: boolean;
|
|
513
|
+
errors: string[];
|
|
514
|
+
};
|
|
515
|
+
validateOutputs: (outputs: unknown) => {
|
|
516
|
+
isValid: boolean;
|
|
517
|
+
errors: string[];
|
|
518
|
+
};
|
|
519
|
+
};
|
|
520
|
+
/**
|
|
521
|
+
* Creates a new Flow with Zod schema-based type validation.
|
|
522
|
+
*
|
|
523
|
+
* This is the primary way to create flows in Uploadista. It constructs a Flow
|
|
524
|
+
* instance that validates inputs/outputs, executes nodes in topological order,
|
|
525
|
+
* handles errors with retries, and emits events during execution.
|
|
526
|
+
*
|
|
527
|
+
* @template TFlowInputSchema - Zod schema for flow input validation
|
|
528
|
+
* @template TFlowOutputSchema - Zod schema for flow output validation
|
|
529
|
+
* @template TRequirements - Effect requirements/services needed by the flow
|
|
530
|
+
* @template TNodeError - Union of possible errors from nodes
|
|
531
|
+
* @template TNodeRequirements - Union of requirements from nodes
|
|
532
|
+
*
|
|
533
|
+
* @param config - Flow configuration object
|
|
534
|
+
* @param config.flowId - Unique identifier for the flow
|
|
535
|
+
* @param config.name - Human-readable flow name
|
|
536
|
+
* @param config.nodes - Array of nodes (can be plain nodes or Effects resolving to nodes)
|
|
537
|
+
* @param config.edges - Array of edges connecting nodes
|
|
538
|
+
* @param config.inputSchema - Zod schema for validating inputs
|
|
539
|
+
* @param config.outputSchema - Zod schema for validating outputs
|
|
540
|
+
* @param config.typeChecker - Optional custom type compatibility checker
|
|
541
|
+
* @param config.onEvent - Optional event callback for monitoring execution
|
|
542
|
+
*
|
|
543
|
+
* @returns Effect that resolves to a Flow instance
|
|
544
|
+
*
|
|
545
|
+
* @throws {UploadistaError} FLOW_CYCLE_ERROR if the graph contains cycles
|
|
546
|
+
* @throws {UploadistaError} FLOW_NODE_NOT_FOUND if a node is referenced but missing
|
|
547
|
+
* @throws {UploadistaError} FLOW_NODE_ERROR if node execution fails
|
|
548
|
+
* @throws {UploadistaError} FLOW_OUTPUT_VALIDATION_ERROR if outputs don't match schema
|
|
549
|
+
*
|
|
550
|
+
* @remarks
|
|
551
|
+
* - Nodes can be provided as plain objects or as Effects that resolve to nodes
|
|
552
|
+
* - The flow performs topological sorting to determine execution order
|
|
553
|
+
* - Conditional nodes are evaluated before execution
|
|
554
|
+
* - Nodes can specify retry configuration with exponential backoff
|
|
555
|
+
* - Pausable nodes can halt execution and resume later
|
|
556
|
+
*
|
|
557
|
+
* @example
|
|
558
|
+
* ```typescript
|
|
559
|
+
* const flow = yield* createFlowWithSchema({
|
|
560
|
+
* flowId: "image-upload",
|
|
561
|
+
* name: "Image Upload with Processing",
|
|
562
|
+
* nodes: [
|
|
563
|
+
* inputNode,
|
|
564
|
+
* yield* createResizeNode({ width: 1920, height: 1080 }),
|
|
565
|
+
* optimizeNode,
|
|
566
|
+
* storageNode
|
|
567
|
+
* ],
|
|
568
|
+
* edges: [
|
|
569
|
+
* { source: "input", target: "resize" },
|
|
570
|
+
* { source: "resize", target: "optimize" },
|
|
571
|
+
* { source: "optimize", target: "storage" }
|
|
572
|
+
* ],
|
|
573
|
+
* inputSchema: z.object({
|
|
574
|
+
* file: z.instanceof(File),
|
|
575
|
+
* metadata: z.record(z.string(), z.any()).optional()
|
|
576
|
+
* }),
|
|
577
|
+
* outputSchema: uploadFileSchema,
|
|
578
|
+
* onEvent: (event) => Effect.gen(function* () {
|
|
579
|
+
* console.log("Flow event:", event);
|
|
580
|
+
* return { eventId: event.jobId };
|
|
581
|
+
* })
|
|
582
|
+
* });
|
|
583
|
+
* ```
|
|
584
|
+
*
|
|
585
|
+
* @see {@link Flow} for the returned flow type
|
|
586
|
+
* @see {@link FlowConfig} for configuration options
|
|
587
|
+
*/
|
|
588
|
+
declare function createFlowWithSchema<TFlowInputSchema extends z.ZodSchema<any>, TFlowOutputSchema extends z.ZodSchema<any>, TRequirements = never, TNodeError = never, TNodeRequirements = never>(config: FlowConfig<TFlowInputSchema, TFlowOutputSchema, TNodeError, TNodeRequirements>): Effect.Effect<Flow<TFlowInputSchema, TFlowOutputSchema, TRequirements>, TNodeError, TNodeRequirements>;
|
|
589
|
+
//#endregion
|
|
590
|
+
//#region src/flow/input-type-registry.d.ts
|
|
591
|
+
/**
|
|
592
|
+
* Defines a registered input type with its schema and metadata.
|
|
593
|
+
*
|
|
594
|
+
* Input type definitions describe how external clients interact with input nodes.
|
|
595
|
+
* Unlike output types, input types define the external interface (e.g., init/finalize
|
|
596
|
+
* operations for streaming uploads).
|
|
597
|
+
*
|
|
598
|
+
* @template TSchema - The Zod schema type for this input's data
|
|
599
|
+
*
|
|
600
|
+
* @property id - Unique identifier (e.g., "streaming-input-v1")
|
|
601
|
+
* @property schema - Zod schema for validating input data from clients
|
|
602
|
+
* @property version - Semantic version (e.g., "1.0.0") for tracking type evolution
|
|
603
|
+
* @property description - Human-readable explanation of what this input type does
|
|
604
|
+
*/
|
|
605
|
+
interface InputTypeDefinition<TSchema = unknown> {
|
|
606
|
+
id: string;
|
|
607
|
+
schema: z.ZodSchema<TSchema>;
|
|
608
|
+
version: string;
|
|
609
|
+
description: string;
|
|
610
|
+
}
|
|
611
|
+
/**
|
|
612
|
+
* Result type for input validation operations.
|
|
613
|
+
*
|
|
614
|
+
* @template T - The expected type on successful validation
|
|
615
|
+
*/
|
|
616
|
+
type InputValidationResult<T> = {
|
|
617
|
+
success: true;
|
|
618
|
+
data: T;
|
|
619
|
+
} | {
|
|
620
|
+
success: false;
|
|
621
|
+
error: UploadistaError;
|
|
622
|
+
};
|
|
623
|
+
/**
|
|
624
|
+
* Registry for input node type definitions.
|
|
625
|
+
*
|
|
626
|
+
* The InputTypeRegistry maintains a global registry of input types with their schemas
|
|
627
|
+
* and metadata. Input types describe how data enters the flow from external sources.
|
|
628
|
+
*
|
|
629
|
+
* @remarks
|
|
630
|
+
* - Use the exported `inputTypeRegistry` singleton instance
|
|
631
|
+
* - Types cannot be unregistered or modified after registration
|
|
632
|
+
* - Duplicate type IDs are rejected
|
|
633
|
+
*
|
|
634
|
+
* @example
|
|
635
|
+
* ```typescript
|
|
636
|
+
* // Register a new input type
|
|
637
|
+
* inputTypeRegistry.register({
|
|
638
|
+
* id: "form-input-v1",
|
|
639
|
+
* schema: formInputSchema,
|
|
640
|
+
* version: "1.0.0",
|
|
641
|
+
* description: "Form-based file input",
|
|
642
|
+
* });
|
|
643
|
+
*
|
|
644
|
+
* // Check if type exists
|
|
645
|
+
* if (inputTypeRegistry.has("streaming-input-v1")) {
|
|
646
|
+
* const def = inputTypeRegistry.get("streaming-input-v1");
|
|
647
|
+
* }
|
|
648
|
+
* ```
|
|
649
|
+
*/
|
|
650
|
+
declare class InputTypeRegistry {
|
|
651
|
+
private readonly types;
|
|
652
|
+
constructor();
|
|
653
|
+
/**
|
|
654
|
+
* Register a new input type in the registry.
|
|
655
|
+
*
|
|
656
|
+
* @template T - The TypeScript type inferred from the Zod schema
|
|
657
|
+
* @param definition - The complete type definition including schema and metadata
|
|
658
|
+
* @throws {UploadistaError} If a type with the same ID is already registered
|
|
659
|
+
*/
|
|
660
|
+
register<T>(definition: InputTypeDefinition<T>): void;
|
|
661
|
+
/**
|
|
662
|
+
* Retrieve a registered type definition by its ID.
|
|
663
|
+
*
|
|
664
|
+
* @param id - The unique type identifier (e.g., "streaming-input-v1")
|
|
665
|
+
* @returns The type definition if found, undefined otherwise
|
|
666
|
+
*/
|
|
667
|
+
get(id: string): InputTypeDefinition<unknown> | undefined;
|
|
668
|
+
/**
|
|
669
|
+
* List all registered input types.
|
|
670
|
+
*
|
|
671
|
+
* @returns Array of all input type definitions
|
|
672
|
+
*/
|
|
673
|
+
list(): InputTypeDefinition<unknown>[];
|
|
674
|
+
/**
|
|
675
|
+
* Validate data against a registered type's schema.
|
|
676
|
+
*
|
|
677
|
+
* @template T - The expected TypeScript type after validation
|
|
678
|
+
* @param typeId - The ID of the registered type to validate against
|
|
679
|
+
* @param data - The data to validate
|
|
680
|
+
* @returns A result object with either the validated data or an error
|
|
681
|
+
*/
|
|
682
|
+
validate<T>(typeId: string, data: unknown): InputValidationResult<T>;
|
|
683
|
+
/**
|
|
684
|
+
* Check if a type is registered.
|
|
685
|
+
*
|
|
686
|
+
* @param id - The unique type identifier to check
|
|
687
|
+
* @returns True if the type is registered, false otherwise
|
|
688
|
+
*/
|
|
689
|
+
has(id: string): boolean;
|
|
690
|
+
/**
|
|
691
|
+
* Get the total number of registered types.
|
|
692
|
+
*
|
|
693
|
+
* @returns The count of registered types
|
|
694
|
+
*/
|
|
695
|
+
size(): number;
|
|
696
|
+
}
|
|
697
|
+
/**
|
|
698
|
+
* Global singleton instance of the input type registry.
|
|
699
|
+
*
|
|
700
|
+
* Use this instance to register and access input node type definitions.
|
|
701
|
+
* Input types describe how data enters the flow from external sources.
|
|
702
|
+
*
|
|
703
|
+
* @example
|
|
704
|
+
* ```typescript
|
|
705
|
+
* import { inputTypeRegistry } from "@uploadista/core/flow";
|
|
706
|
+
*
|
|
707
|
+
* // Register a type
|
|
708
|
+
* inputTypeRegistry.register({
|
|
709
|
+
* id: "my-input-v1",
|
|
710
|
+
* schema: myInputSchema,
|
|
711
|
+
* version: "1.0.0",
|
|
712
|
+
* description: "My custom input type",
|
|
713
|
+
* });
|
|
714
|
+
*
|
|
715
|
+
* // Validate data
|
|
716
|
+
* const result = inputTypeRegistry.validate("my-input-v1", data);
|
|
717
|
+
* ```
|
|
718
|
+
*/
|
|
719
|
+
declare const inputTypeRegistry: InputTypeRegistry;
|
|
720
|
+
/**
|
|
721
|
+
* Validates flow input data against a registered input type.
|
|
722
|
+
*
|
|
723
|
+
* @param typeId - The registered type ID (e.g., "streaming-input-v1")
|
|
724
|
+
* @param data - The input data to validate
|
|
725
|
+
* @returns A validation result with either the typed data or an error
|
|
726
|
+
*/
|
|
727
|
+
declare function validateFlowInput<T = unknown>(typeId: string, data: unknown): InputValidationResult<T>;
|
|
728
|
+
//#endregion
|
|
729
|
+
//#region src/flow/output-type-registry.d.ts
|
|
730
|
+
/**
|
|
731
|
+
* Defines a registered output type with its schema and metadata.
|
|
732
|
+
*
|
|
733
|
+
* Output type definitions describe the data shapes produced by nodes. This enables
|
|
734
|
+
* type-safe result consumption where clients can narrow types based on the
|
|
735
|
+
* `nodeType` field in results.
|
|
736
|
+
*
|
|
737
|
+
* @template TSchema - The Zod schema type for this output's data
|
|
738
|
+
*
|
|
739
|
+
* @property id - Unique identifier (e.g., "storage-output-v1", "ocr-output-v1")
|
|
740
|
+
* @property schema - Zod schema for validating output data
|
|
741
|
+
* @property version - Semantic version (e.g., "1.0.0") for tracking type evolution
|
|
742
|
+
* @property description - Human-readable explanation of what this output type contains
|
|
743
|
+
*/
|
|
744
|
+
interface OutputTypeDefinition<TSchema = unknown> {
|
|
745
|
+
id: string;
|
|
746
|
+
schema: z.ZodSchema<TSchema>;
|
|
747
|
+
version: string;
|
|
748
|
+
description: string;
|
|
749
|
+
}
|
|
750
|
+
/**
|
|
751
|
+
* Result type for output validation operations.
|
|
752
|
+
*
|
|
753
|
+
* @template T - The expected type on successful validation
|
|
754
|
+
*/
|
|
755
|
+
type OutputValidationResult<T> = {
|
|
756
|
+
success: true;
|
|
757
|
+
data: T;
|
|
758
|
+
} | {
|
|
759
|
+
success: false;
|
|
760
|
+
error: UploadistaError;
|
|
761
|
+
};
|
|
762
|
+
/**
|
|
763
|
+
* Registry for output node type definitions.
|
|
764
|
+
*
|
|
765
|
+
* The OutputTypeRegistry maintains a global registry of output types with their schemas
|
|
766
|
+
* and metadata. Output types describe the data shapes that flow through the system
|
|
767
|
+
* and appear in results.
|
|
768
|
+
*
|
|
769
|
+
* @remarks
|
|
770
|
+
* - Use the exported `outputTypeRegistry` singleton instance
|
|
771
|
+
* - Types cannot be unregistered or modified after registration
|
|
772
|
+
* - Duplicate type IDs are rejected
|
|
773
|
+
*
|
|
774
|
+
* @example
|
|
775
|
+
* ```typescript
|
|
776
|
+
* // Register a new output type
|
|
777
|
+
* outputTypeRegistry.register({
|
|
778
|
+
* id: "metadata-output-v1",
|
|
779
|
+
* schema: metadataSchema,
|
|
780
|
+
* version: "1.0.0",
|
|
781
|
+
* description: "File metadata extraction output",
|
|
782
|
+
* });
|
|
783
|
+
*
|
|
784
|
+
* // Validate result data
|
|
785
|
+
* const result = outputTypeRegistry.validate("storage-output-v1", data);
|
|
786
|
+
* if (result.success) {
|
|
787
|
+
* console.log(result.data.url);
|
|
788
|
+
* }
|
|
789
|
+
* ```
|
|
790
|
+
*/
|
|
791
|
+
declare class OutputTypeRegistry {
|
|
792
|
+
private readonly types;
|
|
793
|
+
constructor();
|
|
794
|
+
/**
|
|
795
|
+
* Register a new output type in the registry.
|
|
796
|
+
*
|
|
797
|
+
* @template T - The TypeScript type inferred from the Zod schema
|
|
798
|
+
* @param definition - The complete type definition including schema and metadata
|
|
799
|
+
* @throws {UploadistaError} If a type with the same ID is already registered
|
|
800
|
+
*/
|
|
801
|
+
register<T>(definition: OutputTypeDefinition<T>): void;
|
|
802
|
+
/**
|
|
803
|
+
* Retrieve a registered type definition by its ID.
|
|
804
|
+
*
|
|
805
|
+
* @param id - The unique type identifier (e.g., "storage-output-v1")
|
|
806
|
+
* @returns The type definition if found, undefined otherwise
|
|
807
|
+
*/
|
|
808
|
+
get(id: string): OutputTypeDefinition<unknown> | undefined;
|
|
809
|
+
/**
|
|
810
|
+
* List all registered output types.
|
|
811
|
+
*
|
|
812
|
+
* @returns Array of all output type definitions
|
|
813
|
+
*/
|
|
814
|
+
list(): OutputTypeDefinition<unknown>[];
|
|
815
|
+
/**
|
|
816
|
+
* Validate data against a registered type's schema.
|
|
817
|
+
*
|
|
818
|
+
* @template T - The expected TypeScript type after validation
|
|
819
|
+
* @param typeId - The ID of the registered type to validate against
|
|
820
|
+
* @param data - The data to validate
|
|
821
|
+
* @returns A result object with either the validated data or an error
|
|
822
|
+
*/
|
|
823
|
+
validate<T>(typeId: string, data: unknown): OutputValidationResult<T>;
|
|
824
|
+
/**
|
|
825
|
+
* Check if a type is registered.
|
|
826
|
+
*
|
|
827
|
+
* @param id - The unique type identifier to check
|
|
828
|
+
* @returns True if the type is registered, false otherwise
|
|
829
|
+
*/
|
|
830
|
+
has(id: string): boolean;
|
|
831
|
+
/**
|
|
832
|
+
* Get the total number of registered types.
|
|
833
|
+
*
|
|
834
|
+
* @returns The count of registered types
|
|
835
|
+
*/
|
|
836
|
+
size(): number;
|
|
837
|
+
}
|
|
838
|
+
/**
|
|
839
|
+
* Global singleton instance of the output type registry.
|
|
840
|
+
*
|
|
841
|
+
* Use this instance to register and access output node type definitions.
|
|
842
|
+
* Output types describe the data shapes produced by nodes and used in results.
|
|
843
|
+
*
|
|
844
|
+
* @example
|
|
845
|
+
* ```typescript
|
|
846
|
+
* import { outputTypeRegistry } from "@uploadista/core/flow";
|
|
847
|
+
*
|
|
848
|
+
* // Register a type
|
|
849
|
+
* outputTypeRegistry.register({
|
|
850
|
+
* id: "my-output-v1",
|
|
851
|
+
* schema: myOutputSchema,
|
|
852
|
+
* version: "1.0.0",
|
|
853
|
+
* description: "My custom output type",
|
|
854
|
+
* });
|
|
855
|
+
*
|
|
856
|
+
* // Validate result data
|
|
857
|
+
* const result = outputTypeRegistry.validate("my-output-v1", data);
|
|
858
|
+
* ```
|
|
859
|
+
*/
|
|
860
|
+
declare const outputTypeRegistry: OutputTypeRegistry;
|
|
861
|
+
/**
|
|
862
|
+
* Validates flow output data against a registered output type.
|
|
863
|
+
*
|
|
864
|
+
* @param typeId - The registered type ID (e.g., "storage-output-v1")
|
|
865
|
+
* @param data - The output data to validate
|
|
866
|
+
* @returns A validation result with either the typed data or an error
|
|
867
|
+
*/
|
|
868
|
+
declare function validateFlowOutput<T = unknown>(typeId: string, data: unknown): OutputValidationResult<T>;
|
|
869
|
+
//#endregion
|
|
870
|
+
//#region src/flow/node-types/index.d.ts
|
|
871
|
+
/**
|
|
872
|
+
* Type ID constants for built-in node types.
|
|
873
|
+
*
|
|
874
|
+
* Use these constants when creating nodes with type information to ensure
|
|
875
|
+
* consistency and avoid typos.
|
|
876
|
+
*
|
|
877
|
+
* @example
|
|
878
|
+
* ```typescript
|
|
879
|
+
* import { STREAMING_INPUT_TYPE_ID, STORAGE_OUTPUT_TYPE_ID } from "@uploadista/core/flow";
|
|
880
|
+
*
|
|
881
|
+
* const inputNode = createFlowNode({
|
|
882
|
+
* // ... other config
|
|
883
|
+
* inputTypeId: STREAMING_INPUT_TYPE_ID,
|
|
884
|
+
* outputTypeId: STORAGE_OUTPUT_TYPE_ID,
|
|
885
|
+
* });
|
|
886
|
+
* ```
|
|
887
|
+
*/
|
|
888
|
+
declare const STORAGE_OUTPUT_TYPE_ID = "storage-output-v1";
|
|
889
|
+
declare const OCR_OUTPUT_TYPE_ID = "ocr-output-v1";
|
|
890
|
+
declare const IMAGE_DESCRIPTION_OUTPUT_TYPE_ID = "image-description-output-v1";
|
|
891
|
+
declare const STREAMING_INPUT_TYPE_ID = "streaming-input-v1";
|
|
892
|
+
/**
|
|
893
|
+
* OCR output schema - structured text extraction result.
|
|
894
|
+
*
|
|
895
|
+
* @property extractedText - The text extracted from the document
|
|
896
|
+
* @property format - Output format (text, markdown, or JSON)
|
|
897
|
+
* @property taskType - Type of OCR task performed
|
|
898
|
+
* @property confidence - Optional confidence score (0-1)
|
|
899
|
+
*/
|
|
900
|
+
declare const ocrOutputSchema: z.ZodObject<{
|
|
901
|
+
extractedText: z.ZodString;
|
|
902
|
+
format: z.ZodEnum<{
|
|
903
|
+
markdown: "markdown";
|
|
904
|
+
plain: "plain";
|
|
905
|
+
structured: "structured";
|
|
906
|
+
}>;
|
|
907
|
+
taskType: z.ZodEnum<{
|
|
908
|
+
convertToMarkdown: "convertToMarkdown";
|
|
909
|
+
freeOcr: "freeOcr";
|
|
910
|
+
parseFigure: "parseFigure";
|
|
911
|
+
locateObject: "locateObject";
|
|
912
|
+
}>;
|
|
913
|
+
confidence: z.ZodOptional<z.ZodNumber>;
|
|
914
|
+
}, z.core.$strip>;
|
|
915
|
+
type OcrOutput = z.infer<typeof ocrOutputSchema>;
|
|
916
|
+
/**
|
|
917
|
+
* Image description output schema - AI-generated image analysis result.
|
|
918
|
+
*
|
|
919
|
+
* @property description - Human-readable description of the image
|
|
920
|
+
* @property confidence - Confidence score for the description (0-1)
|
|
921
|
+
* @property metadata - Additional metadata about the description
|
|
922
|
+
*/
|
|
923
|
+
declare const imageDescriptionOutputSchema: z.ZodObject<{
|
|
924
|
+
description: z.ZodString;
|
|
925
|
+
confidence: z.ZodOptional<z.ZodNumber>;
|
|
926
|
+
metadata: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodUnknown>>;
|
|
927
|
+
}, z.core.$strip>;
|
|
928
|
+
type ImageDescriptionOutput = z.infer<typeof imageDescriptionOutputSchema>;
|
|
929
|
+
//#endregion
|
|
930
|
+
//#region src/flow/dead-letter-queue.d.ts
|
|
931
|
+
/**
|
|
932
|
+
* Shape of the Dead Letter Queue service.
|
|
933
|
+
*
|
|
934
|
+
* Provides all operations for managing failed flow jobs including
|
|
935
|
+
* adding items, querying, retrying, and cleanup.
|
|
936
|
+
*/
|
|
937
|
+
interface DeadLetterQueueServiceShape {
|
|
938
|
+
/**
|
|
939
|
+
* Add a failed job to the DLQ with full failure context.
|
|
940
|
+
*
|
|
941
|
+
* @param job - The failed flow job
|
|
942
|
+
* @param error - The error that caused the failure
|
|
943
|
+
* @param retryPolicy - Optional retry policy (uses default if not provided)
|
|
944
|
+
* @returns The created DLQ item
|
|
945
|
+
*/
|
|
946
|
+
add(job: FlowJob, error: UploadistaError, retryPolicy?: RetryPolicy): Effect.Effect<DeadLetterItem, UploadistaError>;
|
|
947
|
+
/**
|
|
948
|
+
* Get a specific DLQ item by ID.
|
|
949
|
+
*
|
|
950
|
+
* @param itemId - The DLQ item ID
|
|
951
|
+
* @returns The DLQ item
|
|
952
|
+
*/
|
|
953
|
+
get(itemId: string): Effect.Effect<DeadLetterItem, UploadistaError>;
|
|
954
|
+
/**
|
|
955
|
+
* Get a DLQ item by ID, returning None if not found.
|
|
956
|
+
*
|
|
957
|
+
* @param itemId - The DLQ item ID
|
|
958
|
+
* @returns Option of the DLQ item
|
|
959
|
+
*/
|
|
960
|
+
getOption(itemId: string): Effect.Effect<Option.Option<DeadLetterItem>, UploadistaError>;
|
|
961
|
+
/**
|
|
962
|
+
* Delete a DLQ item.
|
|
963
|
+
*
|
|
964
|
+
* @param itemId - The DLQ item ID to delete
|
|
965
|
+
*/
|
|
966
|
+
delete(itemId: string): Effect.Effect<void, UploadistaError>;
|
|
967
|
+
/**
|
|
968
|
+
* List DLQ items with optional filtering and pagination.
|
|
969
|
+
*
|
|
970
|
+
* @param options - Filter and pagination options
|
|
971
|
+
* @returns List of items and total count
|
|
972
|
+
*/
|
|
973
|
+
list(options?: DeadLetterListOptions): Effect.Effect<{
|
|
974
|
+
items: DeadLetterItem[];
|
|
975
|
+
total: number;
|
|
976
|
+
}, UploadistaError>;
|
|
977
|
+
/**
|
|
978
|
+
* Update a DLQ item.
|
|
979
|
+
*
|
|
980
|
+
* @param itemId - The DLQ item ID
|
|
981
|
+
* @param updates - Partial updates to apply
|
|
982
|
+
* @returns The updated item
|
|
983
|
+
*/
|
|
984
|
+
update(itemId: string, updates: Partial<DeadLetterItem>): Effect.Effect<DeadLetterItem, UploadistaError>;
|
|
985
|
+
/**
|
|
986
|
+
* Mark a DLQ item as being retried.
|
|
987
|
+
*
|
|
988
|
+
* @param itemId - The DLQ item ID
|
|
989
|
+
* @returns The updated item with status "retrying"
|
|
990
|
+
*/
|
|
991
|
+
markRetrying(itemId: string): Effect.Effect<DeadLetterItem, UploadistaError>;
|
|
992
|
+
/**
|
|
993
|
+
* Record a failed retry attempt.
|
|
994
|
+
*
|
|
995
|
+
* @param itemId - The DLQ item ID
|
|
996
|
+
* @param error - Error message from the failed retry
|
|
997
|
+
* @param durationMs - Duration of the retry attempt
|
|
998
|
+
* @returns The updated item
|
|
999
|
+
*/
|
|
1000
|
+
recordRetryFailure(itemId: string, error: string, durationMs: number): Effect.Effect<DeadLetterItem, UploadistaError>;
|
|
1001
|
+
/**
|
|
1002
|
+
* Mark a DLQ item as resolved (successfully retried or manually resolved).
|
|
1003
|
+
*
|
|
1004
|
+
* @param itemId - The DLQ item ID
|
|
1005
|
+
* @returns The updated item with status "resolved"
|
|
1006
|
+
*/
|
|
1007
|
+
markResolved(itemId: string): Effect.Effect<DeadLetterItem, UploadistaError>;
|
|
1008
|
+
/**
|
|
1009
|
+
* Get items that are due for scheduled retry.
|
|
1010
|
+
*
|
|
1011
|
+
* @param limit - Maximum number of items to return
|
|
1012
|
+
* @returns List of items ready for retry
|
|
1013
|
+
*/
|
|
1014
|
+
getScheduledRetries(limit?: number): Effect.Effect<DeadLetterItem[], UploadistaError>;
|
|
1015
|
+
/**
|
|
1016
|
+
* Cleanup old DLQ items based on options.
|
|
1017
|
+
*
|
|
1018
|
+
* @param options - Cleanup criteria
|
|
1019
|
+
* @returns Number of items deleted
|
|
1020
|
+
*/
|
|
1021
|
+
cleanup(options?: DeadLetterCleanupOptions): Effect.Effect<DeadLetterCleanupResult, UploadistaError>;
|
|
1022
|
+
/**
|
|
1023
|
+
* Get DLQ statistics.
|
|
1024
|
+
*
|
|
1025
|
+
* @returns Aggregate statistics about the DLQ
|
|
1026
|
+
*/
|
|
1027
|
+
getStats(): Effect.Effect<DeadLetterQueueStats, UploadistaError>;
|
|
1028
|
+
}
|
|
1029
|
+
declare const DeadLetterQueueService_base: Context.TagClass<DeadLetterQueueService, "DeadLetterQueueService", DeadLetterQueueServiceShape>;
|
|
1030
|
+
/**
|
|
1031
|
+
* Effect-TS context tag for the Dead Letter Queue service.
|
|
1032
|
+
*
|
|
1033
|
+
* @example
|
|
1034
|
+
* ```typescript
|
|
1035
|
+
* const effect = Effect.gen(function* () {
|
|
1036
|
+
* const dlq = yield* DeadLetterQueueService;
|
|
1037
|
+
* const stats = yield* dlq.getStats();
|
|
1038
|
+
* console.log(`DLQ has ${stats.totalItems} items`);
|
|
1039
|
+
* });
|
|
1040
|
+
* ```
|
|
1041
|
+
*/
|
|
1042
|
+
declare class DeadLetterQueueService extends DeadLetterQueueService_base {
|
|
1043
|
+
/**
|
|
1044
|
+
* Access the DLQ service optionally (for integration in FlowServer).
|
|
1045
|
+
* Returns Option.none if the service is not provided.
|
|
1046
|
+
*/
|
|
1047
|
+
static optional: Effect.Effect<Option.Option<DeadLetterQueueServiceShape>, never, never>;
|
|
1048
|
+
}
|
|
1049
|
+
/**
|
|
1050
|
+
* Creates the Dead Letter Queue service implementation.
|
|
1051
|
+
*
|
|
1052
|
+
* @returns Effect that creates the DLQ service
|
|
1053
|
+
*/
|
|
1054
|
+
declare function createDeadLetterQueueService(): Effect.Effect<DeadLetterQueueServiceShape, never, DeadLetterQueueKVStore>;
|
|
1055
|
+
/**
|
|
1056
|
+
* Effect Layer that creates the DeadLetterQueueService.
|
|
1057
|
+
*
|
|
1058
|
+
* @example
|
|
1059
|
+
* ```typescript
|
|
1060
|
+
* const program = Effect.gen(function* () {
|
|
1061
|
+
* const dlq = yield* DeadLetterQueueService;
|
|
1062
|
+
* const stats = yield* dlq.getStats();
|
|
1063
|
+
* return stats;
|
|
1064
|
+
* }).pipe(
|
|
1065
|
+
* Effect.provide(deadLetterQueueService),
|
|
1066
|
+
* Effect.provide(deadLetterQueueKvStore),
|
|
1067
|
+
* Effect.provide(baseStoreLayer)
|
|
1068
|
+
* );
|
|
1069
|
+
* ```
|
|
1070
|
+
*/
|
|
1071
|
+
declare const deadLetterQueueService: Layer.Layer<DeadLetterQueueService, never, DeadLetterQueueKVStore>;
|
|
1072
|
+
//#endregion
|
|
1073
|
+
//#region src/flow/flow-queue-store.d.ts
|
|
1074
|
+
/**
|
|
1075
|
+
* Adapter interface for flow queue item persistence.
|
|
1076
|
+
*
|
|
1077
|
+
* Implementations must provide CRUD operations for FlowQueueItems and an
|
|
1078
|
+
* efficient status-based listing. Redis implementations use sorted sets and
|
|
1079
|
+
* sets for O(log n) lookups; the memory implementation uses a plain Map.
|
|
1080
|
+
*
|
|
1081
|
+
* @example
|
|
1082
|
+
* ```typescript
|
|
1083
|
+
* // Provide a custom store via the Effect layer
|
|
1084
|
+
* const customStore: FlowQueueStore = { ... };
|
|
1085
|
+
* ```
|
|
1086
|
+
*/
|
|
1087
|
+
interface FlowQueueStore {
|
|
1088
|
+
/**
|
|
1089
|
+
* Persist a new queue item.
|
|
1090
|
+
*
|
|
1091
|
+
* @param item - The fully-constructed FlowQueueItem to store
|
|
1092
|
+
* @returns The stored item
|
|
1093
|
+
*/
|
|
1094
|
+
createItem(item: FlowQueueItem): Effect.Effect<FlowQueueItem, UploadistaError>;
|
|
1095
|
+
/**
|
|
1096
|
+
* Retrieve a queue item by ID.
|
|
1097
|
+
*
|
|
1098
|
+
* @param id - The queue item ID
|
|
1099
|
+
* @returns The item, or null if not found
|
|
1100
|
+
*/
|
|
1101
|
+
getItem(id: string): Effect.Effect<FlowQueueItem | null, UploadistaError>;
|
|
1102
|
+
/**
|
|
1103
|
+
* Apply partial updates to an existing queue item.
|
|
1104
|
+
*
|
|
1105
|
+
* The implementation must atomically update the item and any status-based
|
|
1106
|
+
* indexes (e.g., Redis sorted sets) when status changes.
|
|
1107
|
+
*
|
|
1108
|
+
* @param id - The queue item ID
|
|
1109
|
+
* @param updates - Fields to update (merged over the existing item)
|
|
1110
|
+
* @returns The fully-updated item
|
|
1111
|
+
*/
|
|
1112
|
+
updateItem(id: string, updates: Partial<FlowQueueItem>): Effect.Effect<FlowQueueItem, UploadistaError>;
|
|
1113
|
+
/**
|
|
1114
|
+
* List all items with a specific status.
|
|
1115
|
+
*
|
|
1116
|
+
* For "pending" items, results SHOULD be returned in FIFO order (oldest first)
|
|
1117
|
+
* so that the worker loop dispatches items in enqueuedAt order.
|
|
1118
|
+
*
|
|
1119
|
+
* @param status - The status to filter by
|
|
1120
|
+
* @returns Array of matching items
|
|
1121
|
+
*/
|
|
1122
|
+
listByStatus(status: FlowQueueItemStatus): Effect.Effect<FlowQueueItem[], UploadistaError>;
|
|
1123
|
+
/**
|
|
1124
|
+
* Remove a queue item from the store.
|
|
1125
|
+
*
|
|
1126
|
+
* @param id - The queue item ID to remove
|
|
1127
|
+
*/
|
|
1128
|
+
deleteItem(id: string): Effect.Effect<void, UploadistaError>;
|
|
1129
|
+
}
|
|
1130
|
+
/**
|
|
1131
|
+
* In-memory implementation of FlowQueueStore.
|
|
1132
|
+
*
|
|
1133
|
+
* Uses a plain Map for storage. Items are not persisted across process restarts.
|
|
1134
|
+
* Suitable for single-process deployments or development/testing.
|
|
1135
|
+
*
|
|
1136
|
+
* For durability across restarts, use RedisFlowQueueStore or IoRedisFlowQueueStore.
|
|
1137
|
+
*
|
|
1138
|
+
* @example
|
|
1139
|
+
* ```typescript
|
|
1140
|
+
* const store = new MemoryFlowQueueStore();
|
|
1141
|
+
* // Pass to FlowQueueService.make(config, store)
|
|
1142
|
+
* ```
|
|
1143
|
+
*/
|
|
1144
|
+
declare class MemoryFlowQueueStore implements FlowQueueStore {
|
|
1145
|
+
private readonly items;
|
|
1146
|
+
createItem(item: FlowQueueItem): Effect.Effect<FlowQueueItem, UploadistaError>;
|
|
1147
|
+
getItem(id: string): Effect.Effect<FlowQueueItem | null, UploadistaError>;
|
|
1148
|
+
updateItem(id: string, updates: Partial<FlowQueueItem>): Effect.Effect<FlowQueueItem, UploadistaError>;
|
|
1149
|
+
listByStatus(status: FlowQueueItemStatus): Effect.Effect<FlowQueueItem[], UploadistaError>;
|
|
1150
|
+
deleteItem(id: string): Effect.Effect<void, UploadistaError>;
|
|
1151
|
+
}
|
|
1152
|
+
//#endregion
|
|
1153
|
+
//#region src/flow/flow-engine.d.ts
|
|
1154
|
+
/**
|
|
1155
|
+
* WaitUntil callback type for keeping background tasks alive.
|
|
1156
|
+
* Used in serverless environments like Cloudflare Workers to prevent
|
|
1157
|
+
* premature termination of background operations.
|
|
1158
|
+
*
|
|
1159
|
+
* @param promise - Promise representing the background task to keep alive
|
|
1160
|
+
*/
|
|
1161
|
+
type WaitUntilCallback = (promise: Promise<unknown>) => void;
|
|
1162
|
+
declare const FlowWaitUntil_base: Context.TagClass<FlowWaitUntil, "FlowWaitUntil", WaitUntilCallback>;
|
|
1163
|
+
/**
|
|
1164
|
+
* Optional WaitUntil service for background task management.
|
|
1165
|
+
* When provided, allows flows to execute beyond the HTTP response lifecycle.
|
|
1166
|
+
*
|
|
1167
|
+
* In Cloudflare Workers, use `ctx.executionCtx.waitUntil()`.
|
|
1168
|
+
* In other environments, this can be undefined (flows execute normally with Effect.fork).
|
|
1169
|
+
*
|
|
1170
|
+
* This service uses Effect's optional service pattern. Access it via:
|
|
1171
|
+
* ```typescript
|
|
1172
|
+
* const waitUntil = yield* FlowWaitUntil.optional;
|
|
1173
|
+
* if (Option.isSome(waitUntil)) {
|
|
1174
|
+
* // Use waitUntil.value
|
|
1175
|
+
* }
|
|
1176
|
+
* ```
|
|
1177
|
+
*
|
|
1178
|
+
* @see https://effect.website/docs/requirements-management/services/#optional-services
|
|
1179
|
+
*/
|
|
1180
|
+
declare class FlowWaitUntil extends FlowWaitUntil_base {
|
|
1181
|
+
static optional: Effect.Effect<Option.Option<WaitUntilCallback>, never, never>;
|
|
1182
|
+
}
|
|
1183
|
+
declare const FlowLifecycleHook_base: Context.TagClass<FlowLifecycleHook, "FlowLifecycleHook", {
|
|
1184
|
+
readonly onComplete: (ctx: {
|
|
1185
|
+
jobId: string;
|
|
1186
|
+
flowId: string;
|
|
1187
|
+
clientId: string | null;
|
|
1188
|
+
status: "completed" | "failed";
|
|
1189
|
+
}) => Effect.Effect<void>;
|
|
1190
|
+
}>;
|
|
1191
|
+
/**
|
|
1192
|
+
* Optional lifecycle hook for flow execution events.
|
|
1193
|
+
* Called when a flow completes or fails, enabling usage tracking,
|
|
1194
|
+
* billing, and other post-processing directly in the execution pipeline.
|
|
1195
|
+
*
|
|
1196
|
+
* This follows the same optional service pattern as {@link DeadLetterQueueService}.
|
|
1197
|
+
* When provided, the hook fires reliably from the flow daemon — not from
|
|
1198
|
+
* polling endpoints — ensuring it always runs exactly once per flow execution.
|
|
1199
|
+
*/
|
|
1200
|
+
declare class FlowLifecycleHook extends FlowLifecycleHook_base {
|
|
1201
|
+
static optional: Effect.Effect<Option.Option<{
|
|
1202
|
+
readonly onComplete: (ctx: {
|
|
1203
|
+
jobId: string;
|
|
1204
|
+
flowId: string;
|
|
1205
|
+
clientId: string | null;
|
|
1206
|
+
status: "completed" | "failed";
|
|
1207
|
+
}) => Effect.Effect<void>;
|
|
1208
|
+
}>, never, never>;
|
|
1209
|
+
}
|
|
1210
|
+
/**
|
|
1211
|
+
* Flow provider interface that applications must implement.
|
|
1212
|
+
*
|
|
1213
|
+
* This interface defines how the FlowEngine retrieves flow definitions.
|
|
1214
|
+
* Applications provide their own implementation to load flows from a database,
|
|
1215
|
+
* configuration files, or any other source.
|
|
1216
|
+
*
|
|
1217
|
+
* @template TRequirements - Additional Effect requirements for flow execution
|
|
1218
|
+
*
|
|
1219
|
+
* @property getFlow - Retrieves a flow definition by ID with authorization check
|
|
1220
|
+
*
|
|
1221
|
+
* @example
|
|
1222
|
+
* ```typescript
|
|
1223
|
+
* // Implement a flow provider from database
|
|
1224
|
+
* const dbFlowProvider: FlowProviderShape = {
|
|
1225
|
+
* getFlow: (flowId, clientId) => Effect.gen(function* () {
|
|
1226
|
+
* // Load flow from database
|
|
1227
|
+
* const flowData = yield* db.getFlow(flowId);
|
|
1228
|
+
*
|
|
1229
|
+
* // Check authorization
|
|
1230
|
+
* if (flowData.ownerId !== clientId) {
|
|
1231
|
+
* return yield* Effect.fail(
|
|
1232
|
+
* UploadistaError.fromCode("FLOW_NOT_AUTHORIZED")
|
|
1233
|
+
* );
|
|
1234
|
+
* }
|
|
1235
|
+
*
|
|
1236
|
+
* // Create flow instance
|
|
1237
|
+
* return createFlow(flowData);
|
|
1238
|
+
* })
|
|
1239
|
+
* };
|
|
1240
|
+
*
|
|
1241
|
+
* // Provide to FlowEngine
|
|
1242
|
+
* const flowProviderLayer = Layer.succeed(FlowProvider, dbFlowProvider);
|
|
1243
|
+
* ```
|
|
1244
|
+
*/
|
|
1245
|
+
type FlowProviderShape<TRequirements = any> = {
|
|
1246
|
+
getFlow: (flowId: string, clientId: string | null) => Effect.Effect<Flow<any, any, TRequirements>, UploadistaError>;
|
|
1247
|
+
};
|
|
1248
|
+
declare const FlowProvider_base: Context.TagClass<FlowProvider, "FlowProvider", FlowProviderShape<any>>;
|
|
1249
|
+
/**
|
|
1250
|
+
* Effect-TS context tag for the FlowProvider service.
|
|
1251
|
+
*
|
|
1252
|
+
* Applications must provide an implementation of FlowProviderShape
|
|
1253
|
+
* to enable the FlowEngine to retrieve flow definitions.
|
|
1254
|
+
*
|
|
1255
|
+
* @example
|
|
1256
|
+
* ```typescript
|
|
1257
|
+
* // Access FlowProvider in an Effect
|
|
1258
|
+
* const effect = Effect.gen(function* () {
|
|
1259
|
+
* const provider = yield* FlowProvider;
|
|
1260
|
+
* const flow = yield* provider.getFlow("flow123", "client456");
|
|
1261
|
+
* return flow;
|
|
1262
|
+
* });
|
|
1263
|
+
* ```
|
|
1264
|
+
*/
|
|
1265
|
+
declare class FlowProvider extends FlowProvider_base {}
|
|
1266
|
+
/**
|
|
1267
|
+
* FlowServer service interface.
|
|
1268
|
+
*
|
|
1269
|
+
* This is the core flow processing service that executes DAG-based file processing pipelines.
|
|
1270
|
+
* It manages flow execution, job tracking, node processing, pause/resume functionality,
|
|
1271
|
+
* and real-time event broadcasting.
|
|
1272
|
+
*
|
|
1273
|
+
* All operations return Effect types for composable, type-safe error handling.
|
|
1274
|
+
*
|
|
1275
|
+
* @property getFlow - Retrieves a flow definition by ID
|
|
1276
|
+
* @property getFlowData - Retrieves flow metadata (nodes, edges) without full flow instance
|
|
1277
|
+
* @property runFlow - Starts a new flow execution and returns immediately with job ID
|
|
1278
|
+
* @property resumeFlow - Resumes a paused flow with new data for a specific node
|
|
1279
|
+
* @property pauseFlow - Pauses a running flow (user-initiated pause)
|
|
1280
|
+
* @property cancelFlow - Cancels a running or paused flow and cleans up resources
|
|
1281
|
+
* @property getJobStatus - Retrieves current status and results of a flow job
|
|
1282
|
+
* @property subscribeToFlowEvents - Subscribes WebSocket to flow execution events
|
|
1283
|
+
* @property unsubscribeFromFlowEvents - Unsubscribes from flow events
|
|
1284
|
+
*
|
|
1285
|
+
* @example
|
|
1286
|
+
* ```typescript
|
|
1287
|
+
* // Execute a flow
|
|
1288
|
+
* const program = Effect.gen(function* () {
|
|
1289
|
+
* const server = yield* FlowEngine;
|
|
1290
|
+
*
|
|
1291
|
+
* // Start flow execution (returns immediately)
|
|
1292
|
+
* const job = yield* server.runFlow({
|
|
1293
|
+
* flowId: "resize-optimize",
|
|
1294
|
+
* storageId: "s3-production",
|
|
1295
|
+
* clientId: "client123",
|
|
1296
|
+
* inputs: {
|
|
1297
|
+
* input_1: { uploadId: "upload_abc123" }
|
|
1298
|
+
* }
|
|
1299
|
+
* });
|
|
1300
|
+
*
|
|
1301
|
+
* // Subscribe to events
|
|
1302
|
+
* yield* server.subscribeToFlowEvents(job.id, websocket);
|
|
1303
|
+
*
|
|
1304
|
+
* // Poll for status
|
|
1305
|
+
* const status = yield* server.getJobStatus(job.id);
|
|
1306
|
+
* console.log(status.status); // "running", "paused", "completed", "failed", or "cancelled"
|
|
1307
|
+
*
|
|
1308
|
+
* // User can pause the flow
|
|
1309
|
+
* yield* server.pauseFlow(job.id, "client123");
|
|
1310
|
+
*
|
|
1311
|
+
* return job;
|
|
1312
|
+
* });
|
|
1313
|
+
*
|
|
1314
|
+
* // Resume a paused flow
|
|
1315
|
+
* const resume = Effect.gen(function* () {
|
|
1316
|
+
* const server = yield* FlowEngine;
|
|
1317
|
+
*
|
|
1318
|
+
* // Flow paused waiting for user input at node "approval_1"
|
|
1319
|
+
* const job = yield* server.resumeFlow({
|
|
1320
|
+
* jobId: "job123",
|
|
1321
|
+
* nodeId: "approval_1",
|
|
1322
|
+
* newData: { approved: true },
|
|
1323
|
+
* clientId: "client123"
|
|
1324
|
+
* });
|
|
1325
|
+
*
|
|
1326
|
+
* return job;
|
|
1327
|
+
* });
|
|
1328
|
+
*
|
|
1329
|
+
* // Cancel a flow
|
|
1330
|
+
* const cancel = Effect.gen(function* () {
|
|
1331
|
+
* const server = yield* FlowEngine;
|
|
1332
|
+
*
|
|
1333
|
+
* // Cancel flow and cleanup intermediate files
|
|
1334
|
+
* const job = yield* server.cancelFlow("job123", "client123");
|
|
1335
|
+
*
|
|
1336
|
+
* return job;
|
|
1337
|
+
* });
|
|
1338
|
+
*
|
|
1339
|
+
* // Check flow structure before execution
|
|
1340
|
+
* const inspect = Effect.gen(function* () {
|
|
1341
|
+
* const server = yield* FlowEngine;
|
|
1342
|
+
*
|
|
1343
|
+
* const flowData = yield* server.getFlowData("resize-optimize", "client123");
|
|
1344
|
+
* console.log("Nodes:", flowData.nodes);
|
|
1345
|
+
* console.log("Edges:", flowData.edges);
|
|
1346
|
+
*
|
|
1347
|
+
* return flowData;
|
|
1348
|
+
* });
|
|
1349
|
+
* ```
|
|
1350
|
+
*/
|
|
1351
|
+
type FlowEngineShape = {
|
|
1352
|
+
getFlow: <TRequirements>(flowId: string, clientId: string | null) => Effect.Effect<Flow<any, any, TRequirements>, UploadistaError>;
|
|
1353
|
+
getFlowData: (flowId: string, clientId: string | null) => Effect.Effect<FlowData, UploadistaError>;
|
|
1354
|
+
runFlow: <TRequirements>({
|
|
1355
|
+
flowId,
|
|
1356
|
+
storageId,
|
|
1357
|
+
clientId,
|
|
1358
|
+
inputs,
|
|
1359
|
+
jobId
|
|
1360
|
+
}: {
|
|
1361
|
+
flowId: string;
|
|
1362
|
+
storageId: string;
|
|
1363
|
+
clientId: string | null;
|
|
1364
|
+
inputs: any; /** Optional job ID to use instead of generating a new UUID. Used by the queue worker to keep the queue item ID and flow job ID in sync. */
|
|
1365
|
+
jobId?: string;
|
|
1366
|
+
}) => Effect.Effect<FlowJob, UploadistaError, TRequirements>;
|
|
1367
|
+
resumeFlow: <TRequirements>({
|
|
1368
|
+
jobId,
|
|
1369
|
+
nodeId,
|
|
1370
|
+
newData,
|
|
1371
|
+
clientId
|
|
1372
|
+
}: {
|
|
1373
|
+
jobId: string;
|
|
1374
|
+
nodeId: string;
|
|
1375
|
+
newData: unknown;
|
|
1376
|
+
clientId: string | null;
|
|
1377
|
+
}) => Effect.Effect<FlowJob, UploadistaError, TRequirements>;
|
|
1378
|
+
pauseFlow: (jobId: string, clientId: string | null) => Effect.Effect<FlowJob, UploadistaError>;
|
|
1379
|
+
cancelFlow: (jobId: string, clientId: string | null) => Effect.Effect<FlowJob, UploadistaError>;
|
|
1380
|
+
getJobStatus: (jobId: string) => Effect.Effect<FlowJob, UploadistaError>;
|
|
1381
|
+
subscribeToFlowEvents: (jobId: string, connection: WebSocketConnection) => Effect.Effect<void, UploadistaError>;
|
|
1382
|
+
unsubscribeFromFlowEvents: (jobId: string) => Effect.Effect<void, UploadistaError>;
|
|
1383
|
+
};
|
|
1384
|
+
declare const FlowEngine_base: Context.TagClass<FlowEngine, "FlowEngine", FlowEngineShape>;
|
|
1385
|
+
/**
|
|
1386
|
+
* Effect-TS context tag for the FlowEngine service.
|
|
1387
|
+
*
|
|
1388
|
+
* Use this tag to access the FlowEngine in an Effect context.
|
|
1389
|
+
* The server must be provided via a Layer or dependency injection.
|
|
1390
|
+
*
|
|
1391
|
+
* @example
|
|
1392
|
+
* ```typescript
|
|
1393
|
+
* // Access FlowEngine in an Effect
|
|
1394
|
+
* const flowEffect = Effect.gen(function* () {
|
|
1395
|
+
* const server = yield* FlowEngine;
|
|
1396
|
+
* const job = yield* server.runFlow({
|
|
1397
|
+
* flowId: "my-flow",
|
|
1398
|
+
* storageId: "s3",
|
|
1399
|
+
* clientId: null,
|
|
1400
|
+
* inputs: {}
|
|
1401
|
+
* });
|
|
1402
|
+
* return job;
|
|
1403
|
+
* });
|
|
1404
|
+
*
|
|
1405
|
+
* // Provide FlowEngine layer
|
|
1406
|
+
* const program = flowEffect.pipe(
|
|
1407
|
+
* Effect.provide(flowServer),
|
|
1408
|
+
* Effect.provide(flowProviderLayer),
|
|
1409
|
+
* Effect.provide(flowJobKvStore)
|
|
1410
|
+
* );
|
|
1411
|
+
* ```
|
|
1412
|
+
*/
|
|
1413
|
+
declare class FlowEngine extends FlowEngine_base {}
|
|
1414
|
+
/**
|
|
1415
|
+
* Legacy configuration options for FlowEngine.
|
|
1416
|
+
*
|
|
1417
|
+
* @deprecated Use Effect Layers and FlowProvider instead.
|
|
1418
|
+
* This type is kept for backward compatibility.
|
|
1419
|
+
*
|
|
1420
|
+
* @property getFlow - Function to retrieve flow definitions
|
|
1421
|
+
* @property kvStore - KV store for flow job metadata
|
|
1422
|
+
*/
|
|
1423
|
+
type FlowEngineOptions = {
|
|
1424
|
+
getFlow: <TRequirements>({
|
|
1425
|
+
flowId,
|
|
1426
|
+
storageId
|
|
1427
|
+
}: {
|
|
1428
|
+
flowId: string;
|
|
1429
|
+
storageId: string;
|
|
1430
|
+
}) => Promise<Flow<any, any, TRequirements>>;
|
|
1431
|
+
kvStore: KvStore<FlowJob>;
|
|
1432
|
+
};
|
|
1433
|
+
declare function createFlowEngine(): Effect.Effect<{
|
|
1434
|
+
getFlow: <TRequirements>(flowId: string, clientId: string | null) => Effect.Effect<Flow<any, any, any>, UploadistaError, never>;
|
|
1435
|
+
getFlowData: (flowId: string, clientId: string | null) => Effect.Effect<FlowData, UploadistaError, never>;
|
|
1436
|
+
runFlow: ({
|
|
1437
|
+
flowId,
|
|
1438
|
+
storageId,
|
|
1439
|
+
clientId,
|
|
1440
|
+
inputs,
|
|
1441
|
+
jobId: providedJobId
|
|
1442
|
+
}: {
|
|
1443
|
+
flowId: string;
|
|
1444
|
+
storageId: string;
|
|
1445
|
+
clientId: string | null;
|
|
1446
|
+
inputs: unknown;
|
|
1447
|
+
jobId?: string;
|
|
1448
|
+
}) => Effect.Effect<FlowJob | {
|
|
1449
|
+
id: string;
|
|
1450
|
+
flowId: string;
|
|
1451
|
+
storageId: string;
|
|
1452
|
+
clientId: string | null;
|
|
1453
|
+
status: "pending";
|
|
1454
|
+
tasks: never[];
|
|
1455
|
+
createdAt: Date;
|
|
1456
|
+
updatedAt: Date;
|
|
1457
|
+
}, UploadistaError, never>;
|
|
1458
|
+
getJobStatus: (jobId: string) => Effect.Effect<FlowJob, UploadistaError, never>;
|
|
1459
|
+
resumeFlow: ({
|
|
1460
|
+
jobId,
|
|
1461
|
+
nodeId,
|
|
1462
|
+
newData,
|
|
1463
|
+
clientId
|
|
1464
|
+
}: {
|
|
1465
|
+
jobId: string;
|
|
1466
|
+
nodeId: string;
|
|
1467
|
+
newData: unknown;
|
|
1468
|
+
clientId: string | null;
|
|
1469
|
+
}) => Effect.Effect<FlowJob, UploadistaError, never>;
|
|
1470
|
+
pauseFlow: (jobId: string, clientId: string | null) => Effect.Effect<FlowJob, UploadistaError, never>;
|
|
1471
|
+
cancelFlow: (jobId: string, clientId: string | null) => Effect.Effect<FlowJob, UploadistaError, never>;
|
|
1472
|
+
subscribeToFlowEvents: (jobId: string, connection: WebSocketConnection) => Effect.Effect<void, UploadistaError, never>;
|
|
1473
|
+
unsubscribeFromFlowEvents: (jobId: string) => Effect.Effect<void, UploadistaError, never>;
|
|
1474
|
+
}, never, FlowEventEmitter | FlowJobKVStore | FlowProvider | UploadEngine>;
|
|
1475
|
+
declare const flowEngine: Layer.Layer<FlowEngine, never, FlowEventEmitter | FlowJobKVStore | FlowProvider | UploadEngine>;
|
|
1476
|
+
type FlowEngineLayer = typeof flowEngine;
|
|
1477
|
+
//#endregion
|
|
1478
|
+
//#region src/flow/flow-queue.d.ts
|
|
1479
|
+
declare const FlowQueueDispatchMarker_base: Context.TagClass<FlowQueueDispatchMarker, "FlowQueueDispatchMarker", true>;
|
|
1480
|
+
/**
|
|
1481
|
+
* Context marker that signals the current Effect is running inside the
|
|
1482
|
+
* FlowQueueService worker dispatch loop.
|
|
1483
|
+
*
|
|
1484
|
+
* When this marker is present in the Effect context, FlowEngine.runFlow()
|
|
1485
|
+
* skips the FlowQueueService delegation and executes directly via forkDaemon.
|
|
1486
|
+
* This prevents infinite re-enqueue cycles when the worker calls runFlow.
|
|
1487
|
+
*
|
|
1488
|
+
* @internal
|
|
1489
|
+
*/
|
|
1490
|
+
declare class FlowQueueDispatchMarker extends FlowQueueDispatchMarker_base {}
|
|
1491
|
+
/**
|
|
1492
|
+
* Shape of the FlowQueueService.
|
|
1493
|
+
*
|
|
1494
|
+
* All operations return Effect types for composable, type-safe error handling.
|
|
1495
|
+
*/
|
|
1496
|
+
interface FlowQueueServiceShape {
|
|
1497
|
+
/**
|
|
1498
|
+
* Enqueue a flow for execution.
|
|
1499
|
+
*
|
|
1500
|
+
* Returns immediately with a FlowQueueItem in "pending" state.
|
|
1501
|
+
* The worker loop will dispatch the flow when a concurrency slot is available.
|
|
1502
|
+
*
|
|
1503
|
+
* @param params - Flow execution parameters
|
|
1504
|
+
* @returns The created queue item with status "pending"
|
|
1505
|
+
*/
|
|
1506
|
+
enqueue(params: {
|
|
1507
|
+
flowId: string;
|
|
1508
|
+
storageId: string;
|
|
1509
|
+
input: unknown;
|
|
1510
|
+
clientId: string | null;
|
|
1511
|
+
dlqItemId?: string;
|
|
1512
|
+
}): Effect.Effect<FlowQueueItem, UploadistaError>;
|
|
1513
|
+
/**
|
|
1514
|
+
* Retrieve the current status of a queue item by ID.
|
|
1515
|
+
*
|
|
1516
|
+
* @param itemId - The queue item ID
|
|
1517
|
+
* @returns The queue item
|
|
1518
|
+
* @throws QUEUE_ITEM_NOT_FOUND if the ID is unknown
|
|
1519
|
+
*/
|
|
1520
|
+
getStatus(itemId: string): Effect.Effect<FlowQueueItem, UploadistaError>;
|
|
1521
|
+
/**
|
|
1522
|
+
* Cancel a pending queue item before it starts executing.
|
|
1523
|
+
*
|
|
1524
|
+
* @param itemId - The queue item ID
|
|
1525
|
+
* @throws QUEUE_ITEM_ALREADY_RUNNING if the item is already running
|
|
1526
|
+
*/
|
|
1527
|
+
cancel(itemId: string): Effect.Effect<void, UploadistaError>;
|
|
1528
|
+
/**
|
|
1529
|
+
* List queue items, optionally filtered by status.
|
|
1530
|
+
*
|
|
1531
|
+
* @param options - Optional filter options
|
|
1532
|
+
* @returns Array of matching queue items
|
|
1533
|
+
*/
|
|
1534
|
+
list(options?: {
|
|
1535
|
+
status?: FlowQueueItem["status"];
|
|
1536
|
+
}): Effect.Effect<FlowQueueItem[], UploadistaError>;
|
|
1537
|
+
/**
|
|
1538
|
+
* Get aggregate queue statistics for monitoring.
|
|
1539
|
+
*
|
|
1540
|
+
* @returns Current queue stats including counts and concurrency info
|
|
1541
|
+
*/
|
|
1542
|
+
getStats(): Effect.Effect<FlowQueueStats, UploadistaError>;
|
|
1543
|
+
}
|
|
1544
|
+
declare const FlowQueueService_base: Context.TagClass<FlowQueueService, "FlowQueueService", FlowQueueServiceShape>;
|
|
1545
|
+
/**
|
|
1546
|
+
* Effect-TS context tag for the FlowQueueService.
|
|
1547
|
+
*
|
|
1548
|
+
* Use `FlowQueueService.optional` to resolve it optionally — this is the
|
|
1549
|
+
* pattern used in FlowEngine to preserve backward compatibility.
|
|
1550
|
+
*
|
|
1551
|
+
* @example
|
|
1552
|
+
* ```typescript
|
|
1553
|
+
* // In FlowEngine.runFlow()
|
|
1554
|
+
* const queueOption = yield* FlowQueueService.optional;
|
|
1555
|
+
* if (Option.isSome(queueOption)) {
|
|
1556
|
+
* return yield* queueOption.value.enqueue({ flowId, storageId, input, clientId });
|
|
1557
|
+
* }
|
|
1558
|
+
* // ... existing fork path
|
|
1559
|
+
*
|
|
1560
|
+
* // From application code
|
|
1561
|
+
* const queue = yield* FlowQueueService;
|
|
1562
|
+
* const item = yield* queue.enqueue({ flowId: "my-flow", storageId: "s3", input: {}, clientId: null });
|
|
1563
|
+
* ```
|
|
1564
|
+
*/
|
|
1565
|
+
declare class FlowQueueService extends FlowQueueService_base {
|
|
1566
|
+
/**
|
|
1567
|
+
* Access the FlowQueueService optionally.
|
|
1568
|
+
* Returns Option.none() if the service is not present in the layer.
|
|
1569
|
+
*
|
|
1570
|
+
* Use this in FlowEngine to remain backward-compatible.
|
|
1571
|
+
*/
|
|
1572
|
+
static readonly optional: Effect.Effect<Option.Option<FlowQueueServiceShape>, never, never>;
|
|
1573
|
+
/**
|
|
1574
|
+
* Create a FlowQueueService Layer using the default in-memory store.
|
|
1575
|
+
*
|
|
1576
|
+
* @param config - Optional configuration overrides
|
|
1577
|
+
* @returns A Layer providing FlowQueueService
|
|
1578
|
+
*/
|
|
1579
|
+
static Default(config?: FlowQueueConfig): Layer.Layer<FlowQueueService, never, FlowEngine>;
|
|
1580
|
+
/**
|
|
1581
|
+
* Create a FlowQueueService Layer with a custom store.
|
|
1582
|
+
*
|
|
1583
|
+
* @param config - Configuration (maxConcurrency, dlqRetryIntervalMs, dlqRetryBatchSize)
|
|
1584
|
+
* @param store - The FlowQueueStore implementation to use
|
|
1585
|
+
* @returns A Layer providing FlowQueueService
|
|
1586
|
+
*/
|
|
1587
|
+
static make(config: FlowQueueConfig, store: FlowQueueStore): Layer.Layer<FlowQueueService, never, FlowEngine>;
|
|
1588
|
+
/**
|
|
1589
|
+
* Create a FlowQueueService Layer backed by the application's BaseKvStoreService.
|
|
1590
|
+
*
|
|
1591
|
+
* Items are persisted under the "uploadista:queue-item:" key prefix, using the
|
|
1592
|
+
* same KV store already configured for the server (Redis, Cloudflare KV, etc.).
|
|
1593
|
+
* This is the recommended factory for most deployments — no separate store
|
|
1594
|
+
* dependency is needed beyond the kvStore already wired at server level.
|
|
1595
|
+
*
|
|
1596
|
+
* @param config - Optional queue configuration (maxConcurrency, retry intervals…)
|
|
1597
|
+
* @returns A Layer providing FlowQueueService, requiring FlowEngine and BaseKvStoreService
|
|
1598
|
+
*
|
|
1599
|
+
* @example
|
|
1600
|
+
* ```typescript
|
|
1601
|
+
* // In createUploadistaServer — flowQueue: true uses this automatically
|
|
1602
|
+
* FlowQueueService.fromKvStore({ maxConcurrency: 8 })
|
|
1603
|
+
* .pipe(Layer.provide(flowEngineLayer), Layer.provide(kvStore))
|
|
1604
|
+
* ```
|
|
1605
|
+
*/
|
|
1606
|
+
static fromKvStore(config?: FlowQueueConfig): Layer.Layer<FlowQueueService, never, FlowEngine | FlowQueueKVStore>;
|
|
1607
|
+
/**
|
|
1608
|
+
* Shorthand for fromKvStore — creates the full layer including the KV store
|
|
1609
|
+
* sub-layer, requiring only FlowEngine and BaseKvStoreService.
|
|
1610
|
+
*/
|
|
1611
|
+
static fromBaseKvStore(config?: FlowQueueConfig): Layer.Layer<FlowQueueService, never, FlowEngine | BaseKvStoreService>;
|
|
1612
|
+
}
|
|
1613
|
+
//#endregion
|
|
1614
|
+
//#region src/flow/nodes/input-node.d.ts
|
|
1615
|
+
/**
|
|
1616
|
+
* Union schema for all input operations.
|
|
1617
|
+
* Defines the possible input data structures for the input node.
|
|
1618
|
+
*/
|
|
1619
|
+
declare const inputDataSchema: z.ZodUnion<readonly [z.ZodObject<{
|
|
1620
|
+
operation: z.ZodLiteral<"init">;
|
|
1621
|
+
storageId: z.ZodString;
|
|
1622
|
+
metadata: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodAny>>;
|
|
1623
|
+
}, z.core.$strip>, z.ZodObject<{
|
|
1624
|
+
operation: z.ZodLiteral<"finalize">;
|
|
1625
|
+
uploadId: z.ZodString;
|
|
1626
|
+
}, z.core.$strip>, z.ZodObject<{
|
|
1627
|
+
operation: z.ZodLiteral<"url">;
|
|
1628
|
+
url: z.ZodString;
|
|
1629
|
+
storageId: z.ZodOptional<z.ZodString>;
|
|
1630
|
+
metadata: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodAny>>;
|
|
1631
|
+
}, z.core.$strip>]>;
|
|
1632
|
+
/**
|
|
1633
|
+
* Type representing the input data for the input node.
|
|
1634
|
+
* Can be one of three operation types: init, finalize, or url.
|
|
1635
|
+
*/
|
|
1636
|
+
type InputData = z.infer<typeof inputDataSchema>;
|
|
1637
|
+
/**
|
|
1638
|
+
* Schema for input node filtering parameters.
|
|
1639
|
+
* Defines validation rules for incoming files.
|
|
1640
|
+
*/
|
|
1641
|
+
declare const inputNodeParamsSchema: z.ZodObject<{
|
|
1642
|
+
allowedMimeTypes: z.ZodOptional<z.ZodArray<z.ZodString>>;
|
|
1643
|
+
minSize: z.ZodOptional<z.ZodNumber>;
|
|
1644
|
+
maxSize: z.ZodOptional<z.ZodNumber>;
|
|
1645
|
+
}, z.core.$strip>;
|
|
1646
|
+
/**
|
|
1647
|
+
* Parameters for configuring input node validation.
|
|
1648
|
+
* Controls which files are accepted based on type and size constraints.
|
|
1649
|
+
*/
|
|
1650
|
+
type InputNodeParams = z.infer<typeof inputNodeParamsSchema>;
|
|
1651
|
+
/**
|
|
1652
|
+
* Creates an input node for handling file input through multiple methods.
|
|
1653
|
+
*
|
|
1654
|
+
* The input node supports three operation types:
|
|
1655
|
+
* - `init`: Initialize a streaming upload session
|
|
1656
|
+
* - `finalize`: Complete a streaming upload after all chunks are uploaded
|
|
1657
|
+
* - `url`: Fetch a file directly from a URL
|
|
1658
|
+
*
|
|
1659
|
+
* @param id - Unique identifier for the node
|
|
1660
|
+
* @param params - Optional validation parameters for filtering incoming files
|
|
1661
|
+
* @returns An Effect that creates a flow node configured for file input
|
|
1662
|
+
*
|
|
1663
|
+
* @example
|
|
1664
|
+
* ```typescript
|
|
1665
|
+
* // Create input node with validation
|
|
1666
|
+
* const inputNode = yield* createInputNode("file-input", {
|
|
1667
|
+
* allowedMimeTypes: ["image/*", "application/pdf"],
|
|
1668
|
+
* maxSize: 10 * 1024 * 1024, // 10MB
|
|
1669
|
+
* });
|
|
1670
|
+
*
|
|
1671
|
+
* // Create input node without validation
|
|
1672
|
+
* const openInputNode = yield* createInputNode("open-input");
|
|
1673
|
+
* ```
|
|
1674
|
+
*/
|
|
1675
|
+
declare function createInputNode(id: string, params?: InputNodeParams, options?: {
|
|
1676
|
+
keepOutput?: boolean;
|
|
1677
|
+
}): Effect.Effect<FlowNodeData & {
|
|
1678
|
+
inputSchema: z.ZodType<{
|
|
1679
|
+
operation: "init";
|
|
1680
|
+
storageId: string;
|
|
1681
|
+
metadata?: Record<string, any> | undefined;
|
|
1682
|
+
} | {
|
|
1683
|
+
operation: "finalize";
|
|
1684
|
+
uploadId: string;
|
|
1685
|
+
} | {
|
|
1686
|
+
operation: "url";
|
|
1687
|
+
url: string;
|
|
1688
|
+
storageId?: string | undefined;
|
|
1689
|
+
metadata?: Record<string, any> | undefined;
|
|
1690
|
+
}, unknown, z.core.$ZodTypeInternals<{
|
|
1691
|
+
operation: "init";
|
|
1692
|
+
storageId: string;
|
|
1693
|
+
metadata?: Record<string, any> | undefined;
|
|
1694
|
+
} | {
|
|
1695
|
+
operation: "finalize";
|
|
1696
|
+
uploadId: string;
|
|
1697
|
+
} | {
|
|
1698
|
+
operation: "url";
|
|
1699
|
+
url: string;
|
|
1700
|
+
storageId?: string | undefined;
|
|
1701
|
+
metadata?: Record<string, any> | undefined;
|
|
1702
|
+
}, unknown>>;
|
|
1703
|
+
outputSchema: z.ZodType<UploadFile, unknown, z.core.$ZodTypeInternals<UploadFile, unknown>>;
|
|
1704
|
+
run: (args: {
|
|
1705
|
+
data: {
|
|
1706
|
+
operation: "init";
|
|
1707
|
+
storageId: string;
|
|
1708
|
+
metadata?: Record<string, any> | undefined;
|
|
1709
|
+
} | {
|
|
1710
|
+
operation: "finalize";
|
|
1711
|
+
uploadId: string;
|
|
1712
|
+
} | {
|
|
1713
|
+
operation: "url";
|
|
1714
|
+
url: string;
|
|
1715
|
+
storageId?: string | undefined;
|
|
1716
|
+
metadata?: Record<string, any> | undefined;
|
|
1717
|
+
};
|
|
1718
|
+
jobId: string;
|
|
1719
|
+
storageId: string;
|
|
1720
|
+
flowId: string;
|
|
1721
|
+
inputs?: Record<string, unknown>;
|
|
1722
|
+
clientId: string | null;
|
|
1723
|
+
}) => Effect.Effect<NodeExecutionResult<UploadFile>, UploadistaError, never>;
|
|
1724
|
+
condition?: {
|
|
1725
|
+
field: string;
|
|
1726
|
+
operator: string;
|
|
1727
|
+
value: unknown;
|
|
1728
|
+
};
|
|
1729
|
+
multiInput?: boolean;
|
|
1730
|
+
multiOutput?: boolean;
|
|
1731
|
+
pausable?: boolean;
|
|
1732
|
+
retry?: {
|
|
1733
|
+
maxRetries?: number;
|
|
1734
|
+
retryDelay?: number;
|
|
1735
|
+
exponentialBackoff?: boolean;
|
|
1736
|
+
};
|
|
1737
|
+
circuitBreaker?: FlowCircuitBreakerConfig;
|
|
1738
|
+
} & {
|
|
1739
|
+
type: NodeType.input;
|
|
1740
|
+
}, UploadistaError, UploadEngine>;
|
|
1741
|
+
//#endregion
|
|
1742
|
+
//#region src/flow/types/flow-file.d.ts
|
|
1743
|
+
/**
|
|
1744
|
+
* Conditional execution rules for flow nodes.
|
|
1745
|
+
*
|
|
1746
|
+
* Conditions allow nodes to execute conditionally based on file properties or metadata.
|
|
1747
|
+
* They are evaluated before node execution and can skip nodes that don't match.
|
|
1748
|
+
*
|
|
1749
|
+
* @module flow/types/flow-file
|
|
1750
|
+
* @see {@link FlowNode} for how conditions are used in nodes
|
|
1751
|
+
*
|
|
1752
|
+
* @example
|
|
1753
|
+
* ```typescript
|
|
1754
|
+
* // Only process images larger than 1MB
|
|
1755
|
+
* const condition: FlowCondition = {
|
|
1756
|
+
* field: "size",
|
|
1757
|
+
* operator: "greaterThan",
|
|
1758
|
+
* value: 1024 * 1024
|
|
1759
|
+
* };
|
|
1760
|
+
*
|
|
1761
|
+
* // Only process JPEG images
|
|
1762
|
+
* const jpegCondition: FlowCondition = {
|
|
1763
|
+
* field: "mimeType",
|
|
1764
|
+
* operator: "startsWith",
|
|
1765
|
+
* value: "image/jpeg"
|
|
1766
|
+
* };
|
|
1767
|
+
* ```
|
|
1768
|
+
*/
|
|
1769
|
+
/**
|
|
1770
|
+
* Represents a conditional rule for node execution.
|
|
1771
|
+
*
|
|
1772
|
+
* @property field - The file property to check
|
|
1773
|
+
* @property operator - The comparison operator to apply
|
|
1774
|
+
* @property value - The value to compare against
|
|
1775
|
+
*
|
|
1776
|
+
* @remarks
|
|
1777
|
+
* - Fields can check file metadata (mimeType, size) or image properties (width, height)
|
|
1778
|
+
* - String operators (contains, startsWith) work with string values
|
|
1779
|
+
* - Numeric operators (greaterThan, lessThan) work with numeric values
|
|
1780
|
+
* - The extension field checks the file extension without the dot
|
|
1781
|
+
*/
|
|
1782
|
+
type FlowCondition = {
|
|
1783
|
+
field: "mimeType" | "size" | "width" | "height" | "extension";
|
|
1784
|
+
operator: "equals" | "notEquals" | "greaterThan" | "lessThan" | "contains" | "startsWith";
|
|
1785
|
+
value: string | number;
|
|
1786
|
+
};
|
|
1787
|
+
//#endregion
|
|
1788
|
+
//#region src/flow/types/type-utils.d.ts
|
|
1789
|
+
/**
|
|
1790
|
+
* Extracts the service type from an Effect Layer.
|
|
1791
|
+
*
|
|
1792
|
+
* Given a Layer that provides a service, this type utility extracts
|
|
1793
|
+
* the service type from the Layer's type signature.
|
|
1794
|
+
*
|
|
1795
|
+
* @template T - The Layer type to extract from
|
|
1796
|
+
* @returns The service type provided by the layer, or never if T is not a Layer
|
|
1797
|
+
*
|
|
1798
|
+
* @example
|
|
1799
|
+
* ```typescript
|
|
1800
|
+
* type MyLayer = Layer.Layer<ServiceA, never, never>;
|
|
1801
|
+
* type Service = ExtractLayerService<MyLayer>;
|
|
1802
|
+
* // Service = ServiceA
|
|
1803
|
+
* ```
|
|
1804
|
+
*
|
|
1805
|
+
* @example
|
|
1806
|
+
* ```typescript
|
|
1807
|
+
* import { ImagePluginLayer } from '@uploadista/core';
|
|
1808
|
+
*
|
|
1809
|
+
* type ImageService = ExtractLayerService<ImagePluginLayer>;
|
|
1810
|
+
* // ImageService = ImagePlugin
|
|
1811
|
+
* ```
|
|
1812
|
+
*/
|
|
1813
|
+
type ExtractLayerService<T, TError = never, TRequirements = never> = T extends Layer.Layer<infer S, TError, TRequirements> ? S : never;
|
|
1814
|
+
/**
|
|
1815
|
+
* Extracts all service types from a tuple of layers and returns them as a union.
|
|
1816
|
+
*
|
|
1817
|
+
* This type recursively processes a tuple of Layer types and extracts all
|
|
1818
|
+
* the services they provide, combining them into a single union type.
|
|
1819
|
+
*
|
|
1820
|
+
* @template T - A readonly tuple of Layer types
|
|
1821
|
+
* @returns A union of all service types provided by the layers, or never for empty tuples
|
|
1822
|
+
*
|
|
1823
|
+
* @example
|
|
1824
|
+
* ```typescript
|
|
1825
|
+
* type Layers = [
|
|
1826
|
+
* Layer.Layer<ServiceA, never, never>,
|
|
1827
|
+
* Layer.Layer<ServiceB, never, never>,
|
|
1828
|
+
* Layer.Layer<ServiceC, never, never>
|
|
1829
|
+
* ];
|
|
1830
|
+
* type Services = ExtractLayerServices<Layers>;
|
|
1831
|
+
* // Services = ServiceA | ServiceB | ServiceC
|
|
1832
|
+
* ```
|
|
1833
|
+
*
|
|
1834
|
+
* @example
|
|
1835
|
+
* ```typescript
|
|
1836
|
+
* import { ImagePluginLayer, ZipPluginLayer } from '@uploadista/core';
|
|
1837
|
+
*
|
|
1838
|
+
* type PluginLayers = [ImagePluginLayer, ZipPluginLayer];
|
|
1839
|
+
* type AllServices = ExtractLayerServices<PluginLayers>;
|
|
1840
|
+
* // AllServices = ImagePlugin | ZipPlugin
|
|
1841
|
+
* ```
|
|
1842
|
+
*
|
|
1843
|
+
* @example
|
|
1844
|
+
* ```typescript
|
|
1845
|
+
* type EmptyLayers = [];
|
|
1846
|
+
* type NoServices = ExtractLayerServices<EmptyLayers>;
|
|
1847
|
+
* // NoServices = never
|
|
1848
|
+
* ```
|
|
1849
|
+
*/
|
|
1850
|
+
type ExtractLayerServices<T extends readonly Layer.Layer<any, any, any>[]> = T extends readonly [] ? never : { [K in keyof T]: T[K] extends Layer.Layer<infer S, any, any> ? S : never }[number];
|
|
1851
|
+
/**
|
|
1852
|
+
* Unwraps an Effect type to extract its success value type.
|
|
1853
|
+
*
|
|
1854
|
+
* If the input type is an Effect, this extracts the success type (first type parameter).
|
|
1855
|
+
* If the input is not an Effect, it returns the type unchanged.
|
|
1856
|
+
*
|
|
1857
|
+
* @template T - The type to resolve, potentially an Effect
|
|
1858
|
+
* @returns The success type if T is an Effect, otherwise T
|
|
1859
|
+
*
|
|
1860
|
+
* @example
|
|
1861
|
+
* ```typescript
|
|
1862
|
+
* type MyEffect = Effect.Effect<string, Error, never>;
|
|
1863
|
+
* type Result = ResolveEffect<MyEffect>;
|
|
1864
|
+
* // Result = string
|
|
1865
|
+
* ```
|
|
1866
|
+
*
|
|
1867
|
+
* @example
|
|
1868
|
+
* ```typescript
|
|
1869
|
+
* type NonEffect = { data: string };
|
|
1870
|
+
* type Result = ResolveEffect<NonEffect>;
|
|
1871
|
+
* // Result = { data: string }
|
|
1872
|
+
* ```
|
|
1873
|
+
*/
|
|
1874
|
+
type ResolveEffect<T> = T extends Effect.Effect<infer S, any, any> ? S : T;
|
|
1875
|
+
/**
|
|
1876
|
+
* Extracts the error type from an Effect.
|
|
1877
|
+
*
|
|
1878
|
+
* Given an Effect type, this utility extracts the error type
|
|
1879
|
+
* (second type parameter) from the Effect's type signature.
|
|
1880
|
+
*
|
|
1881
|
+
* @template T - The Effect type to extract from
|
|
1882
|
+
* @returns The error type of the Effect, or never if T is not an Effect
|
|
1883
|
+
*
|
|
1884
|
+
* @example
|
|
1885
|
+
* ```typescript
|
|
1886
|
+
* type MyEffect = Effect.Effect<string, ValidationError, never>;
|
|
1887
|
+
* type ErrorType = ExtractEffectError<MyEffect>;
|
|
1888
|
+
* // ErrorType = ValidationError
|
|
1889
|
+
* ```
|
|
1890
|
+
*
|
|
1891
|
+
* @example
|
|
1892
|
+
* ```typescript
|
|
1893
|
+
* type SafeEffect = Effect.Effect<number, never, SomeService>;
|
|
1894
|
+
* type ErrorType = ExtractEffectError<SafeEffect>;
|
|
1895
|
+
* // ErrorType = never (no errors possible)
|
|
1896
|
+
* ```
|
|
1897
|
+
*/
|
|
1898
|
+
type ExtractEffectError<T> = T extends Effect.Effect<any, infer E, any> ? E : never;
|
|
1899
|
+
/**
|
|
1900
|
+
* Extracts the requirements (context) type from an Effect.
|
|
1901
|
+
*
|
|
1902
|
+
* Given an Effect type, this utility extracts the requirements type
|
|
1903
|
+
* (third type parameter) from the Effect's type signature. This represents
|
|
1904
|
+
* the services that must be provided for the Effect to run.
|
|
1905
|
+
*
|
|
1906
|
+
* @template T - The Effect type to extract from
|
|
1907
|
+
* @returns The requirements type of the Effect, or never if T is not an Effect
|
|
1908
|
+
*
|
|
1909
|
+
* @example
|
|
1910
|
+
* ```typescript
|
|
1911
|
+
* type MyEffect = Effect.Effect<string, Error, Database | Logger>;
|
|
1912
|
+
* type Requirements = ExtractEffectRequirements<MyEffect>;
|
|
1913
|
+
* // Requirements = Database | Logger
|
|
1914
|
+
* ```
|
|
1915
|
+
*
|
|
1916
|
+
* @example
|
|
1917
|
+
* ```typescript
|
|
1918
|
+
* import { ImagePlugin, ZipPlugin } from '@uploadista/core';
|
|
1919
|
+
*
|
|
1920
|
+
* type ProcessEffect = Effect.Effect<
|
|
1921
|
+
* ProcessedImage,
|
|
1922
|
+
* ProcessError,
|
|
1923
|
+
* ImagePlugin | ZipPlugin
|
|
1924
|
+
* >;
|
|
1925
|
+
* type Needed = ExtractEffectRequirements<ProcessEffect>;
|
|
1926
|
+
* // Needed = ImagePlugin | ZipPlugin
|
|
1927
|
+
* ```
|
|
1928
|
+
*/
|
|
1929
|
+
type ExtractEffectRequirements<T> = T extends Effect.Effect<any, any, infer R> ? R : never;
|
|
1930
|
+
//#endregion
|
|
1931
|
+
//#region src/flow/nodes/transform-node.d.ts
|
|
1932
|
+
/**
|
|
1933
|
+
* Transform mode for controlling how file data is processed.
|
|
1934
|
+
*
|
|
1935
|
+
* - `buffered`: Always load entire file into memory before transforming (default, backward compatible)
|
|
1936
|
+
* - `streaming`: Process file as a stream of chunks for memory efficiency
|
|
1937
|
+
* - `auto`: Automatically select mode based on file size and DataStore capabilities
|
|
1938
|
+
*/
|
|
1939
|
+
type TransformMode = "buffered" | "streaming" | "auto";
|
|
1940
|
+
/**
|
|
1941
|
+
* Result type for streaming transforms.
|
|
1942
|
+
* Can return just the transformed stream, or include metadata changes.
|
|
1943
|
+
*/
|
|
1944
|
+
type StreamingTransformResult = Stream.Stream<Uint8Array, UploadistaError> | {
|
|
1945
|
+
stream: Stream.Stream<Uint8Array, UploadistaError>;
|
|
1946
|
+
type?: string;
|
|
1947
|
+
fileName?: string; /** Estimated output size in bytes (for progress tracking) */
|
|
1948
|
+
estimatedSize?: number;
|
|
1949
|
+
};
|
|
1950
|
+
/**
|
|
1951
|
+
* Function type for streaming transforms.
|
|
1952
|
+
* Receives an input stream and file metadata, returns a transformed stream.
|
|
1953
|
+
*/
|
|
1954
|
+
type StreamingTransformFn = (stream: Stream.Stream<Uint8Array, UploadistaError>, file: UploadFile) => Effect.Effect<StreamingTransformResult, UploadistaError>;
|
|
1955
|
+
/**
 * Configuration object for creating a transform node.
 */
interface TransformNodeConfig {
  /** Unique identifier for the node */
  id: string;
  /** Human-readable name for the node */
  name: string;
  /** Description of what the node does */
  description: string;
  /** Optional output type ID from outputTypeRegistry for result type registration */
  outputTypeId?: string;
  /**
   * Whether to keep this node's output as a flow result even if it has outgoing edges.
   * When true, the node's output will be included in the final flow outputs alongside topology sinks.
   * Defaults to false.
   */
  keepOutput?: boolean;
  /**
   * Optional file naming configuration.
   * - undefined: Preserve original filename (backward compatible)
   * - mode: 'auto': Generate smart suffix based on node type
   * - mode: 'custom': Use template pattern or rename function
   */
  naming?: FileNamingConfig;
  /**
   * Node type identifier used for auto-naming context.
   * Defaults to "transform" if not specified.
   */
  nodeType?: string;
  /**
   * Stable node type identifier for circuit breaker configuration.
   * Used to share circuit breaker state across nodes of the same type
   * and for nodeTypeOverrides in flow config.
   * Example: "describe-image", "remove-background", "scan-virus"
   */
  nodeTypeId?: string;
  /**
   * Additional variables to include in the naming context.
   * These are merged with the base context (flowId, jobId, etc.)
   * and can be used in templates.
   */
  namingVars?: Record<string, string | number | undefined>;
  /**
   * Circuit breaker configuration for resilience against external service failures.
   * Overrides flow-level circuit breaker defaults for this node.
   */
  circuitBreaker?: FlowCircuitBreakerConfig;
  /**
   * Transform mode controlling how file data is processed.
   * - `buffered`: Always load entire file into memory
   * - `streaming`: Process file as a stream of chunks
   * - `auto`: Select mode based on file size and DataStore capabilities (default)
   *
   * @default "auto"
   */
  mode?: TransformMode;
  /**
   * Configuration for streaming mode (file size threshold, chunk size).
   * Only used when mode is "streaming" or "auto".
   */
  streamingConfig?: StreamingConfig;
  /**
   * Function that transforms file bytes (buffered mode).
   * Required unless streamingTransform is provided and mode is "streaming".
   * May return just the transformed bytes, or an object that also carries
   * an updated content type, file name, and extra metadata.
   */
  transform?: (bytes: Uint8Array, file: UploadFile) => Effect.Effect<Uint8Array | {
    bytes: Uint8Array;
    type?: string;
    fileName?: string;
    metadata?: Record<string, unknown>;
  }, UploadistaError>;
  /**
   * Function that transforms file as a stream (streaming mode).
   * For memory-efficient processing of large files.
   * Used when mode is "streaming" or when "auto" selects streaming.
   */
  streamingTransform?: StreamingTransformFn;
}
|
|
2034
|
+
/**
 * Creates a transform node that handles the common pattern of:
 * 1. Reading bytes from an UploadFile
 * 2. Transforming the bytes
 * 3. Uploading the result as a new UploadFile
 *
 * This simplifies nodes that just need to transform file bytes without
 * worrying about upload server interactions.
 *
 * Supports both buffered and streaming modes:
 * - **Buffered mode**: Loads entire file into memory, transforms, uploads
 * - **Streaming mode**: Processes file as chunks for memory efficiency with large files
 * - **Auto mode** (default): Selects mode based on file size and DataStore capabilities
 *
 * @param config - Configuration object for the transform node
 * @returns An Effect that creates a flow node configured for file transformation
 *
 * @example
 * ```typescript
 * // Create a transform node with auto mode (default) - uses streaming for large files
 * const resizeNode = yield* createTransformNode({
 *   id: "resize-image",
 *   name: "Resize Image",
 *   description: "Resizes images to specified dimensions",
 *   transform: (bytes, file) => {
 *     // Your transformation logic here
 *     return Effect.succeed(transformedBytes);
 *   },
 *   streamingTransform: (stream, file) => {
 *     const transformed = Stream.map(stream, (chunk) => processChunk(chunk));
 *     return Effect.succeed(transformed);
 *   }
 * });
 *
 * // Force buffered mode for specific use cases
 * const bufferedNode = yield* createTransformNode({
 *   id: "optimize-small",
 *   name: "Optimize Small Files",
 *   description: "Optimizes small files with buffered mode",
 *   mode: "buffered",
 *   transform: (bytes, file) => Effect.succeed(transformBytes(bytes)),
 * });
 *
 * // Force streaming mode for memory efficiency
 * const streamingNode = yield* createTransformNode({
 *   id: "optimize-large",
 *   name: "Optimize Large Files",
 *   description: "Optimizes large files with streaming",
 *   mode: "streaming",
 *   streamingTransform: (stream, file) => {
 *     const transformed = Stream.map(stream, (chunk) => processChunk(chunk));
 *     return Effect.succeed(transformed);
 *   }
 * });
 * ```
 */
declare function createTransformNode({
  id,
  name,
  description,
  outputTypeId,
  keepOutput,
  naming,
  nodeType: namingNodeType,
  nodeTypeId,
  namingVars,
  circuitBreaker,
  mode,
  streamingConfig,
  transform,
  streamingTransform
}: TransformNodeConfig): Effect.Effect<FlowNodeData & {
  inputSchema: zod.ZodType<UploadFile, unknown, zod_v4_core0.$ZodTypeInternals<UploadFile, unknown>>;
  outputSchema: zod.ZodType<UploadFile, unknown, zod_v4_core0.$ZodTypeInternals<UploadFile, unknown>>;
  run: (args: {
    data: UploadFile;
    jobId: string;
    storageId: string;
    flowId: string;
    inputs?: Record<string, unknown>;
    clientId: string | null;
  }) => Effect.Effect<NodeExecutionResult<UploadFile>, UploadistaError, never>;
  condition?: {
    field: string;
    operator: string;
    value: unknown;
  };
  multiInput?: boolean;
  multiOutput?: boolean;
  pausable?: boolean;
  retry?: {
    maxRetries?: number;
    retryDelay?: number;
    exponentialBackoff?: boolean;
  };
  circuitBreaker?: FlowCircuitBreakerConfig;
} & {
  type: NodeType;
}, UploadistaError, UploadEngine>;
|
|
2133
|
+
//#endregion
|
|
2134
|
+
//#region src/flow/parallel-scheduler.d.ts
|
|
2135
|
+
/**
 * Represents a level in the execution hierarchy where all nodes can run in parallel.
 *
 * @property level - The execution level (0 = first to execute, higher = later)
 * @property nodes - Array of node IDs that can execute in parallel at this level
 *
 * @example
 * ```
 * Level 0: [input_node] (no dependencies)
 * Level 1: [resize, optimize] (all depend on level 0)
 * Level 2: [storage] (depends on level 1)
 * ```
 */
interface ExecutionLevel {
  /** The execution level; 0 runs first, higher levels run later */
  level: number;
  /** Node IDs that can execute in parallel at this level */
  nodes: string[];
}
/**
 * Configuration options for the ParallelScheduler.
 *
 * @property maxConcurrency - Maximum number of nodes to execute in parallel (default: 4)
 *                            Controls how many nodes run simultaneously within a level
 *
 * @example
 * ```typescript
 * const scheduler = new ParallelScheduler({ maxConcurrency: 8 });
 * ```
 */
interface ParallelSchedulerConfig {
  /** Maximum number of nodes executed concurrently within a level (default: 4) */
  maxConcurrency?: number;
}
|
|
2166
|
+
/**
 * Scheduler for executing flow nodes in parallel while respecting dependencies.
 *
 * The scheduler performs topological sorting to identify nodes that can run
 * concurrently, groups them into execution levels, and provides methods to
 * execute them with controlled concurrency using Effect.
 *
 * Key responsibilities:
 * - Analyze flow dependencies and detect cycles
 * - Group nodes into parallel execution levels
 * - Execute levels in parallel with concurrency limits
 * - Provide utilities to check parallel execution feasibility
 */
declare class ParallelScheduler {
  private maxConcurrency;
  /**
   * Creates a new ParallelScheduler instance.
   *
   * @param config - Configuration for the scheduler
   * @example
   * ```typescript
   * const scheduler = new ParallelScheduler({ maxConcurrency: 4 });
   * ```
   */
  constructor(config?: ParallelSchedulerConfig);
  /**
   * Groups nodes into execution levels where nodes in the same level can run in parallel.
   *
   * Uses Kahn's algorithm to perform topological sorting with level identification.
   * Nodes are grouped by their distance from source nodes (input nodes with no dependencies).
   *
   * @param nodes - Array of flow nodes to analyze
   * @param edges - Array of edges defining dependencies between nodes
   * @returns Array of execution levels, ordered from 0 (no dependencies) onwards
   * @throws Error if a cycle is detected in the flow graph
   *
   * @example
   * ```typescript
   * const levels = scheduler.groupNodesByExecutionLevel(nodes, edges);
   * // levels = [
   * //   { level: 0, nodes: ['input_1'] },
   * //   { level: 1, nodes: ['resize_1', 'optimize_1'] },
   * //   { level: 2, nodes: ['output_1'] }
   * // ]
   * ```
   */
  groupNodesByExecutionLevel(nodes: FlowNode<unknown, unknown>[], edges: Array<{
    source: string;
    target: string;
  }>): ExecutionLevel[];
  /**
   * Executes a batch of Effect-based node executors in parallel with concurrency control.
   *
   * All executors are run in parallel, but the number of concurrent executions is limited
   * by maxConcurrency. This prevents resource exhaustion while maximizing parallelism.
   *
   * @template T - The return type of each executor
   * @template E - The error type of the Effects
   * @template R - The requirements type of the Effects
   *
   * @param nodeExecutors - Array of Effect-returning functions to execute in parallel
   * @returns Effect that resolves to array of results in the same order as input
   *
   * @example
   * ```typescript
   * const results = yield* scheduler.executeNodesInParallel([
   *   () => executeNode("node1"),
   *   () => executeNode("node2"),
   *   () => executeNode("node3")
   * ]);
   * // results will be in order: [result1, result2, result3]
   * ```
   */
  executeNodesInParallel<T, E, R>(nodeExecutors: Array<() => Effect.Effect<T, E, R>>): Effect.Effect<T[], E, R>;
  /**
   * Determines if a set of nodes can be safely executed in parallel.
   *
   * Nodes can execute in parallel if all their dependencies have been completed.
   * This is typically called to verify that nodes in an execution level are ready
   * to run given the current node results.
   *
   * @param nodeIds - Array of node IDs to check
   * @param nodeResults - Map of completed node IDs to their results
   * @param reverseGraph - Dependency graph mapping node IDs to their incoming dependencies
   * @returns true if all dependencies for all nodes are in nodeResults, false otherwise
   *
   * @example
   * ```typescript
   * const canRun = scheduler.canExecuteInParallel(
   *   ['resize_1', 'optimize_1'],
   *   nodeResults,
   *   reverseGraph
   * );
   * ```
   */
  canExecuteInParallel(nodeIds: string[], nodeResults: Map<string, unknown>, reverseGraph: Record<string, string[]>): boolean;
  /**
   * Gets execution statistics for monitoring and debugging.
   *
   * @returns Object containing current scheduler configuration
   *
   * @example
   * ```typescript
   * const stats = scheduler.getStats();
   * console.log(`Max concurrency: ${stats.maxConcurrency}`);
   * ```
   */
  getStats(): {
    maxConcurrency: number;
  };
}
|
|
2277
|
+
//#endregion
|
|
2278
|
+
//#region src/flow/plugins/credential-provider.d.ts
|
|
2279
|
+
/**
 * Shape definition for the Credential Provider interface.
 * Defines the contract for retrieving credentials for various services.
 */
interface CredentialProviderShape {
  /**
   * Retrieves credentials for a specific service and client.
   *
   * @param params - Parameters for credential retrieval
   * @param params.clientId - Unique identifier for the client, or null if not available
   * @param params.serviceType - Optional service type to get specific credentials for
   * @returns An Effect that resolves to a record of credential key-value pairs
   * @throws {UploadistaError} When credential retrieval fails
   */
  getCredential: (params: {
    clientId: string | null;
    serviceType?: string;
  }) => Effect.Effect<Record<string, unknown>, UploadistaError>;
}
declare const CredentialProvider_base: Context.TagClass<CredentialProvider, "CredentialProvider", CredentialProviderShape>;
/**
 * Context tag for the Credential Provider.
 *
 * This tag provides a type-safe way to access credential functionality
 * throughout the application using Effect's dependency injection system.
 *
 * @example
 * ```typescript
 * import { CredentialProvider } from "@uploadista/core/flow/plugins";
 *
 * // In your flow node
 * const program = Effect.gen(function* () {
 *   const credentialProvider = yield* CredentialProvider;
 *   const credentials = yield* credentialProvider.getCredential({
 *     clientId: "user123",
 *     serviceType: "replicate"
 *   });
 *   return credentials;
 * });
 * ```
 */
declare class CredentialProvider extends CredentialProvider_base {}
/** Layer providing a CredentialProvider implementation with no further requirements */
type CredentialProviderLayer = Layer.Layer<CredentialProvider, never, never>;
|
|
2322
|
+
//#endregion
|
|
2323
|
+
//#region src/flow/plugins/document-ai-plugin.d.ts
|
|
2324
|
+
/**
 * Context information for AI document processing operations.
 * Contains client identification and credentials for tracking and billing purposes.
 */
type DocumentAiContext = {
  /** Unique identifier for the client making the request, or null if not available */
  clientId: string | null;
  /** Credential ID for accessing the AI service (e.g., Replicate API key) */
  credentialId?: string;
};
/**
 * Task types supported by OCR operations.
 */
type OcrTaskType = "convertToMarkdown" | "freeOcr" | "parseFigure" | "locateObject";
/**
 * Resolution options for OCR processing.
 * Higher resolutions provide better accuracy but slower processing.
 */
type OcrResolution = "tiny" | "small" | "base" | "gundam" | "large";
/**
 * Parameters for OCR operations.
 */
type OcrParams = {
  /**
   * Type of OCR task to perform.
   * - "convertToMarkdown": Convert document to structured Markdown
   * - "freeOcr": Extract all visible text without structure
   * - "parseFigure": Analyze charts and diagrams
   * - "locateObject": Find specific content using reference text
   */
  taskType: OcrTaskType;
  /**
   * Resolution size for processing.
   * Affects speed/accuracy tradeoff.
   * Default: "gundam" (recommended)
   */
  resolution?: OcrResolution;
  /**
   * Reference text for object location tasks.
   * Only used when taskType is "locateObject".
   */
  referenceText?: string;
};
|
|
2365
|
+
/**
 * Result of an OCR operation.
 */
type OcrResult = {
  /**
   * The extracted text content.
   */
  extractedText: string;
  /**
   * Format of the extracted text.
   * - "markdown": Structured markdown format
   * - "plain": Unstructured plain text
   * - "structured": Structured analysis (for figures)
   */
  format: "markdown" | "plain" | "structured";
  /**
   * Confidence score (0-1) if provided by the service.
   */
  confidence?: number;
};
|
|
2385
|
+
/**
 * Shape definition for the Document AI Plugin interface.
 * Defines the contract that all document AI implementations must follow.
 */
type DocumentAiPluginShape = {
  /**
   * Performs OCR on a document image or scanned PDF using AI.
   *
   * @param inputUrl - The URL of the input document/image to process
   * @param params - OCR parameters including task type and resolution
   * @param context - Context information including client ID for tracking
   * @returns An Effect that resolves to OcrResult with extracted text
   * @throws {UploadistaError} When OCR operation fails
   */
  performOCR: (inputUrl: string, params: OcrParams, context: DocumentAiContext) => Effect.Effect<OcrResult, UploadistaError>;
};
declare const DocumentAiPlugin_base: Context.TagClass<DocumentAiPlugin, "DocumentAiPlugin", DocumentAiPluginShape>;
/**
 * Context tag for the Document AI Plugin.
 *
 * This tag provides a type-safe way to access document AI functionality
 * throughout the application using Effect's dependency injection system.
 *
 * @example
 * ```typescript
 * import { DocumentAiPlugin } from "@uploadista/core/flow/plugins";
 *
 * // In your flow node
 * const program = Effect.gen(function* () {
 *   const documentAi = yield* DocumentAiPlugin;
 *   const result = yield* documentAi.performOCR(
 *     documentUrl,
 *     { taskType: "convertToMarkdown", resolution: "gundam" },
 *     { clientId: "user123" }
 *   );
 *   return result.extractedText;
 * });
 * ```
 */
declare class DocumentAiPlugin extends DocumentAiPlugin_base {}
/** Layer providing a DocumentAiPlugin implementation with no further requirements */
type DocumentAiPluginLayer = Layer.Layer<DocumentAiPlugin, never, never>;
|
|
2426
|
+
//#endregion
|
|
2427
|
+
//#region src/flow/plugins/document-plugin.d.ts
|
|
2428
|
+
/**
 * Parameters for splitting a PDF document.
 */
type SplitPdfParams = {
  /**
   * Mode of split operation.
   * - "range": Extract a contiguous range of pages
   * - "individual": Split into individual single-page PDFs
   */
  mode: "range" | "individual";
  /**
   * Starting page number (1-indexed).
   * Only used in "range" mode.
   */
  startPage?: number;
  /**
   * Ending page number (1-indexed, inclusive).
   * Only used in "range" mode.
   */
  endPage?: number;
};
/**
 * Result of a split PDF operation.
 * In "range" mode, returns a single PDF.
 * In "individual" mode, returns an array of single-page PDFs.
 */
type SplitPdfResult = {
  /** Result of a "range" split: one PDF containing the extracted page range */
  mode: "range";
  pdf: Uint8Array;
} | {
  /** Result of an "individual" split: one single-page PDF per source page */
  mode: "individual";
  pdfs: Uint8Array[];
};
/**
 * Parameters for merging multiple PDF documents.
 */
type MergePdfParams = {
  /**
   * Array of PDF documents to merge (in order).
   */
  pdfs: Uint8Array[];
};
|
|
2470
|
+
/**
 * Metadata extracted from a PDF document.
 */
type DocumentMetadata = {
  /**
   * Total number of pages in the document.
   */
  pageCount: number;
  /**
   * Document format (e.g., "pdf").
   */
  format: string;
  /**
   * Author of the document (if available).
   */
  author: string | null;
  /**
   * Title of the document (if available).
   */
  title: string | null;
  /**
   * Subject of the document (if available).
   */
  subject: string | null;
  /**
   * Creator application (if available).
   */
  creator: string | null;
  /**
   * Creation date in ISO 8601 format (if available).
   */
  creationDate: string | null;
  /**
   * Last modification date in ISO 8601 format (if available).
   */
  modifiedDate: string | null;
  /**
   * File size in bytes.
   */
  fileSize: number;
};
|
|
2511
|
+
/**
 * Shape definition for the Document Plugin interface.
 * Defines the contract that all document processing implementations must follow.
 */
type DocumentPluginShape = {
  /**
   * Extracts plain text from a searchable PDF document.
   *
   * @param input - The input PDF as a Uint8Array
   * @returns An Effect that resolves to the extracted text as a string
   * @throws {UploadistaError} When text extraction fails (e.g., PDF_ENCRYPTED, PDF_CORRUPTED)
   */
  extractText: (input: Uint8Array) => Effect.Effect<string, UploadistaError>;
  /**
   * Splits a PDF document by page range or into individual pages.
   *
   * @param input - The input PDF as a Uint8Array
   * @param options - Split parameters including mode and page range
   * @returns An Effect that resolves to either a single PDF or array of PDFs
   * @throws {UploadistaError} When splitting fails (e.g., PAGE_RANGE_INVALID)
   */
  splitPdf: (input: Uint8Array, options: SplitPdfParams) => Effect.Effect<SplitPdfResult, UploadistaError>;
  /**
   * Merges multiple PDF documents into a single document.
   *
   * @param options - Merge parameters including array of PDFs to merge
   * @returns An Effect that resolves to the merged PDF as a Uint8Array
   * @throws {UploadistaError} When merging fails
   */
  mergePdfs: (options: MergePdfParams) => Effect.Effect<Uint8Array, UploadistaError>;
  /**
   * Extracts metadata from a PDF document.
   *
   * @param input - The input PDF as a Uint8Array
   * @returns An Effect that resolves to DocumentMetadata with comprehensive document information
   * @throws {UploadistaError} When metadata extraction fails
   */
  getMetadata: (input: Uint8Array) => Effect.Effect<DocumentMetadata, UploadistaError>;
};
declare const DocumentPlugin_base: Context.TagClass<DocumentPlugin, "DocumentPlugin", DocumentPluginShape>;
/**
 * Context tag for the Document Plugin.
 *
 * This tag provides a type-safe way to access document processing functionality
 * throughout the application using Effect's dependency injection system.
 *
 * @example
 * ```typescript
 * import { DocumentPlugin } from "@uploadista/core/flow/plugins";
 *
 * // In your flow node
 * const program = Effect.gen(function* () {
 *   const documentPlugin = yield* DocumentPlugin;
 *   const text = yield* documentPlugin.extractText(pdfData);
 *   const metadata = yield* documentPlugin.getMetadata(pdfData);
 *   return { text, metadata };
 * });
 * ```
 */
declare class DocumentPlugin extends DocumentPlugin_base {}
/** Layer providing a DocumentPlugin implementation with no further requirements */
type DocumentPluginLayer = Layer.Layer<DocumentPlugin, never, never>;
|
|
2572
|
+
//#endregion
|
|
2573
|
+
//#region src/flow/plugins/image-ai-plugin.d.ts
|
|
2574
|
+
/**
 * Context information for AI image processing operations.
 * Contains client identification for tracking and billing purposes.
 */
type ImageAiContext = {
  /** Unique identifier for the client making the request, or null if not available */
  clientId: string | null;
};
/**
 * Shape definition for the Image AI Plugin interface.
 * Defines the contract that all image AI implementations must follow.
 */
type ImageAiPluginShape = {
  /**
   * Removes the background from an image using AI processing.
   *
   * @param inputUrl - The URL of the input image to process
   * @param context - Context information including client ID for tracking
   * @returns An Effect that resolves to an object containing the output image URL
   * @throws {UploadistaError} When the background removal fails
   */
  removeBackground: (inputUrl: string, context: ImageAiContext) => Effect.Effect<{
    outputUrl: string;
  }, UploadistaError>;
  /**
   * Generates a textual description of an image using AI analysis.
   *
   * @param inputUrl - The URL of the input image to analyze
   * @param context - Context information including client ID for tracking
   * @returns An Effect that resolves to an object containing the image description
   * @throws {UploadistaError} When the image analysis fails
   */
  describeImage: (inputUrl: string, context: ImageAiContext) => Effect.Effect<{
    description: string;
  }, UploadistaError>;
};
declare const ImageAiPlugin_base: Context.TagClass<ImageAiPlugin, "ImageAiPlugin", ImageAiPluginShape>;
/**
 * Context tag for the Image AI Plugin.
 *
 * This tag provides a type-safe way to access image AI functionality
 * throughout the application using Effect's dependency injection system.
 *
 * @example
 * ```typescript
 * import { ImageAiPlugin } from "@uploadista/core/flow/plugins";
 *
 * // In your flow node
 * const program = Effect.gen(function* () {
 *   const imageAi = yield* ImageAiPlugin;
 *   const result = yield* imageAi.removeBackground(imageUrl, { clientId: "user123" });
 *   return result.outputUrl;
 * });
 * ```
 */
declare class ImageAiPlugin extends ImageAiPlugin_base {}
/** Layer providing an ImageAiPlugin implementation with no further requirements */
type ImageAiPluginLayer = Layer.Layer<ImageAiPlugin, never, never>;
|
|
2630
|
+
//#endregion
|
|
2631
|
+
//#region src/flow/plugins/types/optimize-node.d.ts
|
|
2632
|
+
/**
 * Zod schema for validating image optimization parameters.
 * Defines the structure and validation rules for image optimization requests.
 */
declare const optimizeParamsSchema: z.ZodObject<{
  quality: z.ZodNumber;
  format: z.ZodEnum<{
    webp: "webp";
    jpeg: "jpeg";
    png: "png";
    avif: "avif";
  }>;
}, z.core.$strip>;
/**
 * Parameters for the image optimization node.
 * Controls quality and format settings for image optimization.
 */
type OptimizeParams = z.infer<typeof optimizeParamsSchema>;
//#endregion
//#region src/flow/plugins/types/resize-node.d.ts
/**
 * Zod schema for validating image resize parameters.
 * Defines the structure and validation rules for image resizing requests.
 * Requires at least one dimension (width or height) to be specified.
 */
declare const resizeParamsSchema: z.ZodObject<{
  width: z.ZodOptional<z.ZodNumber>;
  height: z.ZodOptional<z.ZodNumber>;
  fit: z.ZodEnum<{
    fill: "fill";
    contain: "contain";
    cover: "cover";
  }>;
}, z.core.$strip>;
/**
 * Parameters for the image resize node.
 * Controls the target dimensions and fitting behavior for image resizing.
 */
type ResizeParams = z.infer<typeof resizeParamsSchema>;
|
|
2671
|
+
//#endregion
|
|
2672
|
+
//#region src/flow/plugins/types/transform-image-node.d.ts
|
|
2673
|
+
/**
|
|
2674
|
+
* Type of transformation to apply to an image.
|
|
2675
|
+
*/
|
|
2676
|
+
type TransformationType = "resize" | "blur" | "rotate" | "flip" | "grayscale" | "sepia" | "brightness" | "contrast" | "sharpen" | "watermark" | "logo" | "text";
|
|
2677
|
+
/**
|
|
2678
|
+
* Resize transformation parameters.
|
|
2679
|
+
* Resizes the image to the specified dimensions with the given fit mode.
|
|
2680
|
+
*/
|
|
2681
|
+
declare const resizeTransformSchema: z.ZodObject<{
|
|
2682
|
+
type: z.ZodLiteral<"resize">;
|
|
2683
|
+
width: z.ZodOptional<z.ZodNumber>;
|
|
2684
|
+
height: z.ZodOptional<z.ZodNumber>;
|
|
2685
|
+
fit: z.ZodEnum<{
|
|
2686
|
+
fill: "fill";
|
|
2687
|
+
contain: "contain";
|
|
2688
|
+
cover: "cover";
|
|
2689
|
+
}>;
|
|
2690
|
+
}, z.core.$strip>;
|
|
2691
|
+
type ResizeTransform = z.infer<typeof resizeTransformSchema>;
|
|
2692
|
+
/**
|
|
2693
|
+
* Blur transformation parameters.
|
|
2694
|
+
* Applies Gaussian blur to the image.
|
|
2695
|
+
*/
|
|
2696
|
+
declare const blurTransformSchema: z.ZodObject<{
|
|
2697
|
+
type: z.ZodLiteral<"blur">;
|
|
2698
|
+
sigma: z.ZodNumber;
|
|
2699
|
+
}, z.core.$strip>;
|
|
2700
|
+
type BlurTransform = z.infer<typeof blurTransformSchema>;
|
|
2701
|
+
/**
|
|
2702
|
+
* Rotate transformation parameters.
|
|
2703
|
+
* Rotates the image by the specified angle.
|
|
2704
|
+
*/
|
|
2705
|
+
declare const rotateTransformSchema: z.ZodObject<{
|
|
2706
|
+
type: z.ZodLiteral<"rotate">;
|
|
2707
|
+
angle: z.ZodNumber;
|
|
2708
|
+
background: z.ZodOptional<z.ZodString>;
|
|
2709
|
+
}, z.core.$strip>;
|
|
2710
|
+
type RotateTransform = z.infer<typeof rotateTransformSchema>;
|
|
2711
|
+
/**
|
|
2712
|
+
* Flip transformation parameters.
|
|
2713
|
+
* Flips the image horizontally or vertically.
|
|
2714
|
+
*/
|
|
2715
|
+
declare const flipTransformSchema: z.ZodObject<{
|
|
2716
|
+
type: z.ZodLiteral<"flip">;
|
|
2717
|
+
direction: z.ZodEnum<{
|
|
2718
|
+
horizontal: "horizontal";
|
|
2719
|
+
vertical: "vertical";
|
|
2720
|
+
}>;
|
|
2721
|
+
}, z.core.$strip>;
|
|
2722
|
+
type FlipTransform = z.infer<typeof flipTransformSchema>;
|
|
2723
|
+
/**
|
|
2724
|
+
* Grayscale transformation parameters.
|
|
2725
|
+
* Converts the image to grayscale.
|
|
2726
|
+
*/
|
|
2727
|
+
declare const grayscaleTransformSchema: z.ZodObject<{
|
|
2728
|
+
type: z.ZodLiteral<"grayscale">;
|
|
2729
|
+
}, z.core.$strip>;
|
|
2730
|
+
type GrayscaleTransform = z.infer<typeof grayscaleTransformSchema>;
|
|
2731
|
+
/**
|
|
2732
|
+
* Sepia transformation parameters.
|
|
2733
|
+
* Applies a sepia tone effect to the image.
|
|
2734
|
+
*/
|
|
2735
|
+
declare const sepiaTransformSchema: z.ZodObject<{
|
|
2736
|
+
type: z.ZodLiteral<"sepia">;
|
|
2737
|
+
}, z.core.$strip>;
|
|
2738
|
+
type SepiaTransform = z.infer<typeof sepiaTransformSchema>;
|
|
2739
|
+
/**
|
|
2740
|
+
* Brightness transformation parameters.
|
|
2741
|
+
* Adjusts the brightness of the image.
|
|
2742
|
+
*/
|
|
2743
|
+
declare const brightnessTransformSchema: z.ZodObject<{
|
|
2744
|
+
type: z.ZodLiteral<"brightness">;
|
|
2745
|
+
value: z.ZodNumber;
|
|
2746
|
+
}, z.core.$strip>;
|
|
2747
|
+
type BrightnessTransform = z.infer<typeof brightnessTransformSchema>;
|
|
2748
|
+
/**
|
|
2749
|
+
* Contrast transformation parameters.
|
|
2750
|
+
* Adjusts the contrast of the image.
|
|
2751
|
+
*/
|
|
2752
|
+
declare const contrastTransformSchema: z.ZodObject<{
|
|
2753
|
+
type: z.ZodLiteral<"contrast">;
|
|
2754
|
+
value: z.ZodNumber;
|
|
2755
|
+
}, z.core.$strip>;
|
|
2756
|
+
type ContrastTransform = z.infer<typeof contrastTransformSchema>;
|
|
2757
|
+
/**
|
|
2758
|
+
* Sharpen transformation parameters.
|
|
2759
|
+
* Applies sharpening to the image.
|
|
2760
|
+
*/
|
|
2761
|
+
declare const sharpenTransformSchema: z.ZodObject<{
|
|
2762
|
+
type: z.ZodLiteral<"sharpen">;
|
|
2763
|
+
sigma: z.ZodOptional<z.ZodNumber>;
|
|
2764
|
+
}, z.core.$strip>;
|
|
2765
|
+
type SharpenTransform = z.infer<typeof sharpenTransformSchema>;
|
|
2766
|
+
/**
|
|
2767
|
+
* Position for overlays (watermarks, logos, text).
|
|
2768
|
+
*/
|
|
2769
|
+
type OverlayPosition = "top-left" | "top-right" | "bottom-left" | "bottom-right" | "center";
|
|
2770
|
+
/**
|
|
2771
|
+
* Watermark transformation parameters.
|
|
2772
|
+
* Overlays a watermark image on the main image.
|
|
2773
|
+
*/
|
|
2774
|
+
declare const watermarkTransformSchema: z.ZodObject<{
|
|
2775
|
+
type: z.ZodLiteral<"watermark">;
|
|
2776
|
+
imagePath: z.ZodString;
|
|
2777
|
+
position: z.ZodEnum<{
|
|
2778
|
+
"top-left": "top-left";
|
|
2779
|
+
"top-right": "top-right";
|
|
2780
|
+
"bottom-left": "bottom-left";
|
|
2781
|
+
"bottom-right": "bottom-right";
|
|
2782
|
+
center: "center";
|
|
2783
|
+
}>;
|
|
2784
|
+
opacity: z.ZodNumber;
|
|
2785
|
+
offsetX: z.ZodOptional<z.ZodNumber>;
|
|
2786
|
+
offsetY: z.ZodOptional<z.ZodNumber>;
|
|
2787
|
+
}, z.core.$strip>;
|
|
2788
|
+
type WatermarkTransform = z.infer<typeof watermarkTransformSchema>;
|
|
2789
|
+
/**
|
|
2790
|
+
* Logo transformation parameters.
|
|
2791
|
+
* Overlays a logo image on the main image with scaling.
|
|
2792
|
+
*/
|
|
2793
|
+
declare const logoTransformSchema: z.ZodObject<{
|
|
2794
|
+
type: z.ZodLiteral<"logo">;
|
|
2795
|
+
imagePath: z.ZodString;
|
|
2796
|
+
position: z.ZodEnum<{
|
|
2797
|
+
"top-left": "top-left";
|
|
2798
|
+
"top-right": "top-right";
|
|
2799
|
+
"bottom-left": "bottom-left";
|
|
2800
|
+
"bottom-right": "bottom-right";
|
|
2801
|
+
center: "center";
|
|
2802
|
+
}>;
|
|
2803
|
+
scale: z.ZodNumber;
|
|
2804
|
+
offsetX: z.ZodOptional<z.ZodNumber>;
|
|
2805
|
+
offsetY: z.ZodOptional<z.ZodNumber>;
|
|
2806
|
+
}, z.core.$strip>;
|
|
2807
|
+
type LogoTransform = z.infer<typeof logoTransformSchema>;
|
|
2808
|
+
/**
|
|
2809
|
+
* Text transformation parameters.
|
|
2810
|
+
* Overlays text on the image.
|
|
2811
|
+
*/
|
|
2812
|
+
declare const textTransformSchema: z.ZodObject<{
|
|
2813
|
+
type: z.ZodLiteral<"text">;
|
|
2814
|
+
text: z.ZodString;
|
|
2815
|
+
position: z.ZodEnum<{
|
|
2816
|
+
"top-left": "top-left";
|
|
2817
|
+
"top-right": "top-right";
|
|
2818
|
+
"bottom-left": "bottom-left";
|
|
2819
|
+
"bottom-right": "bottom-right";
|
|
2820
|
+
center: "center";
|
|
2821
|
+
}>;
|
|
2822
|
+
fontSize: z.ZodNumber;
|
|
2823
|
+
color: z.ZodString;
|
|
2824
|
+
fontFamily: z.ZodOptional<z.ZodString>;
|
|
2825
|
+
offsetX: z.ZodOptional<z.ZodNumber>;
|
|
2826
|
+
offsetY: z.ZodOptional<z.ZodNumber>;
|
|
2827
|
+
}, z.core.$strip>;
|
|
2828
|
+
type TextTransform = z.infer<typeof textTransformSchema>;
|
|
2829
|
+
/**
|
|
2830
|
+
* Schema for validating any transformation type.
|
|
2831
|
+
* This is a discriminated union of all transformation schemas.
|
|
2832
|
+
*/
|
|
2833
|
+
declare const transformationSchema: z.ZodDiscriminatedUnion<[z.ZodObject<{
|
|
2834
|
+
type: z.ZodLiteral<"resize">;
|
|
2835
|
+
width: z.ZodOptional<z.ZodNumber>;
|
|
2836
|
+
height: z.ZodOptional<z.ZodNumber>;
|
|
2837
|
+
fit: z.ZodEnum<{
|
|
2838
|
+
fill: "fill";
|
|
2839
|
+
contain: "contain";
|
|
2840
|
+
cover: "cover";
|
|
2841
|
+
}>;
|
|
2842
|
+
}, z.core.$strip>, z.ZodObject<{
|
|
2843
|
+
type: z.ZodLiteral<"blur">;
|
|
2844
|
+
sigma: z.ZodNumber;
|
|
2845
|
+
}, z.core.$strip>, z.ZodObject<{
|
|
2846
|
+
type: z.ZodLiteral<"rotate">;
|
|
2847
|
+
angle: z.ZodNumber;
|
|
2848
|
+
background: z.ZodOptional<z.ZodString>;
|
|
2849
|
+
}, z.core.$strip>, z.ZodObject<{
|
|
2850
|
+
type: z.ZodLiteral<"flip">;
|
|
2851
|
+
direction: z.ZodEnum<{
|
|
2852
|
+
horizontal: "horizontal";
|
|
2853
|
+
vertical: "vertical";
|
|
2854
|
+
}>;
|
|
2855
|
+
}, z.core.$strip>, z.ZodObject<{
|
|
2856
|
+
type: z.ZodLiteral<"grayscale">;
|
|
2857
|
+
}, z.core.$strip>, z.ZodObject<{
|
|
2858
|
+
type: z.ZodLiteral<"sepia">;
|
|
2859
|
+
}, z.core.$strip>, z.ZodObject<{
|
|
2860
|
+
type: z.ZodLiteral<"brightness">;
|
|
2861
|
+
value: z.ZodNumber;
|
|
2862
|
+
}, z.core.$strip>, z.ZodObject<{
|
|
2863
|
+
type: z.ZodLiteral<"contrast">;
|
|
2864
|
+
value: z.ZodNumber;
|
|
2865
|
+
}, z.core.$strip>, z.ZodObject<{
|
|
2866
|
+
type: z.ZodLiteral<"sharpen">;
|
|
2867
|
+
sigma: z.ZodOptional<z.ZodNumber>;
|
|
2868
|
+
}, z.core.$strip>, z.ZodObject<{
|
|
2869
|
+
type: z.ZodLiteral<"watermark">;
|
|
2870
|
+
imagePath: z.ZodString;
|
|
2871
|
+
position: z.ZodEnum<{
|
|
2872
|
+
"top-left": "top-left";
|
|
2873
|
+
"top-right": "top-right";
|
|
2874
|
+
"bottom-left": "bottom-left";
|
|
2875
|
+
"bottom-right": "bottom-right";
|
|
2876
|
+
center: "center";
|
|
2877
|
+
}>;
|
|
2878
|
+
opacity: z.ZodNumber;
|
|
2879
|
+
offsetX: z.ZodOptional<z.ZodNumber>;
|
|
2880
|
+
offsetY: z.ZodOptional<z.ZodNumber>;
|
|
2881
|
+
}, z.core.$strip>, z.ZodObject<{
|
|
2882
|
+
type: z.ZodLiteral<"logo">;
|
|
2883
|
+
imagePath: z.ZodString;
|
|
2884
|
+
position: z.ZodEnum<{
|
|
2885
|
+
"top-left": "top-left";
|
|
2886
|
+
"top-right": "top-right";
|
|
2887
|
+
"bottom-left": "bottom-left";
|
|
2888
|
+
"bottom-right": "bottom-right";
|
|
2889
|
+
center: "center";
|
|
2890
|
+
}>;
|
|
2891
|
+
scale: z.ZodNumber;
|
|
2892
|
+
offsetX: z.ZodOptional<z.ZodNumber>;
|
|
2893
|
+
offsetY: z.ZodOptional<z.ZodNumber>;
|
|
2894
|
+
}, z.core.$strip>, z.ZodObject<{
|
|
2895
|
+
type: z.ZodLiteral<"text">;
|
|
2896
|
+
text: z.ZodString;
|
|
2897
|
+
position: z.ZodEnum<{
|
|
2898
|
+
"top-left": "top-left";
|
|
2899
|
+
"top-right": "top-right";
|
|
2900
|
+
"bottom-left": "bottom-left";
|
|
2901
|
+
"bottom-right": "bottom-right";
|
|
2902
|
+
center: "center";
|
|
2903
|
+
}>;
|
|
2904
|
+
fontSize: z.ZodNumber;
|
|
2905
|
+
color: z.ZodString;
|
|
2906
|
+
fontFamily: z.ZodOptional<z.ZodString>;
|
|
2907
|
+
offsetX: z.ZodOptional<z.ZodNumber>;
|
|
2908
|
+
offsetY: z.ZodOptional<z.ZodNumber>;
|
|
2909
|
+
}, z.core.$strip>], "type">;
|
|
2910
|
+
/**
|
|
2911
|
+
* A single image transformation operation.
|
|
2912
|
+
* This is a discriminated union type that can represent any transformation.
|
|
2913
|
+
*/
|
|
2914
|
+
type Transformation = z.infer<typeof transformationSchema>;
|
|
2915
|
+
/**
|
|
2916
|
+
* Parameters for the transform image node.
|
|
2917
|
+
* Contains an ordered array of transformations to apply sequentially.
|
|
2918
|
+
*/
|
|
2919
|
+
declare const transformImageParamsSchema: z.ZodObject<{
|
|
2920
|
+
transformations: z.ZodArray<z.ZodDiscriminatedUnion<[z.ZodObject<{
|
|
2921
|
+
type: z.ZodLiteral<"resize">;
|
|
2922
|
+
width: z.ZodOptional<z.ZodNumber>;
|
|
2923
|
+
height: z.ZodOptional<z.ZodNumber>;
|
|
2924
|
+
fit: z.ZodEnum<{
|
|
2925
|
+
fill: "fill";
|
|
2926
|
+
contain: "contain";
|
|
2927
|
+
cover: "cover";
|
|
2928
|
+
}>;
|
|
2929
|
+
}, z.core.$strip>, z.ZodObject<{
|
|
2930
|
+
type: z.ZodLiteral<"blur">;
|
|
2931
|
+
sigma: z.ZodNumber;
|
|
2932
|
+
}, z.core.$strip>, z.ZodObject<{
|
|
2933
|
+
type: z.ZodLiteral<"rotate">;
|
|
2934
|
+
angle: z.ZodNumber;
|
|
2935
|
+
background: z.ZodOptional<z.ZodString>;
|
|
2936
|
+
}, z.core.$strip>, z.ZodObject<{
|
|
2937
|
+
type: z.ZodLiteral<"flip">;
|
|
2938
|
+
direction: z.ZodEnum<{
|
|
2939
|
+
horizontal: "horizontal";
|
|
2940
|
+
vertical: "vertical";
|
|
2941
|
+
}>;
|
|
2942
|
+
}, z.core.$strip>, z.ZodObject<{
|
|
2943
|
+
type: z.ZodLiteral<"grayscale">;
|
|
2944
|
+
}, z.core.$strip>, z.ZodObject<{
|
|
2945
|
+
type: z.ZodLiteral<"sepia">;
|
|
2946
|
+
}, z.core.$strip>, z.ZodObject<{
|
|
2947
|
+
type: z.ZodLiteral<"brightness">;
|
|
2948
|
+
value: z.ZodNumber;
|
|
2949
|
+
}, z.core.$strip>, z.ZodObject<{
|
|
2950
|
+
type: z.ZodLiteral<"contrast">;
|
|
2951
|
+
value: z.ZodNumber;
|
|
2952
|
+
}, z.core.$strip>, z.ZodObject<{
|
|
2953
|
+
type: z.ZodLiteral<"sharpen">;
|
|
2954
|
+
sigma: z.ZodOptional<z.ZodNumber>;
|
|
2955
|
+
}, z.core.$strip>, z.ZodObject<{
|
|
2956
|
+
type: z.ZodLiteral<"watermark">;
|
|
2957
|
+
imagePath: z.ZodString;
|
|
2958
|
+
position: z.ZodEnum<{
|
|
2959
|
+
"top-left": "top-left";
|
|
2960
|
+
"top-right": "top-right";
|
|
2961
|
+
"bottom-left": "bottom-left";
|
|
2962
|
+
"bottom-right": "bottom-right";
|
|
2963
|
+
center: "center";
|
|
2964
|
+
}>;
|
|
2965
|
+
opacity: z.ZodNumber;
|
|
2966
|
+
offsetX: z.ZodOptional<z.ZodNumber>;
|
|
2967
|
+
offsetY: z.ZodOptional<z.ZodNumber>;
|
|
2968
|
+
}, z.core.$strip>, z.ZodObject<{
|
|
2969
|
+
type: z.ZodLiteral<"logo">;
|
|
2970
|
+
imagePath: z.ZodString;
|
|
2971
|
+
position: z.ZodEnum<{
|
|
2972
|
+
"top-left": "top-left";
|
|
2973
|
+
"top-right": "top-right";
|
|
2974
|
+
"bottom-left": "bottom-left";
|
|
2975
|
+
"bottom-right": "bottom-right";
|
|
2976
|
+
center: "center";
|
|
2977
|
+
}>;
|
|
2978
|
+
scale: z.ZodNumber;
|
|
2979
|
+
offsetX: z.ZodOptional<z.ZodNumber>;
|
|
2980
|
+
offsetY: z.ZodOptional<z.ZodNumber>;
|
|
2981
|
+
}, z.core.$strip>, z.ZodObject<{
|
|
2982
|
+
type: z.ZodLiteral<"text">;
|
|
2983
|
+
text: z.ZodString;
|
|
2984
|
+
position: z.ZodEnum<{
|
|
2985
|
+
"top-left": "top-left";
|
|
2986
|
+
"top-right": "top-right";
|
|
2987
|
+
"bottom-left": "bottom-left";
|
|
2988
|
+
"bottom-right": "bottom-right";
|
|
2989
|
+
center: "center";
|
|
2990
|
+
}>;
|
|
2991
|
+
fontSize: z.ZodNumber;
|
|
2992
|
+
color: z.ZodString;
|
|
2993
|
+
fontFamily: z.ZodOptional<z.ZodString>;
|
|
2994
|
+
offsetX: z.ZodOptional<z.ZodNumber>;
|
|
2995
|
+
offsetY: z.ZodOptional<z.ZodNumber>;
|
|
2996
|
+
}, z.core.$strip>], "type">>;
|
|
2997
|
+
}, z.core.$strip>;
|
|
2998
|
+
/**
|
|
2999
|
+
* Parameters for the transform image node.
|
|
3000
|
+
*/
|
|
3001
|
+
type TransformImageParams = z.infer<typeof transformImageParamsSchema>;
|
|
3002
|
+
//#endregion
|
|
3003
|
+
//#region src/flow/plugins/image-plugin.d.ts
|
|
3004
|
+
/**
|
|
3005
|
+
* Shape definition for the Image Plugin interface.
|
|
3006
|
+
* Defines the contract that all image processing implementations must follow.
|
|
3007
|
+
*/
|
|
3008
|
+
type ImagePluginShape = {
|
|
3009
|
+
/**
|
|
3010
|
+
* Optimizes an image by adjusting quality and format.
|
|
3011
|
+
*
|
|
3012
|
+
* @param input - The input image as a Uint8Array
|
|
3013
|
+
* @param options - Optimization parameters including quality and format
|
|
3014
|
+
* @returns An Effect that resolves to the optimized image as a Uint8Array
|
|
3015
|
+
* @throws {UploadistaError} When image optimization fails
|
|
3016
|
+
*/
|
|
3017
|
+
optimize: (input: Uint8Array, options: OptimizeParams) => Effect.Effect<Uint8Array, UploadistaError>;
|
|
3018
|
+
/**
|
|
3019
|
+
* Resizes an image to specified dimensions.
|
|
3020
|
+
*
|
|
3021
|
+
* @param input - The input image as a Uint8Array
|
|
3022
|
+
* @param options - Resize parameters including width, height, and fit mode
|
|
3023
|
+
* @returns An Effect that resolves to the resized image as a Uint8Array
|
|
3024
|
+
* @throws {UploadistaError} When image resizing fails
|
|
3025
|
+
*/
|
|
3026
|
+
resize: (input: Uint8Array, options: ResizeParams) => Effect.Effect<Uint8Array, UploadistaError>;
|
|
3027
|
+
/**
|
|
3028
|
+
* Applies a single transformation to an image.
|
|
3029
|
+
*
|
|
3030
|
+
* This method is used by the transform image node to apply individual transformations
|
|
3031
|
+
* in a chain. Each transformation receives the output of the previous transformation.
|
|
3032
|
+
*
|
|
3033
|
+
* @param input - The input image as a Uint8Array
|
|
3034
|
+
* @param transformation - The transformation to apply (discriminated union)
|
|
3035
|
+
* @returns An Effect that resolves to the transformed image as a Uint8Array
|
|
3036
|
+
* @throws {UploadistaError} When transformation fails or is unsupported by the plugin
|
|
3037
|
+
*
|
|
3038
|
+
* @example
|
|
3039
|
+
* ```typescript
|
|
3040
|
+
* const program = Effect.gen(function* () {
|
|
3041
|
+
* const imagePlugin = yield* ImagePlugin;
|
|
3042
|
+
*
|
|
3043
|
+
* // Apply a single transformation
|
|
3044
|
+
* const blurred = yield* imagePlugin.transform(imageData, {
|
|
3045
|
+
* type: 'blur',
|
|
3046
|
+
* sigma: 5.0
|
|
3047
|
+
* });
|
|
3048
|
+
*
|
|
3049
|
+
* // Chain multiple transformations
|
|
3050
|
+
* const resized = yield* imagePlugin.transform(blurred, {
|
|
3051
|
+
* type: 'resize',
|
|
3052
|
+
* width: 800,
|
|
3053
|
+
* height: 600,
|
|
3054
|
+
* fit: 'cover'
|
|
3055
|
+
* });
|
|
3056
|
+
*
|
|
3057
|
+
* return resized;
|
|
3058
|
+
* });
|
|
3059
|
+
* ```
|
|
3060
|
+
*/
|
|
3061
|
+
transform: (input: Uint8Array, transformation: Transformation) => Effect.Effect<Uint8Array, UploadistaError>;
|
|
3062
|
+
/**
|
|
3063
|
+
* Optimizes an image using streaming for memory-efficient processing of large files.
|
|
3064
|
+
*
|
|
3065
|
+
* This method processes image data as a stream, which is beneficial for large images
|
|
3066
|
+
* where loading the entire file into memory would be problematic.
|
|
3067
|
+
*
|
|
3068
|
+
* Note: Image processing inherently requires decoding the full image, so memory
|
|
3069
|
+
* savings are primarily from avoiding double-buffering. The streaming interface
|
|
3070
|
+
* allows better pipeline integration with DataStore streaming reads.
|
|
3071
|
+
*
|
|
3072
|
+
* @param input - The input image as an Effect Stream of Uint8Array chunks
|
|
3073
|
+
* @param options - Optimization parameters including quality and format
|
|
3074
|
+
* @returns An Effect that resolves to a Stream of the optimized image bytes
|
|
3075
|
+
* @throws {UploadistaError} When image optimization fails
|
|
3076
|
+
*
|
|
3077
|
+
* @example
|
|
3078
|
+
* ```typescript
|
|
3079
|
+
* const program = Effect.gen(function* () {
|
|
3080
|
+
* const imagePlugin = yield* ImagePlugin;
|
|
3081
|
+
* const inputStream = yield* dataStore.readStream(fileId);
|
|
3082
|
+
* const outputStream = yield* imagePlugin.optimizeStream(inputStream, {
|
|
3083
|
+
* quality: 80,
|
|
3084
|
+
* format: "webp"
|
|
3085
|
+
* });
|
|
3086
|
+
* return outputStream;
|
|
3087
|
+
* });
|
|
3088
|
+
* ```
|
|
3089
|
+
*/
|
|
3090
|
+
optimizeStream?: (input: Stream.Stream<Uint8Array, UploadistaError>, options: OptimizeParams) => Effect.Effect<Stream.Stream<Uint8Array, UploadistaError>, UploadistaError>;
|
|
3091
|
+
/**
|
|
3092
|
+
* Resizes an image using streaming for memory-efficient processing of large files.
|
|
3093
|
+
*
|
|
3094
|
+
* This method processes image data as a stream. Like other image operations,
|
|
3095
|
+
* the full image must be decoded before processing, but the streaming interface
|
|
3096
|
+
* avoids double-buffering when combined with streaming DataStore reads and writes.
|
|
3097
|
+
*
|
|
3098
|
+
* @param input - The input image as an Effect Stream of Uint8Array chunks
|
|
3099
|
+
* @param options - Resize parameters including width, height, and fit mode
|
|
3100
|
+
* @returns An Effect that resolves to a Stream of the resized image bytes
|
|
3101
|
+
* @throws {UploadistaError} When image resizing fails
|
|
3102
|
+
*
|
|
3103
|
+
* @example
|
|
3104
|
+
* ```typescript
|
|
3105
|
+
* const program = Effect.gen(function* () {
|
|
3106
|
+
* const imagePlugin = yield* ImagePlugin;
|
|
3107
|
+
* const inputStream = yield* dataStore.readStream(fileId);
|
|
3108
|
+
* const outputStream = yield* imagePlugin.resizeStream(inputStream, {
|
|
3109
|
+
* width: 800,
|
|
3110
|
+
* height: 600,
|
|
3111
|
+
* fit: "cover"
|
|
3112
|
+
* });
|
|
3113
|
+
* return outputStream;
|
|
3114
|
+
* });
|
|
3115
|
+
* ```
|
|
3116
|
+
*/
|
|
3117
|
+
resizeStream?: (input: Stream.Stream<Uint8Array, UploadistaError>, options: ResizeParams) => Effect.Effect<Stream.Stream<Uint8Array, UploadistaError>, UploadistaError>;
|
|
3118
|
+
/**
|
|
3119
|
+
* Applies a single transformation using streaming for memory-efficient processing.
|
|
3120
|
+
*
|
|
3121
|
+
* This method processes image data as a stream. The streaming interface
|
|
3122
|
+
* allows better pipeline integration with DataStore streaming reads and writes,
|
|
3123
|
+
* reducing peak memory usage for large files.
|
|
3124
|
+
*
|
|
3125
|
+
* @param input - The input image as an Effect Stream of Uint8Array chunks
|
|
3126
|
+
* @param transformation - The transformation to apply
|
|
3127
|
+
* @returns An Effect that resolves to a Stream of the transformed image bytes
|
|
3128
|
+
* @throws {UploadistaError} When transformation fails
|
|
3129
|
+
*
|
|
3130
|
+
* @example
|
|
3131
|
+
* ```typescript
|
|
3132
|
+
* const program = Effect.gen(function* () {
|
|
3133
|
+
* const imagePlugin = yield* ImagePlugin;
|
|
3134
|
+
* const inputStream = yield* dataStore.readStream(fileId);
|
|
3135
|
+
* const outputStream = yield* imagePlugin.transformStream(inputStream, {
|
|
3136
|
+
* type: 'blur',
|
|
3137
|
+
* sigma: 5.0
|
|
3138
|
+
* });
|
|
3139
|
+
* return outputStream;
|
|
3140
|
+
* });
|
|
3141
|
+
* ```
|
|
3142
|
+
*/
|
|
3143
|
+
transformStream?: (input: Stream.Stream<Uint8Array, UploadistaError>, transformation: Transformation) => Effect.Effect<Stream.Stream<Uint8Array, UploadistaError>, UploadistaError>;
|
|
3144
|
+
/**
|
|
3145
|
+
* Indicates whether this plugin supports streaming operations.
|
|
3146
|
+
* Returns true if streaming methods (optimizeStream, resizeStream, transformStream) are available.
|
|
3147
|
+
*/
|
|
3148
|
+
supportsStreaming?: boolean;
|
|
3149
|
+
};
|
|
3150
|
+
declare const ImagePlugin_base: Context.TagClass<ImagePlugin, "ImagePlugin", ImagePluginShape>;
|
|
3151
|
+
/**
|
|
3152
|
+
* Context tag for the Image Plugin.
|
|
3153
|
+
*
|
|
3154
|
+
* This tag provides a type-safe way to access image processing functionality
|
|
3155
|
+
* throughout the application using Effect's dependency injection system.
|
|
3156
|
+
*
|
|
3157
|
+
* @example
|
|
3158
|
+
* ```typescript
|
|
3159
|
+
* import { ImagePlugin } from "@uploadista/core/flow/plugins";
|
|
3160
|
+
*
|
|
3161
|
+
* // In your flow node
|
|
3162
|
+
* const program = Effect.gen(function* () {
|
|
3163
|
+
* const imagePlugin = yield* ImagePlugin;
|
|
3164
|
+
* const optimized = yield* imagePlugin.optimize(imageData, { quality: 80, format: "webp" });
|
|
3165
|
+
* const resized = yield* imagePlugin.resize(optimized, { width: 800, height: 600, fit: "cover" });
|
|
3166
|
+
* return resized;
|
|
3167
|
+
* });
|
|
3168
|
+
* ```
|
|
3169
|
+
*/
|
|
3170
|
+
declare class ImagePlugin extends ImagePlugin_base {}
|
|
3171
|
+
type ImagePluginLayer = Layer.Layer<ImagePlugin, never, never>;
|
|
3172
|
+
//#endregion
|
|
3173
|
+
//#region src/flow/plugins/types/describe-video-node.d.ts
|
|
3174
|
+
/**
|
|
3175
|
+
* Zod schema for video metadata extracted by the describe operation.
|
|
3176
|
+
* Defines the structure and validation rules for video metadata.
|
|
3177
|
+
*/
|
|
3178
|
+
declare const describeVideoMetadataSchema: z.ZodObject<{
|
|
3179
|
+
duration: z.ZodNumber;
|
|
3180
|
+
width: z.ZodNumber;
|
|
3181
|
+
height: z.ZodNumber;
|
|
3182
|
+
codec: z.ZodString;
|
|
3183
|
+
format: z.ZodString;
|
|
3184
|
+
bitrate: z.ZodNumber;
|
|
3185
|
+
frameRate: z.ZodNumber;
|
|
3186
|
+
aspectRatio: z.ZodString;
|
|
3187
|
+
hasAudio: z.ZodBoolean;
|
|
3188
|
+
audioCodec: z.ZodOptional<z.ZodString>;
|
|
3189
|
+
audioBitrate: z.ZodOptional<z.ZodNumber>;
|
|
3190
|
+
size: z.ZodNumber;
|
|
3191
|
+
}, z.core.$strip>;
|
|
3192
|
+
/**
|
|
3193
|
+
* Video metadata extracted by the describe operation.
|
|
3194
|
+
* Contains comprehensive information about video properties, codecs, and audio.
|
|
3195
|
+
*/
|
|
3196
|
+
type DescribeVideoMetadata = z.infer<typeof describeVideoMetadataSchema>;
|
|
3197
|
+
//#endregion
|
|
3198
|
+
//#region src/flow/plugins/types/extract-frame-video-node.d.ts
|
|
3199
|
+
/**
|
|
3200
|
+
* Zod schema for validating video frame extraction parameters.
|
|
3201
|
+
* Defines the structure and validation rules for extracting a single frame from video.
|
|
3202
|
+
*/
|
|
3203
|
+
declare const extractFrameVideoParamsSchema: z.ZodObject<{
|
|
3204
|
+
timestamp: z.ZodNumber;
|
|
3205
|
+
format: z.ZodOptional<z.ZodEnum<{
|
|
3206
|
+
jpeg: "jpeg";
|
|
3207
|
+
png: "png";
|
|
3208
|
+
}>>;
|
|
3209
|
+
quality: z.ZodOptional<z.ZodNumber>;
|
|
3210
|
+
}, z.core.$strip>;
|
|
3211
|
+
/**
|
|
3212
|
+
* Parameters for the video frame extraction node.
|
|
3213
|
+
* Controls the timestamp and output format for extracting a single frame from video.
|
|
3214
|
+
*/
|
|
3215
|
+
type ExtractFrameVideoParams = z.infer<typeof extractFrameVideoParamsSchema>;
|
|
3216
|
+
//#endregion
|
|
3217
|
+
//#region src/flow/plugins/types/resize-video-node.d.ts
|
|
3218
|
+
/**
|
|
3219
|
+
* Zod schema for validating video resize parameters.
|
|
3220
|
+
* Defines the structure and validation rules for video resolution changes.
|
|
3221
|
+
* Requires at least one dimension (width or height) to be specified.
|
|
3222
|
+
*/
|
|
3223
|
+
declare const resizeVideoParamsSchema: z.ZodObject<{
|
|
3224
|
+
width: z.ZodOptional<z.ZodNumber>;
|
|
3225
|
+
height: z.ZodOptional<z.ZodNumber>;
|
|
3226
|
+
aspectRatio: z.ZodOptional<z.ZodEnum<{
|
|
3227
|
+
keep: "keep";
|
|
3228
|
+
ignore: "ignore";
|
|
3229
|
+
}>>;
|
|
3230
|
+
scaling: z.ZodOptional<z.ZodEnum<{
|
|
3231
|
+
bicubic: "bicubic";
|
|
3232
|
+
bilinear: "bilinear";
|
|
3233
|
+
lanczos: "lanczos";
|
|
3234
|
+
}>>;
|
|
3235
|
+
}, z.core.$strip>;
|
|
3236
|
+
/**
|
|
3237
|
+
* Parameters for the video resize node.
|
|
3238
|
+
* Controls the target dimensions and aspect ratio handling for video resizing.
|
|
3239
|
+
*/
|
|
3240
|
+
type ResizeVideoParams = z.infer<typeof resizeVideoParamsSchema>;
|
|
3241
|
+
//#endregion
|
|
3242
|
+
//#region src/flow/plugins/types/transcode-video-node.d.ts
|
|
3243
|
+
/**
|
|
3244
|
+
* Zod schema for validating video transcode parameters.
|
|
3245
|
+
* Defines the structure and validation rules for video format and codec conversion.
|
|
3246
|
+
*/
|
|
3247
|
+
declare const transcodeVideoParamsSchema: z.ZodObject<{
|
|
3248
|
+
format: z.ZodEnum<{
|
|
3249
|
+
mp4: "mp4";
|
|
3250
|
+
webm: "webm";
|
|
3251
|
+
mov: "mov";
|
|
3252
|
+
avi: "avi";
|
|
3253
|
+
}>;
|
|
3254
|
+
codec: z.ZodOptional<z.ZodEnum<{
|
|
3255
|
+
h264: "h264";
|
|
3256
|
+
h265: "h265";
|
|
3257
|
+
vp9: "vp9";
|
|
3258
|
+
av1: "av1";
|
|
3259
|
+
}>>;
|
|
3260
|
+
videoBitrate: z.ZodOptional<z.ZodString>;
|
|
3261
|
+
audioBitrate: z.ZodOptional<z.ZodString>;
|
|
3262
|
+
audioCodec: z.ZodOptional<z.ZodEnum<{
|
|
3263
|
+
aac: "aac";
|
|
3264
|
+
mp3: "mp3";
|
|
3265
|
+
opus: "opus";
|
|
3266
|
+
vorbis: "vorbis";
|
|
3267
|
+
}>>;
|
|
3268
|
+
}, z.core.$strip>;
|
|
3269
|
+
/**
|
|
3270
|
+
* Parameters for the video transcode node.
|
|
3271
|
+
* Controls output format, codecs, and quality settings for video transcoding.
|
|
3272
|
+
*/
|
|
3273
|
+
type TranscodeVideoParams = z.infer<typeof transcodeVideoParamsSchema>;
|
|
3274
|
+
//#endregion
|
|
3275
|
+
//#region src/flow/plugins/types/trim-video-node.d.ts
|
|
3276
|
+
/**
|
|
3277
|
+
* Zod schema for validating video trim parameters.
|
|
3278
|
+
* Defines the structure and validation rules for extracting video segments.
|
|
3279
|
+
*/
|
|
3280
|
+
declare const trimVideoParamsSchema: z.ZodObject<{
|
|
3281
|
+
startTime: z.ZodNumber;
|
|
3282
|
+
endTime: z.ZodOptional<z.ZodNumber>;
|
|
3283
|
+
duration: z.ZodOptional<z.ZodNumber>;
|
|
3284
|
+
}, z.core.$strip>;
|
|
3285
|
+
/**
|
|
3286
|
+
* Parameters for the video trim node.
|
|
3287
|
+
* Controls the time range for extracting video segments.
|
|
3288
|
+
*/
|
|
3289
|
+
type TrimVideoParams = z.infer<typeof trimVideoParamsSchema>;
|
|
3290
|
+
//#endregion
|
|
3291
|
+
//#region src/flow/plugins/video-plugin.d.ts
|
|
3292
|
+
/**
|
|
3293
|
+
* Input type for streaming video operations.
|
|
3294
|
+
* Accepts either buffered input (Uint8Array) or streaming input (Effect Stream).
|
|
3295
|
+
* Streaming input is only supported for specific formats like MPEG-TS.
|
|
3296
|
+
*/
|
|
3297
|
+
type VideoStreamInput = Uint8Array | Stream.Stream<Uint8Array, UploadistaError>;
|
|
3298
|
+
/**
|
|
3299
|
+
* Options for streaming video operations.
|
|
3300
|
+
*/
|
|
3301
|
+
type VideoStreamOptions = {
|
|
3302
|
+
/**
|
|
3303
|
+
* Hint for input format to help determine if streaming input is possible.
|
|
3304
|
+
* MPEG-TS format supports true streaming input; other formats require buffering.
|
|
3305
|
+
*/
|
|
3306
|
+
inputFormat?: string;
|
|
3307
|
+
};
|
|
3308
|
+
/**
|
|
3309
|
+
* Shape definition for the Video Plugin interface.
|
|
3310
|
+
* Defines the contract that all video processing implementations must follow.
|
|
3311
|
+
*/
|
|
3312
|
+
type VideoPluginShape = {
|
|
3313
|
+
/**
|
|
3314
|
+
* Transcodes a video to a different format/codec.
|
|
3315
|
+
*
|
|
3316
|
+
* @param input - The input video as a Uint8Array
|
|
3317
|
+
* @param options - Transcode parameters including format, codec, and bitrates
|
|
3318
|
+
* @returns An Effect that resolves to the transcoded video as a Uint8Array
|
|
3319
|
+
* @throws {UploadistaError} When video transcoding fails
|
|
3320
|
+
*/
|
|
3321
|
+
transcode: (input: Uint8Array, options: TranscodeVideoParams) => Effect.Effect<Uint8Array, UploadistaError>;
|
|
3322
|
+
/**
|
|
3323
|
+
* Resizes a video to specified dimensions.
|
|
3324
|
+
*
|
|
3325
|
+
* @param input - The input video as a Uint8Array
|
|
3326
|
+
* @param options - Resize parameters including width, height, and aspect ratio handling
|
|
3327
|
+
* @returns An Effect that resolves to the resized video as a Uint8Array
|
|
3328
|
+
* @throws {UploadistaError} When video resizing fails
|
|
3329
|
+
*/
|
|
3330
|
+
resize: (input: Uint8Array, options: ResizeVideoParams) => Effect.Effect<Uint8Array, UploadistaError>;
|
|
3331
|
+
/**
|
|
3332
|
+
* Trims a video to extract a segment by time range.
|
|
3333
|
+
*
|
|
3334
|
+
* @param input - The input video as a Uint8Array
|
|
3335
|
+
* @param options - Trim parameters including start time and end time/duration
|
|
3336
|
+
* @returns An Effect that resolves to the trimmed video as a Uint8Array
|
|
3337
|
+
* @throws {UploadistaError} When video trimming fails
|
|
3338
|
+
*/
|
|
3339
|
+
trim: (input: Uint8Array, options: TrimVideoParams) => Effect.Effect<Uint8Array, UploadistaError>;
|
|
3340
|
+
/**
|
|
3341
|
+
* Extracts a single frame from the video at a specific timestamp.
|
|
3342
|
+
*
|
|
3343
|
+
* @param input - The input video as a Uint8Array
|
|
3344
|
+
* @param options - Frame extraction parameters including timestamp and format
|
|
3345
|
+
* @returns An Effect that resolves to the extracted frame as a Uint8Array (image)
|
|
3346
|
+
* @throws {UploadistaError} When frame extraction fails
|
|
3347
|
+
*/
|
|
3348
|
+
extractFrame: (input: Uint8Array, options: ExtractFrameVideoParams) => Effect.Effect<Uint8Array, UploadistaError>;
|
|
3349
|
+
/**
|
|
3350
|
+
* Extracts metadata from a video file.
|
|
3351
|
+
*
|
|
3352
|
+
* @param input - The input video as a Uint8Array
|
|
3353
|
+
* @returns An Effect that resolves to VideoMetadata with comprehensive video information
|
|
3354
|
+
* @throws {UploadistaError} When metadata extraction fails
|
|
3355
|
+
*/
|
|
3356
|
+
describe: (input: Uint8Array) => Effect.Effect<DescribeVideoMetadata, UploadistaError>;
|
|
3357
|
+
/**
|
|
3358
|
+
* Transcodes a video using streaming for memory-efficient processing of large files.
|
|
3359
|
+
*
|
|
3360
|
+
* This method outputs the transcoded video as a stream, reducing peak memory usage.
|
|
3361
|
+
* For input, it accepts either a buffered Uint8Array or a Stream. Streaming input
|
|
3362
|
+
* is only supported for MPEG-TS format; other formats will be buffered internally.
|
|
3363
|
+
*
|
|
3364
|
+
* @param input - The input video as Uint8Array or Stream (MPEG-TS only for streaming)
|
|
3365
|
+
* @param options - Transcode parameters including format, codec, and bitrates
|
|
3366
|
+
* @param streamOptions - Optional streaming configuration including input format hint
|
|
3367
|
+
* @returns An Effect that resolves to a Stream of the transcoded video bytes
|
|
3368
|
+
* @throws {UploadistaError} When video transcoding fails
|
|
3369
|
+
*
|
|
3370
|
+
* @example
|
|
3371
|
+
* ```typescript
|
|
3372
|
+
* const program = Effect.gen(function* () {
|
|
3373
|
+
* const videoPlugin = yield* VideoPlugin;
|
|
3374
|
+
* const inputStream = yield* dataStore.readStream(fileId);
|
|
3375
|
+
* const outputStream = yield* videoPlugin.transcodeStream(inputStream, {
|
|
3376
|
+
* format: "mp4",
|
|
3377
|
+
* codec: "h264"
|
|
3378
|
+
* }, { inputFormat: "video/mp2t" });
|
|
3379
|
+
* return outputStream;
|
|
3380
|
+
* });
|
|
3381
|
+
* ```
|
|
3382
|
+
*/
|
|
3383
|
+
transcodeStream?: (input: VideoStreamInput, options: TranscodeVideoParams, streamOptions?: VideoStreamOptions) => Effect.Effect<Stream.Stream<Uint8Array, UploadistaError>, UploadistaError>;
|
|
3384
|
+
/**
|
|
3385
|
+
* Resizes a video using streaming for memory-efficient processing of large files.
|
|
3386
|
+
*
|
|
3387
|
+
* This method outputs the resized video as a stream, reducing peak memory usage.
|
|
3388
|
+
* For input, it accepts either a buffered Uint8Array or a Stream. Streaming input
|
|
3389
|
+
* is only supported for MPEG-TS format; other formats will be buffered internally.
|
|
3390
|
+
*
|
|
3391
|
+
* @param input - The input video as Uint8Array or Stream (MPEG-TS only for streaming)
|
|
3392
|
+
* @param options - Resize parameters including width, height, and aspect ratio
|
|
3393
|
+
* @param streamOptions - Optional streaming configuration including input format hint
|
|
3394
|
+
* @returns An Effect that resolves to a Stream of the resized video bytes
|
|
3395
|
+
* @throws {UploadistaError} When video resizing fails
|
|
3396
|
+
*
|
|
3397
|
+
* @example
|
|
3398
|
+
* ```typescript
|
|
3399
|
+
* const program = Effect.gen(function* () {
|
|
3400
|
+
* const videoPlugin = yield* VideoPlugin;
|
|
3401
|
+
* const inputStream = yield* dataStore.readStream(fileId);
|
|
3402
|
+
* const outputStream = yield* videoPlugin.resizeStream(inputStream, {
|
|
3403
|
+
* width: 1280,
|
|
3404
|
+
* height: 720,
|
|
3405
|
+
* aspectRatio: "keep"
|
|
3406
|
+
* });
|
|
3407
|
+
* return outputStream;
|
|
3408
|
+
* });
|
|
3409
|
+
* ```
|
|
3410
|
+
*/
|
|
3411
|
+
resizeStream?: (input: VideoStreamInput, options: ResizeVideoParams, streamOptions?: VideoStreamOptions) => Effect.Effect<Stream.Stream<Uint8Array, UploadistaError>, UploadistaError>;
|
|
3412
|
+
/**
|
|
3413
|
+
* Trims a video using streaming for memory-efficient processing of large files.
|
|
3414
|
+
*
|
|
3415
|
+
* This method outputs the trimmed video as a stream, reducing peak memory usage.
|
|
3416
|
+
* For input, it accepts either a buffered Uint8Array or a Stream. Streaming input
|
|
3417
|
+
* is only supported for MPEG-TS format; other formats will be buffered internally.
|
|
3418
|
+
*
|
|
3419
|
+
* @param input - The input video as Uint8Array or Stream (MPEG-TS only for streaming)
|
|
3420
|
+
* @param options - Trim parameters including start time and end time/duration
|
|
3421
|
+
* @param streamOptions - Optional streaming configuration including input format hint
|
|
3422
|
+
* @returns An Effect that resolves to a Stream of the trimmed video bytes
|
|
3423
|
+
* @throws {UploadistaError} When video trimming fails
|
|
3424
|
+
*
|
|
3425
|
+
* @example
|
|
3426
|
+
* ```typescript
|
|
3427
|
+
* const program = Effect.gen(function* () {
|
|
3428
|
+
* const videoPlugin = yield* VideoPlugin;
|
|
3429
|
+
* const inputStream = yield* dataStore.readStream(fileId);
|
|
3430
|
+
* const outputStream = yield* videoPlugin.trimStream(inputStream, {
|
|
3431
|
+
* startTime: 10,
|
|
3432
|
+
* endTime: 30
|
|
3433
|
+
* });
|
|
3434
|
+
* return outputStream;
|
|
3435
|
+
* });
|
|
3436
|
+
* ```
|
|
3437
|
+
*/
|
|
3438
|
+
trimStream?: (input: VideoStreamInput, options: TrimVideoParams, streamOptions?: VideoStreamOptions) => Effect.Effect<Stream.Stream<Uint8Array, UploadistaError>, UploadistaError>;
|
|
3439
|
+
/**
|
|
3440
|
+
* Indicates whether this plugin supports streaming operations.
|
|
3441
|
+
* Returns true if streaming methods are available and functional.
|
|
3442
|
+
*/
|
|
3443
|
+
supportsStreaming?: boolean;
|
|
3444
|
+
};
|
|
3445
|
+
declare const VideoPlugin_base: Context.TagClass<VideoPlugin, "VideoPlugin", VideoPluginShape>;
|
|
3446
|
+
/**
|
|
3447
|
+
* Context tag for the Video Plugin.
|
|
3448
|
+
*
|
|
3449
|
+
* This tag provides a type-safe way to access video processing functionality
|
|
3450
|
+
* throughout the application using Effect's dependency injection system.
|
|
3451
|
+
*
|
|
3452
|
+
* @example
|
|
3453
|
+
* ```typescript
|
|
3454
|
+
* import { VideoPlugin } from "@uploadista/core/flow/plugins";
|
|
3455
|
+
*
|
|
3456
|
+
* // In your flow node
|
|
3457
|
+
* const program = Effect.gen(function* () {
|
|
3458
|
+
* const videoPlugin = yield* VideoPlugin;
|
|
3459
|
+
* const transcoded = yield* videoPlugin.transcode(videoData, { format: "webm", codec: "vp9" });
|
|
3460
|
+
* const resized = yield* videoPlugin.resize(transcoded, { width: 1280, height: 720, aspectRatio: "keep" });
|
|
3461
|
+
* return resized;
|
|
3462
|
+
* });
|
|
3463
|
+
* ```
|
|
3464
|
+
*/
|
|
3465
|
+
declare class VideoPlugin extends VideoPlugin_base {}
|
|
3466
|
+
type VideoPluginLayer = Layer.Layer<VideoPlugin, never, never>;
|
|
3467
|
+
//#endregion
|
|
3468
|
+
//#region src/flow/plugins/virus-scan-plugin.d.ts
|
|
3469
|
+
/**
|
|
3470
|
+
* Result of a virus scan operation.
|
|
3471
|
+
*/
|
|
3472
|
+
type ScanResult = {
|
|
3473
|
+
/**
|
|
3474
|
+
* Whether the file is clean (no viruses detected)
|
|
3475
|
+
*/
|
|
3476
|
+
isClean: boolean;
|
|
3477
|
+
/**
|
|
3478
|
+
* Array of detected virus/malware names (empty if clean)
|
|
3479
|
+
*/
|
|
3480
|
+
detectedViruses: string[];
|
|
3481
|
+
};
|
|
3482
|
+
/**
|
|
3483
|
+
* Comprehensive metadata about a virus scan operation.
|
|
3484
|
+
*/
|
|
3485
|
+
type ScanMetadata = {
|
|
3486
|
+
/**
|
|
3487
|
+
* Whether the file was scanned
|
|
3488
|
+
*/
|
|
3489
|
+
scanned: boolean;
|
|
3490
|
+
/**
|
|
3491
|
+
* Whether the file is clean (no viruses detected)
|
|
3492
|
+
*/
|
|
3493
|
+
isClean: boolean;
|
|
3494
|
+
/**
|
|
3495
|
+
* Array of detected virus/malware names (empty if clean)
|
|
3496
|
+
*/
|
|
3497
|
+
detectedViruses: string[];
|
|
3498
|
+
/**
|
|
3499
|
+
* ISO 8601 timestamp of when the scan was performed
|
|
3500
|
+
*/
|
|
3501
|
+
scanDate: string;
|
|
3502
|
+
/**
|
|
3503
|
+
* Version of the antivirus engine used
|
|
3504
|
+
*/
|
|
3505
|
+
engineVersion: string;
|
|
3506
|
+
/**
|
|
3507
|
+
* ISO 8601 timestamp of when virus definitions were last updated
|
|
3508
|
+
*/
|
|
3509
|
+
definitionsDate: string;
|
|
3510
|
+
};
|
|
3511
|
+
/**
|
|
3512
|
+
* Shape definition for the Virus Scan Plugin interface.
|
|
3513
|
+
* Defines the contract that all virus scanning implementations must follow.
|
|
3514
|
+
*/
|
|
3515
|
+
type VirusScanPluginShape = {
|
|
3516
|
+
/**
|
|
3517
|
+
* Scans a file for viruses and malware.
|
|
3518
|
+
*
|
|
3519
|
+
* @param input - The input file as a Uint8Array
|
|
3520
|
+
* @returns An Effect that resolves to ScanResult with detection information
|
|
3521
|
+
* @throws {UploadistaError} When virus scanning fails or ClamAV is unavailable
|
|
3522
|
+
*
|
|
3523
|
+
* @example
|
|
3524
|
+
* ```typescript
|
|
3525
|
+
* const program = Effect.gen(function* () {
|
|
3526
|
+
* const virusScanPlugin = yield* VirusScanPlugin;
|
|
3527
|
+
* const result = yield* virusScanPlugin.scan(fileData);
|
|
3528
|
+
* if (!result.isClean) {
|
|
3529
|
+
* console.log('Viruses detected:', result.detectedViruses);
|
|
3530
|
+
* }
|
|
3531
|
+
* });
|
|
3532
|
+
* ```
|
|
3533
|
+
*/
|
|
3534
|
+
scan: (input: Uint8Array) => Effect.Effect<ScanResult, UploadistaError>;
|
|
3535
|
+
/**
|
|
3536
|
+
* Retrieves the version of the antivirus engine.
|
|
3537
|
+
*
|
|
3538
|
+
* @returns An Effect that resolves to the engine version string
|
|
3539
|
+
* @throws {UploadistaError} When version retrieval fails
|
|
3540
|
+
*
|
|
3541
|
+
* @example
|
|
3542
|
+
* ```typescript
|
|
3543
|
+
* const program = Effect.gen(function* () {
|
|
3544
|
+
* const virusScanPlugin = yield* VirusScanPlugin;
|
|
3545
|
+
* const version = yield* virusScanPlugin.getVersion();
|
|
3546
|
+
* console.log('ClamAV version:', version);
|
|
3547
|
+
* });
|
|
3548
|
+
* ```
|
|
3549
|
+
*/
|
|
3550
|
+
getVersion: () => Effect.Effect<string, UploadistaError>;
|
|
3551
|
+
};
|
|
3552
|
+
declare const VirusScanPlugin_base: Context.TagClass<VirusScanPlugin, "VirusScanPlugin", VirusScanPluginShape>;
|
|
3553
|
+
/**
|
|
3554
|
+
* Context tag for the Virus Scan Plugin.
|
|
3555
|
+
*
|
|
3556
|
+
* This tag provides a type-safe way to access virus scanning functionality
|
|
3557
|
+
* throughout the application using Effect's dependency injection system.
|
|
3558
|
+
*
|
|
3559
|
+
* @example
|
|
3560
|
+
* ```typescript
|
|
3561
|
+
* import { VirusScanPlugin } from "@uploadista/core/flow/plugins";
|
|
3562
|
+
*
|
|
3563
|
+
* // In your flow node
|
|
3564
|
+
* const program = Effect.gen(function* () {
|
|
3565
|
+
* const virusScanPlugin = yield* VirusScanPlugin;
|
|
3566
|
+
* const result = yield* virusScanPlugin.scan(fileData);
|
|
3567
|
+
*
|
|
3568
|
+
* if (!result.isClean) {
|
|
3569
|
+
* // Handle infected file
|
|
3570
|
+
* return Effect.fail(new UploadistaError({
|
|
3571
|
+
* code: "VIRUS_DETECTED",
|
|
3572
|
+
* message: `Viruses detected: ${result.detectedViruses.join(', ')}`
|
|
3573
|
+
* }));
|
|
3574
|
+
* }
|
|
3575
|
+
*
|
|
3576
|
+
* return fileData;
|
|
3577
|
+
* });
|
|
3578
|
+
* ```
|
|
3579
|
+
*/
|
|
3580
|
+
declare class VirusScanPlugin extends VirusScanPlugin_base {}
|
|
3581
|
+
type VirusScanPluginLayer = Layer.Layer<VirusScanPlugin, never, never>;
|
|
3582
|
+
//#endregion
|
|
3583
|
+
//#region src/flow/plugins/zip-plugin.d.ts
|
|
3584
|
+
/**
|
|
3585
|
+
* Parameters for creating a ZIP archive.
|
|
3586
|
+
*/
|
|
3587
|
+
type ZipParams = {
|
|
3588
|
+
/** Name of the ZIP file to create */zipName: string; /** Whether to include file metadata in the ZIP archive */
|
|
3589
|
+
includeMetadata: boolean;
|
|
3590
|
+
};
|
|
3591
|
+
/**
|
|
3592
|
+
* Input data structure for ZIP operations.
|
|
3593
|
+
* Represents a single file to be included in the ZIP archive.
|
|
3594
|
+
*/
|
|
3595
|
+
type ZipInput = {
|
|
3596
|
+
/** Unique identifier for the file */id: string; /** Binary data of the file */
|
|
3597
|
+
data: Uint8Array; /** File metadata including name, size, type, etc. */
|
|
3598
|
+
metadata: UploadFile["metadata"];
|
|
3599
|
+
};
|
|
3600
|
+
/**
|
|
3601
|
+
* Shape definition for the ZIP Plugin interface.
|
|
3602
|
+
* Defines the contract that all ZIP implementations must follow.
|
|
3603
|
+
*/
|
|
3604
|
+
type ZipPluginShape = {
|
|
3605
|
+
/**
|
|
3606
|
+
* Creates a ZIP archive from multiple input files.
|
|
3607
|
+
*
|
|
3608
|
+
* @param inputs - Array of files to include in the ZIP archive
|
|
3609
|
+
* @param options - Configuration options for the ZIP creation
|
|
3610
|
+
* @returns An Effect that resolves to the ZIP file as a Uint8Array
|
|
3611
|
+
* @throws {UploadistaError} When ZIP creation fails
|
|
3612
|
+
*/
|
|
3613
|
+
zip: (inputs: ZipInput[], options: ZipParams) => Effect.Effect<Uint8Array, UploadistaError>;
|
|
3614
|
+
};
|
|
3615
|
+
declare const ZipPlugin_base: Context.TagClass<ZipPlugin, "ZipPlugin", ZipPluginShape>;
|
|
3616
|
+
/**
|
|
3617
|
+
* Context tag for the ZIP Plugin.
|
|
3618
|
+
*
|
|
3619
|
+
* This tag provides a type-safe way to access ZIP functionality
|
|
3620
|
+
* throughout the application using Effect's dependency injection system.
|
|
3621
|
+
*
|
|
3622
|
+
* @example
|
|
3623
|
+
* ```typescript
|
|
3624
|
+
* import { ZipPlugin } from "@uploadista/core/flow/plugins";
|
|
3625
|
+
*
|
|
3626
|
+
* // In your flow node
|
|
3627
|
+
* const program = Effect.gen(function* () {
|
|
3628
|
+
* const zipPlugin = yield* ZipPlugin;
|
|
3629
|
+
* const zipData = yield* zipPlugin.zip(files, { zipName: "archive.zip", includeMetadata: true });
|
|
3630
|
+
* return zipData;
|
|
3631
|
+
* });
|
|
3632
|
+
* ```
|
|
3633
|
+
*/
|
|
3634
|
+
declare class ZipPlugin extends ZipPlugin_base {}
|
|
3635
|
+
type ZipPluginLayer = Layer.Layer<ZipPlugin, never, never>;
|
|
3636
|
+
//#endregion
|
|
3637
|
+
//#region src/flow/plugins/plugins.d.ts
|
|
3638
|
+
type Plugin = ImagePlugin | ImageAiPlugin | VideoPlugin | DocumentPlugin | DocumentAiPlugin | VirusScanPlugin | CredentialProvider | ZipPlugin;
|
|
3639
|
+
type PluginLayer = ImagePluginLayer | ImageAiPluginLayer | VideoPluginLayer | DocumentPluginLayer | DocumentAiPluginLayer | VirusScanPluginLayer | CredentialProviderLayer | ZipPluginLayer;
|
|
3640
|
+
//#endregion
|
|
3641
|
+
//#region src/flow/plugins/types/describe-image-node.d.ts
|
|
3642
|
+
/**
|
|
3643
|
+
* Zod schema for validating describe image node parameters.
|
|
3644
|
+
* Defines the structure and validation rules for image description requests.
|
|
3645
|
+
*/
|
|
3646
|
+
declare const describeImageParamsSchema: z.ZodObject<{
|
|
3647
|
+
serviceType: z.ZodOptional<z.ZodEnum<{
|
|
3648
|
+
replicate: "replicate";
|
|
3649
|
+
}>>;
|
|
3650
|
+
}, z.core.$strip>;
|
|
3651
|
+
/**
|
|
3652
|
+
* Parameters for the describe image node.
|
|
3653
|
+
* Controls which AI service to use for generating image descriptions.
|
|
3654
|
+
*/
|
|
3655
|
+
type DescribeImageParams = z.infer<typeof describeImageParamsSchema>;
|
|
3656
|
+
//#endregion
|
|
3657
|
+
//#region src/flow/plugins/types/remove-background-node.d.ts
|
|
3658
|
+
/**
|
|
3659
|
+
* Zod schema for validating remove background node parameters.
|
|
3660
|
+
* Defines the structure and validation rules for background removal requests.
|
|
3661
|
+
*/
|
|
3662
|
+
declare const removeBackgroundParamsSchema: z.ZodObject<{
|
|
3663
|
+
serviceType: z.ZodOptional<z.ZodEnum<{
|
|
3664
|
+
replicate: "replicate";
|
|
3665
|
+
}>>;
|
|
3666
|
+
}, z.core.$strip>;
|
|
3667
|
+
/**
|
|
3668
|
+
* Parameters for the remove background node.
|
|
3669
|
+
* Controls which AI service to use for background removal processing.
|
|
3670
|
+
*/
|
|
3671
|
+
type RemoveBackgroundParams = z.infer<typeof removeBackgroundParamsSchema>;
|
|
3672
|
+
//#endregion
|
|
3673
|
+
//#region src/flow/type-guards.d.ts
|
|
3674
|
+
/**
|
|
3675
|
+
* A narrowed typed output with a specific node type and data type.
|
|
3676
|
+
* Unlike TypedOutput<T>, this type has a required nodeType field and
|
|
3677
|
+
* excludes BuiltInTypedOutput from the union, providing better type narrowing.
|
|
3678
|
+
*
|
|
3679
|
+
* @template T - The TypeScript type of the output data
|
|
3680
|
+
* @template TNodeType - The literal string type of the node type ID
|
|
3681
|
+
*/
|
|
3682
|
+
type NarrowedTypedOutput<T, TNodeType extends string = string> = {
|
|
3683
|
+
nodeType: TNodeType;
|
|
3684
|
+
data: T;
|
|
3685
|
+
nodeId: string;
|
|
3686
|
+
timestamp: string;
|
|
3687
|
+
};
|
|
3688
|
+
/**
|
|
3689
|
+
* Factory function to create type guards for specific node types.
|
|
3690
|
+
*
|
|
3691
|
+
* Creates a TypeScript type guard that validates both the type tag and
|
|
3692
|
+
* the data structure against the registered schema. This enables type-safe
|
|
3693
|
+
* narrowing of TypedOutput objects in TypeScript.
|
|
3694
|
+
*
|
|
3695
|
+
* @template T - The expected TypeScript type after narrowing
|
|
3696
|
+
* @template TNodeType - The literal string type of the node type ID
|
|
3697
|
+
* @param typeId - The registered type ID to check against (e.g., "storage-output-v1")
|
|
3698
|
+
* @returns A type guard function that narrows TypedOutput to NarrowedTypedOutput<T, TNodeType>
|
|
3699
|
+
*
|
|
3700
|
+
* @example
|
|
3701
|
+
* ```typescript
|
|
3702
|
+
* import { createTypeGuard } from "@uploadista/core/flow";
|
|
3703
|
+
* import { z } from "zod";
|
|
3704
|
+
*
|
|
3705
|
+
* const descriptionSchema = z.object({
|
|
3706
|
+
* description: z.string(),
|
|
3707
|
+
* confidence: z.number(),
|
|
3708
|
+
* });
|
|
3709
|
+
*
|
|
3710
|
+
* type DescriptionOutput = z.infer<typeof descriptionSchema>;
|
|
3711
|
+
*
|
|
3712
|
+
* const isDescriptionOutput = createTypeGuard<DescriptionOutput>(
|
|
3713
|
+
* "description-output-v1"
|
|
3714
|
+
* );
|
|
3715
|
+
*
|
|
3716
|
+
* // Use in code
|
|
3717
|
+
* if (isDescriptionOutput(output)) {
|
|
3718
|
+
* // output.data is typed as DescriptionOutput
|
|
3719
|
+
* console.log(output.data.description);
|
|
3720
|
+
* }
|
|
3721
|
+
* ```
|
|
3722
|
+
*/
|
|
3723
|
+
declare function createTypeGuard<T, TNodeType extends string = string>(typeId: TNodeType): (output: TypedOutput) => output is NarrowedTypedOutput<T, TNodeType>;
|
|
3724
|
+
/**
|
|
3725
|
+
* Type guard for UploadFile objects.
|
|
3726
|
+
*
|
|
3727
|
+
* Validates that a value is a valid UploadFile by checking its structure against the schema.
|
|
3728
|
+
* This is useful for determining if a node result is an UploadFile, which affects
|
|
3729
|
+
* auto-persistence and intermediate file tracking.
|
|
3730
|
+
*
|
|
3731
|
+
* @param value - The value to check
|
|
3732
|
+
* @returns True if the value is a valid UploadFile
|
|
3733
|
+
*
|
|
3734
|
+
* @example
|
|
3735
|
+
* ```typescript
|
|
3736
|
+
* import { isUploadFile } from "@uploadista/core/flow";
|
|
3737
|
+
*
|
|
3738
|
+
* if (isUploadFile(nodeResult)) {
|
|
3739
|
+
* // nodeResult is typed as UploadFile
|
|
3740
|
+
* console.log("File ID:", nodeResult.id);
|
|
3741
|
+
* console.log("Storage:", nodeResult.storage.id);
|
|
3742
|
+
* }
|
|
3743
|
+
* ```
|
|
3744
|
+
*/
|
|
3745
|
+
declare function isUploadFile(value: unknown): value is UploadFile;
|
|
3746
|
+
/**
|
|
3747
|
+
* Type guard for storage output nodes.
|
|
3748
|
+
*
|
|
3749
|
+
* Validates that an output is from a storage node and contains valid UploadFile data.
|
|
3750
|
+
*
|
|
3751
|
+
* @param output - The output to check
|
|
3752
|
+
* @returns True if the output is a storage output with valid UploadFile data
|
|
3753
|
+
*
|
|
3754
|
+
* @example
|
|
3755
|
+
* ```typescript
|
|
3756
|
+
* import { isStorageOutput } from "@uploadista/core/flow";
|
|
3757
|
+
*
|
|
3758
|
+
* if (isStorageOutput(output)) {
|
|
3759
|
+
* // output.data is typed as UploadFile
|
|
3760
|
+
* console.log("File URL:", output.data.url);
|
|
3761
|
+
* console.log("File size:", output.data.size);
|
|
3762
|
+
* }
|
|
3763
|
+
* ```
|
|
3764
|
+
*/
|
|
3765
|
+
declare const isStorageOutput: (output: TypedOutput) => output is NarrowedTypedOutput<UploadFile, string>;
|
|
3766
|
+
/**
|
|
3767
|
+
* Type guard for OCR output nodes.
|
|
3768
|
+
*
|
|
3769
|
+
* Validates that an output is from an OCR node and contains valid structured OCR data.
|
|
3770
|
+
*
|
|
3771
|
+
* @param output - The output to check
|
|
3772
|
+
* @returns True if the output is an OCR output with valid structured text data
|
|
3773
|
+
*
|
|
3774
|
+
* @example
|
|
3775
|
+
* ```typescript
|
|
3776
|
+
* import { isOcrOutput } from "@uploadista/core/flow";
|
|
3777
|
+
*
|
|
3778
|
+
* if (isOcrOutput(output)) {
|
|
3779
|
+
* // output.data is typed as OcrOutput
|
|
3780
|
+
* console.log("Extracted text:", output.data.extractedText);
|
|
3781
|
+
* console.log("Format:", output.data.format);
|
|
3782
|
+
* console.log("Task type:", output.data.taskType);
|
|
3783
|
+
* }
|
|
3784
|
+
* ```
|
|
3785
|
+
*/
|
|
3786
|
+
declare const isOcrOutput: (output: TypedOutput) => output is NarrowedTypedOutput<{
|
|
3787
|
+
extractedText: string;
|
|
3788
|
+
format: "markdown" | "plain" | "structured";
|
|
3789
|
+
taskType: "convertToMarkdown" | "freeOcr" | "parseFigure" | "locateObject";
|
|
3790
|
+
confidence?: number | undefined;
|
|
3791
|
+
}, string>;
|
|
3792
|
+
/**
|
|
3793
|
+
* Type guard for image description output nodes.
|
|
3794
|
+
*
|
|
3795
|
+
* Validates that an output is from an image description node and contains valid description data.
|
|
3796
|
+
*
|
|
3797
|
+
* @param output - The output to check
|
|
3798
|
+
* @returns True if the output is an image description output with valid description data
|
|
3799
|
+
*
|
|
3800
|
+
* @example
|
|
3801
|
+
* ```typescript
|
|
3802
|
+
* import { isImageDescriptionOutput } from "@uploadista/core/flow";
|
|
3803
|
+
*
|
|
3804
|
+
* if (isImageDescriptionOutput(output)) {
|
|
3805
|
+
* // output.data is typed as ImageDescriptionOutput
|
|
3806
|
+
* console.log("Description:", output.data.description);
|
|
3807
|
+
* console.log("Confidence:", output.data.confidence);
|
|
3808
|
+
* }
|
|
3809
|
+
* ```
|
|
3810
|
+
*/
|
|
3811
|
+
declare const isImageDescriptionOutput: (output: TypedOutput) => output is NarrowedTypedOutput<{
|
|
3812
|
+
description: string;
|
|
3813
|
+
confidence?: number | undefined;
|
|
3814
|
+
metadata?: Record<string, unknown> | undefined;
|
|
3815
|
+
}, string>;
|
|
3816
|
+
/**
|
|
3817
|
+
* Filter an array of outputs to only those matching a specific type.
|
|
3818
|
+
*
|
|
3819
|
+
* This helper function filters outputs using a type guard and returns a
|
|
3820
|
+
* properly typed array of results. It's useful for extracting specific
|
|
3821
|
+
* output types from multi-output flows.
|
|
3822
|
+
*
|
|
3823
|
+
* @template TOutput - The expected narrowed output type
|
|
3824
|
+
* @param outputs - Array of typed outputs to filter
|
|
3825
|
+
* @param typeGuard - Type guard function to use for filtering
|
|
3826
|
+
* @returns Array of outputs that match the type guard, properly typed
|
|
3827
|
+
*
|
|
3828
|
+
* @example
|
|
3829
|
+
* ```typescript
|
|
3830
|
+
* import { filterOutputsByType, isStorageOutput } from "@uploadista/core/flow";
|
|
3831
|
+
*
|
|
3832
|
+
* // Get all storage outputs from a multi-output flow
|
|
3833
|
+
* const storageOutputs = filterOutputsByType(
|
|
3834
|
+
* flowResult.outputs,
|
|
3835
|
+
* isStorageOutput
|
|
3836
|
+
* );
|
|
3837
|
+
*
|
|
3838
|
+
* for (const output of storageOutputs) {
|
|
3839
|
+
* // Each output.data is typed as UploadFile
|
|
3840
|
+
* console.log("Saved file:", output.data.url);
|
|
3841
|
+
* }
|
|
3842
|
+
* ```
|
|
3843
|
+
*/
|
|
3844
|
+
declare function filterOutputsByType<TOutput extends TypedOutput>(outputs: TypedOutput[], typeGuard: (output: TypedOutput) => output is TOutput): TOutput[];
|
|
3845
|
+
/**
|
|
3846
|
+
* Get a single output of a specific type from an array of outputs.
|
|
3847
|
+
*
|
|
3848
|
+
* This helper function finds exactly one output matching the type guard.
|
|
3849
|
+
* It throws an error if no outputs match or if multiple outputs match,
|
|
3850
|
+
* ensuring the caller receives exactly the expected result.
|
|
3851
|
+
*
|
|
3852
|
+
* @template TOutput - The expected narrowed output type
|
|
3853
|
+
* @param outputs - Array of typed outputs to search
|
|
3854
|
+
* @param typeGuard - Type guard function to use for matching
|
|
3855
|
+
* @returns The single matching output, properly typed
|
|
3856
|
+
* @throws {UploadistaError} If no outputs match (OUTPUT_NOT_FOUND)
|
|
3857
|
+
* @throws {UploadistaError} If multiple outputs match (MULTIPLE_OUTPUTS_FOUND)
|
|
3858
|
+
*
|
|
3859
|
+
* @example
|
|
3860
|
+
* ```typescript
|
|
3861
|
+
* import { getSingleOutputByType, isStorageOutput } from "@uploadista/core/flow";
|
|
3862
|
+
*
|
|
3863
|
+
* try {
|
|
3864
|
+
* const storageOutput = getSingleOutputByType(
|
|
3865
|
+
* flowResult.outputs,
|
|
3866
|
+
* isStorageOutput
|
|
3867
|
+
* );
|
|
3868
|
+
* // storageOutput.data is typed as UploadFile
|
|
3869
|
+
* console.log("File saved at:", storageOutput.data.url);
|
|
3870
|
+
* } catch (error) {
|
|
3871
|
+
* if (error.code === "OUTPUT_NOT_FOUND") {
|
|
3872
|
+
* console.error("No storage output found");
|
|
3873
|
+
* } else if (error.code === "MULTIPLE_OUTPUTS_FOUND") {
|
|
3874
|
+
* console.error("Multiple storage outputs found, expected one");
|
|
3875
|
+
* }
|
|
3876
|
+
* }
|
|
3877
|
+
* ```
|
|
3878
|
+
*/
|
|
3879
|
+
declare function getSingleOutputByType<TOutput extends TypedOutput>(outputs: TypedOutput[], typeGuard: (output: TypedOutput) => output is TOutput): Effect.Effect<TOutput, UploadistaError>;
|
|
3880
|
+
/**
|
|
3881
|
+
* Get the first output of a specific type, if any exists.
|
|
3882
|
+
*
|
|
3883
|
+
* Unlike getSingleOutputByType, this function returns undefined if no outputs
|
|
3884
|
+
* match, and returns the first match if multiple outputs exist. This is useful
|
|
3885
|
+
* when you want a more lenient matching strategy.
|
|
3886
|
+
*
|
|
3887
|
+
* @template TOutput - The expected narrowed output type
|
|
3888
|
+
* @param outputs - Array of typed outputs to search
|
|
3889
|
+
* @param typeGuard - Type guard function to use for matching
|
|
3890
|
+
* @returns The first matching output, or undefined if none match
|
|
3891
|
+
*
|
|
3892
|
+
* @example
|
|
3893
|
+
* ```typescript
|
|
3894
|
+
* import { getFirstOutputByType, isStorageOutput } from "@uploadista/core/flow";
|
|
3895
|
+
*
|
|
3896
|
+
* const storageOutput = getFirstOutputByType(
|
|
3897
|
+
* flowResult.outputs,
|
|
3898
|
+
* isStorageOutput
|
|
3899
|
+
* );
|
|
3900
|
+
*
|
|
3901
|
+
* if (storageOutput) {
|
|
3902
|
+
* console.log("First storage output:", storageOutput.data.url);
|
|
3903
|
+
* } else {
|
|
3904
|
+
* console.log("No storage outputs found");
|
|
3905
|
+
* }
|
|
3906
|
+
* ```
|
|
3907
|
+
*/
|
|
3908
|
+
declare function getFirstOutputByType<TOutput extends TypedOutput>(outputs: TypedOutput[], typeGuard: (output: TypedOutput) => output is TOutput): TOutput | undefined;
|
|
3909
|
+
/**
|
|
3910
|
+
* Get an output by its node ID.
|
|
3911
|
+
*
|
|
3912
|
+
* This helper finds an output produced by a specific node instance,
|
|
3913
|
+
* regardless of its type. Useful when you know the specific node ID
|
|
3914
|
+
* you're looking for.
|
|
3915
|
+
*
|
|
3916
|
+
* @param outputs - Array of typed outputs to search
|
|
3917
|
+
* @param nodeId - The node ID to match
|
|
3918
|
+
* @returns The output from the specified node, or undefined if not found
|
|
3919
|
+
*
|
|
3920
|
+
* @example
|
|
3921
|
+
* ```typescript
|
|
3922
|
+
* import { getOutputByNodeId } from "@uploadista/core/flow";
|
|
3923
|
+
*
|
|
3924
|
+
* const cdnOutput = getOutputByNodeId(flowResult.outputs, "cdn-storage");
|
|
3925
|
+
* if (cdnOutput) {
|
|
3926
|
+
* console.log("CDN output:", cdnOutput.data);
|
|
3927
|
+
* }
|
|
3928
|
+
* ```
|
|
3929
|
+
*/
|
|
3930
|
+
declare function getOutputByNodeId(outputs: TypedOutput[], nodeId: string): TypedOutput | undefined;
|
|
3931
|
+
/**
|
|
3932
|
+
* Check if any outputs match a specific type.
|
|
3933
|
+
*
|
|
3934
|
+
* Simple predicate function to check if at least one output of a given
|
|
3935
|
+
* type exists in the results.
|
|
3936
|
+
*
|
|
3937
|
+
* @template TOutput - The expected narrowed output type
|
|
3938
|
+
* @param outputs - Array of typed outputs to check
|
|
3939
|
+
* @param typeGuard - Type guard function to use for checking
|
|
3940
|
+
* @returns True if at least one output matches the type guard
|
|
3941
|
+
*
|
|
3942
|
+
* @example
|
|
3943
|
+
* ```typescript
|
|
3944
|
+
* import { hasOutputOfType, isStorageOutput } from "@uploadista/core/flow";
|
|
3945
|
+
*
|
|
3946
|
+
* if (hasOutputOfType(flowResult.outputs, isStorageOutput)) {
|
|
3947
|
+
* console.log("Flow produced at least one storage output");
|
|
3948
|
+
* } else {
|
|
3949
|
+
* console.log("No storage outputs in this flow");
|
|
3950
|
+
* }
|
|
3951
|
+
* ```
|
|
3952
|
+
*/
|
|
3953
|
+
declare function hasOutputOfType<TOutput extends TypedOutput>(outputs: TypedOutput[], typeGuard: (output: TypedOutput) => output is TOutput): boolean;
|
|
3954
|
+
/**
|
|
3955
|
+
* Type guard for init operation (streaming file upload initialization).
|
|
3956
|
+
*
|
|
3957
|
+
* Checks if the input data is an init operation that starts a streaming
|
|
3958
|
+
* file upload session.
|
|
3959
|
+
*
|
|
3960
|
+
* @param data - Input data to check
|
|
3961
|
+
* @returns True if data is an init operation
|
|
3962
|
+
*
|
|
3963
|
+
* @example
|
|
3964
|
+
* ```typescript
|
|
3965
|
+
* if (isInitOperation(inputData)) {
|
|
3966
|
+
* console.log("Storage ID:", inputData.storageId);
|
|
3967
|
+
* console.log("Metadata:", inputData.metadata);
|
|
3968
|
+
* }
|
|
3969
|
+
* ```
|
|
3970
|
+
*/
|
|
3971
|
+
declare function isInitOperation(data: InputData): data is Extract<InputData, {
|
|
3972
|
+
operation: "init";
|
|
3973
|
+
}>;
|
|
3974
|
+
/**
|
|
3975
|
+
* Type guard for finalize operation (complete streaming upload).
|
|
3976
|
+
*
|
|
3977
|
+
* Checks if the input data is a finalize operation that completes a
|
|
3978
|
+
* previously initialized streaming upload.
|
|
3979
|
+
*
|
|
3980
|
+
* @param data - Input data to check
|
|
3981
|
+
* @returns True if data is a finalize operation
|
|
3982
|
+
*
|
|
3983
|
+
* @example
|
|
3984
|
+
* ```typescript
|
|
3985
|
+
* if (isFinalizeOperation(inputData)) {
|
|
3986
|
+
* console.log("Upload ID:", inputData.uploadId);
|
|
3987
|
+
* }
|
|
3988
|
+
* ```
|
|
3989
|
+
*/
|
|
3990
|
+
declare function isFinalizeOperation(data: InputData): data is Extract<InputData, {
|
|
3991
|
+
operation: "finalize";
|
|
3992
|
+
}>;
|
|
3993
|
+
/**
|
|
3994
|
+
* Type guard for URL operation (direct file fetch from URL).
|
|
3995
|
+
*
|
|
3996
|
+
* Checks if the input data is a URL operation that fetches a file
|
|
3997
|
+
* directly from an external URL.
|
|
3998
|
+
*
|
|
3999
|
+
* @param data - Input data to check
|
|
4000
|
+
* @returns True if data is a URL operation
|
|
4001
|
+
*
|
|
4002
|
+
* @example
|
|
4003
|
+
* ```typescript
|
|
4004
|
+
* if (isUrlOperation(inputData)) {
|
|
4005
|
+
* console.log("Fetching from:", inputData.url);
|
|
4006
|
+
* console.log("Optional storage:", inputData.storageId);
|
|
4007
|
+
* }
|
|
4008
|
+
* ```
|
|
4009
|
+
*/
|
|
4010
|
+
declare function isUrlOperation(data: InputData): data is Extract<InputData, {
|
|
4011
|
+
operation: "url";
|
|
4012
|
+
}>;
|
|
4013
|
+
/**
|
|
4014
|
+
* Type guard for upload operations (init or url).
|
|
4015
|
+
*
|
|
4016
|
+
* Checks if the input data is either an init or URL operation (i.e., operations
|
|
4017
|
+
* that trigger new uploads, as opposed to finalize which completes an existing upload).
|
|
4018
|
+
*
|
|
4019
|
+
* @param data - Input data to check
|
|
4020
|
+
* @returns True if data is an init or URL operation
|
|
4021
|
+
*
|
|
4022
|
+
* @example
|
|
4023
|
+
* ```typescript
|
|
4024
|
+
* if (isUploadOperation(inputData)) {
|
|
4025
|
+
* // This is a new upload, not a finalization
|
|
4026
|
+
* if (isInitOperation(inputData)) {
|
|
4027
|
+
* console.log("Streaming upload");
|
|
4028
|
+
* } else {
|
|
4029
|
+
* console.log("URL fetch");
|
|
4030
|
+
* }
|
|
4031
|
+
* }
|
|
4032
|
+
* ```
|
|
4033
|
+
*/
|
|
4034
|
+
declare function isUploadOperation(data: InputData): data is Extract<InputData, {
|
|
4035
|
+
operation: "init" | "url";
|
|
4036
|
+
}>;
|
|
4037
|
+
//#endregion
|
|
4038
|
+
//#region src/flow/typed-flow.d.ts
|
|
4039
|
+
/**
|
|
4040
|
+
* Defines a node that can be used in a typed flow.
|
|
4041
|
+
*
|
|
4042
|
+
* A node definition can be either:
|
|
4043
|
+
* - A plain FlowNode object
|
|
4044
|
+
* - An Effect that resolves to a FlowNode (for nodes requiring dependencies)
|
|
4045
|
+
*
|
|
4046
|
+
* @template TNodeError - The error types that the node can produce
|
|
4047
|
+
* @template TNodeRequirements - The services/dependencies the node requires
|
|
4048
|
+
*/
|
|
4049
|
+
type NodeDefinition<TNodeError = never, TNodeRequirements = never> = FlowNode<any, any, UploadistaError> | Effect.Effect<FlowNode<any, any, UploadistaError>, TNodeError, TNodeRequirements>;
|
|
4050
|
+
/**
|
|
4051
|
+
* A record mapping node IDs to their definitions.
|
|
4052
|
+
*
|
|
4053
|
+
* This is the primary type used for defining the nodes in a typed flow,
|
|
4054
|
+
* allowing TypeScript to infer input/output schemas and requirements.
|
|
4055
|
+
*
|
|
4056
|
+
* @example
|
|
4057
|
+
* ```typescript
|
|
4058
|
+
* const nodes = {
|
|
4059
|
+
* input: fileInputNode,
|
|
4060
|
+
* resize: Effect.succeed(imageResizeNode),
|
|
4061
|
+
* output: s3OutputNode
|
|
4062
|
+
* } satisfies NodeDefinitionsRecord;
|
|
4063
|
+
* ```
|
|
4064
|
+
*/
|
|
4065
|
+
type NodeDefinitionsRecord = Record<string, NodeDefinition<any, any>>;
|
|
4066
|
+
/**
|
|
4067
|
+
* Extracts the error type from a NodeDefinition.
|
|
4068
|
+
*
|
|
4069
|
+
* If the node is an Effect, extracts its error type.
|
|
4070
|
+
* If the node is a plain FlowNode, returns never (no errors).
|
|
4071
|
+
*/
|
|
4072
|
+
type NodeDefinitionError<T> = T extends Effect.Effect<FlowNode<any, any, UploadistaError>, infer TError, any> ? TError : never;
|
|
4073
|
+
/**
|
|
4074
|
+
* Extracts the requirements (dependencies) from a NodeDefinition.
|
|
4075
|
+
*
|
|
4076
|
+
* Uses the shared ExtractEffectRequirements utility for consistency.
|
|
4077
|
+
*/
|
|
4078
|
+
type NodeDefinitionRequirements<T> = ExtractEffectRequirements<T>;
|
|
4079
|
+
/**
|
|
4080
|
+
* Extracts all possible errors from all nodes in a flow as a union.
|
|
4081
|
+
*
|
|
4082
|
+
* This iterates through all nodes in the record and combines their
|
|
4083
|
+
* error types into a single union type.
|
|
4084
|
+
*/
|
|
4085
|
+
type NodesErrorUnion<TNodes extends NodeDefinitionsRecord> = { [K in keyof TNodes]: NodeDefinitionError<TNodes[K]> }[keyof TNodes];
|
|
4086
|
+
/**
|
|
4087
|
+
* Extracts all service requirements from all nodes in a flow as a union.
|
|
4088
|
+
*
|
|
4089
|
+
* This iterates through all nodes in the record and combines their
|
|
4090
|
+
* requirement types into a single union type representing all services
|
|
4091
|
+
* needed by the flow.
|
|
4092
|
+
*
|
|
4093
|
+
* @template TNodes - The record of node definitions
|
|
4094
|
+
*
|
|
4095
|
+
* @example
|
|
4096
|
+
* ```typescript
|
|
4097
|
+
* const nodes = {
|
|
4098
|
+
* resize: imageResizeNode, // requires ImagePlugin
|
|
4099
|
+
* zip: zipNode, // requires ZipPlugin
|
|
4100
|
+
* };
|
|
4101
|
+
* type Requirements = NodesRequirementsUnion<typeof nodes>;
|
|
4102
|
+
* // Requirements = ImagePlugin | ZipPlugin
|
|
4103
|
+
* ```
|
|
4104
|
+
*/
|
|
4105
|
+
type NodesRequirementsUnion<TNodes extends NodeDefinitionsRecord> = { [K in keyof TNodes]: NodeDefinitionRequirements<TNodes[K]> }[keyof TNodes];
|
|
4106
|
+
/**
|
|
4107
|
+
* Extracts all service requirements from a flow's nodes.
|
|
4108
|
+
*
|
|
4109
|
+
* This includes all services required by any node in the flow,
|
|
4110
|
+
* including UploadEngine (which is provided by the runtime).
|
|
4111
|
+
*
|
|
4112
|
+
* @template TNodes - The record of node definitions
|
|
4113
|
+
*
|
|
4114
|
+
* @example
|
|
4115
|
+
* ```typescript
|
|
4116
|
+
* const myFlow = createFlow({
|
|
4117
|
+
* nodes: {
|
|
4118
|
+
* input: fileInputNode,
|
|
4119
|
+
* process: imageProcessNode, // requires ImagePlugin
|
|
4120
|
+
* },
|
|
4121
|
+
* edges: [...]
|
|
4122
|
+
* });
|
|
4123
|
+
* type AllRequirements = FlowRequirements<typeof myFlow.nodes>;
|
|
4124
|
+
* // AllRequirements = ImagePlugin | UploadEngine
|
|
4125
|
+
* ```
|
|
4126
|
+
*/
|
|
4127
|
+
type FlowRequirements<TNodes extends NodeDefinitionsRecord> = NodesRequirementsUnion<TNodes>;
|
|
4128
|
+
/**
|
|
4129
|
+
* Extracts plugin service requirements from a flow, excluding UploadEngine.
|
|
4130
|
+
*
|
|
4131
|
+
* This type is useful for determining which plugin layers need to be
|
|
4132
|
+
* provided when creating a server, as UploadEngine is automatically
|
|
4133
|
+
* provided by the runtime.
|
|
4134
|
+
*
|
|
4135
|
+
* @template TNodes - The record of node definitions
|
|
4136
|
+
*
|
|
4137
|
+
* @example
|
|
4138
|
+
* ```typescript
|
|
4139
|
+
* const myFlow = createFlow({
|
|
4140
|
+
* nodes: {
|
|
4141
|
+
* resize: imageResizeNode, // requires ImagePlugin
|
|
4142
|
+
* upload: s3OutputNode, // requires UploadEngine
|
|
4143
|
+
* },
|
|
4144
|
+
* edges: [...]
|
|
4145
|
+
* });
|
|
4146
|
+
* type PluginRequirements = FlowPluginRequirements<typeof myFlow.nodes>;
|
|
4147
|
+
* // PluginRequirements = ImagePlugin (UploadEngine excluded)
|
|
4148
|
+
* ```
|
|
4149
|
+
*/
|
|
4150
|
+
type FlowPluginRequirements<TNodes extends NodeDefinitionsRecord> = Exclude<FlowRequirements<TNodes>, UploadEngine>;
|
|
4151
|
+
/**
|
|
4152
|
+
* Infers the concrete FlowNode type from a NodeDefinition.
|
|
4153
|
+
*
|
|
4154
|
+
* If the definition is already a FlowNode, returns it as-is.
|
|
4155
|
+
* If the definition is an Effect, extracts the FlowNode from the Effect's success type.
|
|
4156
|
+
*
|
|
4157
|
+
* Uses the shared ResolveEffect utility for consistency.
|
|
4158
|
+
*/
|
|
4159
|
+
type InferNode<T> = T extends FlowNode<any, any, UploadistaError> ? T : ResolveEffect<T> extends FlowNode<any, any, UploadistaError> ? ResolveEffect<T> : never;
|
|
4160
|
+
type ExtractKeysByNodeType<TNodes extends NodeDefinitionsRecord, TType extends NodeType> = { [K in keyof TNodes]: InferNode<TNodes[K]>["type"] extends TType ? K : never }[keyof TNodes];
|
|
4161
|
+
type SchemaInfer<T> = T extends z.ZodTypeAny ? z.infer<T> : never;
|
|
4162
|
+
type FlowInputMap<TNodes extends NodeDefinitionsRecord> = { [K in Extract<ExtractKeysByNodeType<TNodes, NodeType.input>, string>]: SchemaInfer<InferNode<TNodes[K]>["inputSchema"]> };
|
|
4163
|
+
type FlowOutputMap<TNodes extends NodeDefinitionsRecord> = { [K in Extract<keyof TNodes, string>]: SchemaInfer<InferNode<TNodes[K]>["outputSchema"]> };
|
|
4164
|
+
type FlowInputUnion<TNodes extends NodeDefinitionsRecord> = { [K in Extract<ExtractKeysByNodeType<TNodes, NodeType.input>, string>]: SchemaInfer<InferNode<TNodes[K]>["inputSchema"]> }[Extract<ExtractKeysByNodeType<TNodes, NodeType.input>, string>];
|
|
4165
|
+
type FlowOutputUnion<TNodes extends NodeDefinitionsRecord> = { [K in Extract<keyof TNodes, string>]: SchemaInfer<InferNode<TNodes[K]>["outputSchema"]> }[Extract<keyof TNodes, string>];
|
|
4166
|
+
type NodeKey<TNodes extends NodeDefinitionsRecord> = Extract<keyof TNodes, string>;
|
|
4167
|
+
type TypedFlowEdge<TNodes extends NodeDefinitionsRecord> = {
|
|
4168
|
+
source: NodeKey<TNodes>;
|
|
4169
|
+
target: NodeKey<TNodes>;
|
|
4170
|
+
sourcePort?: string;
|
|
4171
|
+
targetPort?: string;
|
|
4172
|
+
};
|
|
4173
|
+
type TypedFlowConfig<TNodes extends NodeDefinitionsRecord> = {
|
|
4174
|
+
flowId: string;
|
|
4175
|
+
name: string;
|
|
4176
|
+
nodes: TNodes;
|
|
4177
|
+
edges: Array<TypedFlowEdge<TNodes>>;
|
|
4178
|
+
typeChecker?: TypeCompatibilityChecker;
|
|
4179
|
+
onEvent?: (event: FlowEvent) => Effect.Effect<{
|
|
4180
|
+
eventId: string | null;
|
|
4181
|
+
}, UploadistaError>;
|
|
4182
|
+
parallelExecution?: {
|
|
4183
|
+
enabled?: boolean;
|
|
4184
|
+
maxConcurrency?: number;
|
|
4185
|
+
};
|
|
4186
|
+
inputSchema?: z.ZodTypeAny;
|
|
4187
|
+
outputSchema?: z.ZodTypeAny;
|
|
4188
|
+
hooks?: {
|
|
4189
|
+
/**
|
|
4190
|
+
* Called when a sink node (terminal node with no outgoing edges) produces an output.
|
|
4191
|
+
* This hook runs after auto-persistence for UploadFile outputs.
|
|
4192
|
+
*
|
|
4193
|
+
* Use this hook to perform additional post-processing such as:
|
|
4194
|
+
* - Saving output metadata to a database
|
|
4195
|
+
* - Tracking outputs in external systems
|
|
4196
|
+
* - Adding custom metadata to outputs
|
|
4197
|
+
* - Triggering downstream workflows
|
|
4198
|
+
*
|
|
4199
|
+
* **Important**: The hook must not have any service requirements (Effect requirements must be `never`).
|
|
4200
|
+
* All necessary services should be captured in the closure when defining the hook.
|
|
4201
|
+
*
|
|
4202
|
+
* @example
|
|
4203
|
+
* ```typescript
|
|
4204
|
+
* // Using Promise (simpler for most users)
|
|
4205
|
+
* hooks: {
|
|
4206
|
+
* onNodeOutput: async ({ output }) => {
|
|
4207
|
+
* await db.save(output);
|
|
4208
|
+
* return output;
|
|
4209
|
+
* }
|
|
4210
|
+
* }
|
|
4211
|
+
* ```
|
|
4212
|
+
*/
|
|
4213
|
+
onNodeOutput?: <TOutput>(context: {
|
|
4214
|
+
output: TOutput;
|
|
4215
|
+
nodeId: string;
|
|
4216
|
+
flowId: string;
|
|
4217
|
+
jobId: string;
|
|
4218
|
+
storageId: string;
|
|
4219
|
+
clientId: string | null;
|
|
4220
|
+
}) => Effect.Effect<TOutput, UploadistaError, never> | Promise<TOutput>;
|
|
4221
|
+
};
|
|
4222
|
+
/**
|
|
4223
|
+
* Circuit breaker configuration for resilience against external service failures.
|
|
4224
|
+
*
|
|
4225
|
+
* @example
|
|
4226
|
+
* ```typescript
|
|
4227
|
+
* circuitBreaker: {
|
|
4228
|
+
* defaults: { enabled: false },
|
|
4229
|
+
* nodeTypeOverrides: {
|
|
4230
|
+
* "Describe Image": {
|
|
4231
|
+
* enabled: true,
|
|
4232
|
+
* failureThreshold: 5,
|
|
4233
|
+
* resetTimeout: 60000,
|
|
4234
|
+
* fallback: { type: "skip", passThrough: true }
|
|
4235
|
+
* }
|
|
4236
|
+
* }
|
|
4237
|
+
* }
|
|
4238
|
+
* ```
|
|
4239
|
+
*/
|
|
4240
|
+
circuitBreaker?: {
|
|
4241
|
+
/** Default circuit breaker config for all nodes */defaults?: FlowCircuitBreakerConfig; /** Override circuit breaker config per node type (node name) */
|
|
4242
|
+
nodeTypeOverrides?: Record<string, FlowCircuitBreakerConfig>;
|
|
4243
|
+
};
|
|
4244
|
+
};
|
|
4245
|
+
declare const typedFlowInputsSymbol: unique symbol;
|
|
4246
|
+
declare const typedFlowOutputsSymbol: unique symbol;
|
|
4247
|
+
declare const typedFlowPluginsSymbol: unique symbol;
|
|
4248
|
+
/**
|
|
4249
|
+
* A type-safe Flow that infers input/output types and requirements from its nodes.
|
|
4250
|
+
*
|
|
4251
|
+
* TypedFlow extends the base Flow type with additional type information that
|
|
4252
|
+
* allows TypeScript to verify inputs, outputs, and plugin requirements at compile time.
|
|
4253
|
+
*
|
|
4254
|
+
* The phantom type properties (using unique symbols) enable type-level metadata
|
|
4255
|
+
* without affecting runtime behavior, allowing other type utilities to extract
|
|
4256
|
+
* this information for validation purposes.
|
|
4257
|
+
*
|
|
4258
|
+
* @template TNodes - Record of node definitions used in the flow
|
|
4259
|
+
* @template TInputSchema - Zod schema for flow inputs (inferred from input nodes)
|
|
4260
|
+
* @template TOutputSchema - Zod schema for flow outputs (inferred from output nodes)
|
|
4261
|
+
*
|
|
4262
|
+
* @example
|
|
4263
|
+
* ```typescript
|
|
4264
|
+
* const myFlow = createFlow({
|
|
4265
|
+
* nodes: {
|
|
4266
|
+
* input: fileInputNode,
|
|
4267
|
+
* resize: imageResizeNode,
|
|
4268
|
+
* output: s3OutputNode
|
|
4269
|
+
* },
|
|
4270
|
+
* edges: [
|
|
4271
|
+
* { source: 'input', target: 'resize' },
|
|
4272
|
+
* { source: 'resize', target: 'output' }
|
|
4273
|
+
* ]
|
|
4274
|
+
* });
|
|
4275
|
+
*
|
|
4276
|
+
* // TypeScript infers:
|
|
4277
|
+
* // - Input types from fileInputNode.inputSchema
|
|
4278
|
+
* // - Output types from s3OutputNode.outputSchema
|
|
4279
|
+
* // - Requirements: ImagePlugin (from resize node)
|
|
4280
|
+
* ```
|
|
4281
|
+
*/
|
|
4282
|
+
type TypedFlow<TNodes extends NodeDefinitionsRecord, TInputSchema extends z.ZodTypeAny, TOutputSchema extends z.ZodTypeAny> = Flow<TInputSchema, TOutputSchema, FlowRequirements<TNodes>> & {
|
|
4283
|
+
run: (args: {
|
|
4284
|
+
inputs?: Partial<FlowInputMap<TNodes>>;
|
|
4285
|
+
storageId: string;
|
|
4286
|
+
jobId: string;
|
|
4287
|
+
}) => Effect.Effect<FlowExecutionResult<FlowOutputMap<TNodes>>, UploadistaError, FlowRequirements<TNodes>>;
|
|
4288
|
+
resume: (args: {
|
|
4289
|
+
jobId: string;
|
|
4290
|
+
storageId: string;
|
|
4291
|
+
nodeResults: Record<string, unknown>;
|
|
4292
|
+
executionState: {
|
|
4293
|
+
executionOrder: string[];
|
|
4294
|
+
currentIndex: number;
|
|
4295
|
+
inputs: Partial<FlowInputMap<TNodes>>;
|
|
4296
|
+
};
|
|
4297
|
+
}) => Effect.Effect<FlowExecutionResult<FlowOutputMap<TNodes>>, UploadistaError, FlowRequirements<TNodes>>;
|
|
4298
|
+
readonly [typedFlowInputsSymbol]?: FlowInputMap<TNodes>;
|
|
4299
|
+
readonly [typedFlowOutputsSymbol]?: FlowOutputMap<TNodes>;
|
|
4300
|
+
readonly [typedFlowPluginsSymbol]?: FlowPluginRequirements<TNodes>;
|
|
4301
|
+
};
|
|
4302
|
+
declare function createFlow<TNodes extends NodeDefinitionsRecord>(config: TypedFlowConfig<TNodes>): Effect.Effect<TypedFlow<TNodes, z.ZodType<FlowInputUnion<TNodes>>, z.ZodType<FlowOutputUnion<TNodes>>>, NodesErrorUnion<TNodes> | UploadistaError, FlowRequirements<TNodes>>;
|
|
4303
|
+
//#endregion
|
|
4304
|
+
//#region src/flow/types/run-args.d.ts
|
|
4305
|
+
/**
|
|
4306
|
+
* Zod schema for validating flow run arguments.
|
|
4307
|
+
*
|
|
4308
|
+
* @property inputs - Record mapping input node IDs to their input data
|
|
4309
|
+
*
|
|
4310
|
+
* @example
|
|
4311
|
+
* ```typescript
|
|
4312
|
+
* const args = {
|
|
4313
|
+
* inputs: {
|
|
4314
|
+
* "input-node-1": { file: myFile, metadata: { ... } },
|
|
4315
|
+
* "input-node-2": { file: anotherFile }
|
|
4316
|
+
* }
|
|
4317
|
+
* };
|
|
4318
|
+
*
|
|
4319
|
+
* // Validate before running
|
|
4320
|
+
* const validated = runArgsSchema.parse(args);
|
|
4321
|
+
* ```
|
|
4322
|
+
*/
|
|
4323
|
+
declare const runArgsSchema: z.ZodObject<{
|
|
4324
|
+
inputs: z.ZodRecord<z.ZodString, z.ZodAny>;
|
|
4325
|
+
}, z.core.$strip>;
|
|
4326
|
+
/**
|
|
4327
|
+
* Type representing validated flow run arguments.
|
|
4328
|
+
*
|
|
4329
|
+
* This type is inferred from the runArgsSchema and ensures type safety
|
|
4330
|
+
* when passing inputs to flow execution.
|
|
4331
|
+
*/
|
|
4332
|
+
type RunArgs = z.infer<typeof runArgsSchema>;
|
|
4333
|
+
//#endregion
|
|
4334
|
+
//#region src/flow/utils/file-naming.d.ts
|
|
4335
|
+
/**
|
|
4336
|
+
* Extracts the base name (without extension) from a filename.
|
|
4337
|
+
*
|
|
4338
|
+
* @param fileName - The full filename
|
|
4339
|
+
* @returns The filename without extension
|
|
4340
|
+
*
|
|
4341
|
+
* @example
|
|
4342
|
+
* ```typescript
|
|
4343
|
+
* getBaseName("photo.jpg") // "photo"
|
|
4344
|
+
* getBaseName("document.tar.gz") // "document.tar"
|
|
4345
|
+
* getBaseName("noextension") // "noextension"
|
|
4346
|
+
* ```
|
|
4347
|
+
*/
|
|
4348
|
+
declare function getBaseName(fileName: string): string;
|
|
4349
|
+
/**
|
|
4350
|
+
* Extracts the extension (without dot) from a filename.
|
|
4351
|
+
*
|
|
4352
|
+
* @param fileName - The full filename
|
|
4353
|
+
* @returns The extension without leading dot, or empty string if none
|
|
4354
|
+
*
|
|
4355
|
+
* @example
|
|
4356
|
+
* ```typescript
|
|
4357
|
+
* getExtension("photo.jpg") // "jpg"
|
|
4358
|
+
* getExtension("document.tar.gz") // "gz"
|
|
4359
|
+
* getExtension("noextension") // ""
|
|
4360
|
+
* ```
|
|
4361
|
+
*/
|
|
4362
|
+
declare function getExtension(fileName: string): string;
|
|
4363
|
+
/**
|
|
4364
|
+
* Builds a naming context from file and flow execution information.
|
|
4365
|
+
*
|
|
4366
|
+
* @param file - The UploadFile being processed
|
|
4367
|
+
* @param flowContext - Flow execution context (flowId, jobId, nodeId, nodeType)
|
|
4368
|
+
* @param extraVars - Additional variables to include (width, height, format, etc.)
|
|
4369
|
+
* @returns Complete naming context for template interpolation
|
|
4370
|
+
*
|
|
4371
|
+
* @example
|
|
4372
|
+
* ```typescript
|
|
4373
|
+
* const context = buildNamingContext(
|
|
4374
|
+
* uploadFile,
|
|
4375
|
+
* { flowId: "flow-1", jobId: "job-1", nodeId: "resize-1", nodeType: "resize" },
|
|
4376
|
+
* { width: 800, height: 600 }
|
|
4377
|
+
* );
|
|
4378
|
+
* // context.baseName = "photo"
|
|
4379
|
+
* // context.extension = "jpg"
|
|
4380
|
+
* // context.width = 800
|
|
4381
|
+
* // context.height = 600
|
|
4382
|
+
* ```
|
|
4383
|
+
*/
|
|
4384
|
+
declare function buildNamingContext(file: UploadFile, flowContext: {
|
|
4385
|
+
flowId: string;
|
|
4386
|
+
jobId: string;
|
|
4387
|
+
nodeId: string;
|
|
4388
|
+
nodeType: string;
|
|
4389
|
+
}, extraVars?: Record<string, string | number | undefined>): NamingContext;
|
|
4390
|
+
/**
|
|
4391
|
+
* Interpolates a mustache-style template with the given context.
|
|
4392
|
+
*
|
|
4393
|
+
* Uses micromustache for fast, secure template rendering.
|
|
4394
|
+
* Unknown variables are preserved as-is (e.g., {{unknown}} stays {{unknown}}).
|
|
4395
|
+
*
|
|
4396
|
+
* @param pattern - Mustache-style template string
|
|
4397
|
+
* @param context - Variables to interpolate
|
|
4398
|
+
* @returns Interpolated string
|
|
4399
|
+
*
|
|
4400
|
+
* @example
|
|
4401
|
+
* ```typescript
|
|
4402
|
+
* interpolateFileName(
|
|
4403
|
+
* "{{baseName}}-{{width}}x{{height}}.{{extension}}",
|
|
4404
|
+
* { baseName: "photo", width: 800, height: 600, extension: "jpg" }
|
|
4405
|
+
* );
|
|
4406
|
+
* // Returns: "photo-800x600.jpg"
|
|
4407
|
+
* ```
|
|
4408
|
+
*/
|
|
4409
|
+
declare function interpolateFileName(pattern: string, context: NamingContext): string;
|
|
4410
|
+
/**
|
|
4411
|
+
* Applies file naming configuration to generate a new filename.
|
|
4412
|
+
*
|
|
4413
|
+
* Handles three modes:
|
|
4414
|
+
* - No config: Returns original filename (backward compatible)
|
|
4415
|
+
* - Auto mode: Appends auto-generated suffix based on node type
|
|
4416
|
+
* - Custom mode: Uses template pattern or rename function
|
|
4417
|
+
*
|
|
4418
|
+
* On any error, falls back to the original filename to prevent flow failures.
|
|
4419
|
+
*
|
|
4420
|
+
* @param file - The UploadFile being processed
|
|
4421
|
+
* @param context - Naming context with all available variables
|
|
4422
|
+
* @param config - Optional naming configuration
|
|
4423
|
+
* @returns The new filename (or original on error/no config)
|
|
4424
|
+
*
|
|
4425
|
+
* @example
|
|
4426
|
+
* ```typescript
|
|
4427
|
+
* // Auto mode
|
|
4428
|
+
* applyFileNaming(file, context, {
|
|
4429
|
+
* mode: 'auto',
|
|
4430
|
+
* autoSuffix: (ctx) => `${ctx.width}x${ctx.height}`
|
|
4431
|
+
* });
|
|
4432
|
+
* // Returns: "photo-800x600.jpg"
|
|
4433
|
+
*
|
|
4434
|
+
* // Custom mode with template
|
|
4435
|
+
* applyFileNaming(file, context, {
|
|
4436
|
+
* mode: 'custom',
|
|
4437
|
+
* pattern: '{{baseName}}-processed.{{extension}}'
|
|
4438
|
+
* });
|
|
4439
|
+
* // Returns: "photo-processed.jpg"
|
|
4440
|
+
*
|
|
4441
|
+
* // Custom mode with function
|
|
4442
|
+
* applyFileNaming(file, context, {
|
|
4443
|
+
* mode: 'custom',
|
|
4444
|
+
* rename: (file, ctx) => `${ctx.flowId}-${ctx.fileName}`
|
|
4445
|
+
* });
|
|
4446
|
+
* // Returns: "flow-1-photo.jpg"
|
|
4447
|
+
* ```
|
|
4448
|
+
*/
|
|
4449
|
+
declare function applyFileNaming(file: UploadFile, context: NamingContext, config?: FileNamingConfig): string;
|
|
4450
|
+
/**
|
|
4451
|
+
* Validates a template pattern for common issues.
|
|
4452
|
+
*
|
|
4453
|
+
* Checks for:
|
|
4454
|
+
* - Balanced braces
|
|
4455
|
+
* - Non-empty pattern
|
|
4456
|
+
* - Valid variable names
|
|
4457
|
+
*
|
|
4458
|
+
* @param pattern - Template pattern to validate
|
|
4459
|
+
* @returns Object with isValid flag and optional error message
|
|
4460
|
+
*
|
|
4461
|
+
* @example
|
|
4462
|
+
* ```typescript
|
|
4463
|
+
* validatePattern("{{baseName}}.{{extension}}");
|
|
4464
|
+
* // { isValid: true }
|
|
4465
|
+
*
|
|
4466
|
+
* validatePattern("{{baseName");
|
|
4467
|
+
* // { isValid: false, error: "Unbalanced braces: missing closing }}" }
|
|
4468
|
+
* ```
|
|
4469
|
+
*/
|
|
4470
|
+
declare function validatePattern(pattern: string): {
|
|
4471
|
+
isValid: boolean;
|
|
4472
|
+
error?: string;
|
|
4473
|
+
};
|
|
4474
|
+
/**
|
|
4475
|
+
* List of available template variables for documentation and UI.
|
|
4476
|
+
*/
|
|
4477
|
+
declare const AVAILABLE_TEMPLATE_VARIABLES: readonly [{
|
|
4478
|
+
readonly name: "baseName";
|
|
4479
|
+
readonly description: "Filename without extension";
|
|
4480
|
+
readonly example: "photo";
|
|
4481
|
+
}, {
|
|
4482
|
+
readonly name: "extension";
|
|
4483
|
+
readonly description: "File extension without dot";
|
|
4484
|
+
readonly example: "jpg";
|
|
4485
|
+
}, {
|
|
4486
|
+
readonly name: "fileName";
|
|
4487
|
+
readonly description: "Full original filename";
|
|
4488
|
+
readonly example: "photo.jpg";
|
|
4489
|
+
}, {
|
|
4490
|
+
readonly name: "nodeType";
|
|
4491
|
+
readonly description: "Type of processing node";
|
|
4492
|
+
readonly example: "resize";
|
|
4493
|
+
}, {
|
|
4494
|
+
readonly name: "nodeId";
|
|
4495
|
+
readonly description: "Specific node instance ID";
|
|
4496
|
+
readonly example: "resize-1";
|
|
4497
|
+
}, {
|
|
4498
|
+
readonly name: "flowId";
|
|
4499
|
+
readonly description: "Flow identifier";
|
|
4500
|
+
readonly example: "flow-abc";
|
|
4501
|
+
}, {
|
|
4502
|
+
readonly name: "jobId";
|
|
4503
|
+
readonly description: "Execution job ID";
|
|
4504
|
+
readonly example: "job-123";
|
|
4505
|
+
}, {
|
|
4506
|
+
readonly name: "timestamp";
|
|
4507
|
+
readonly description: "ISO 8601 processing time";
|
|
4508
|
+
readonly example: "2024-01-15T10:30:00Z";
|
|
4509
|
+
}, {
|
|
4510
|
+
readonly name: "width";
|
|
4511
|
+
readonly description: "Output width (image/video)";
|
|
4512
|
+
readonly example: "800";
|
|
4513
|
+
}, {
|
|
4514
|
+
readonly name: "height";
|
|
4515
|
+
readonly description: "Output height (image/video)";
|
|
4516
|
+
readonly example: "600";
|
|
4517
|
+
}, {
|
|
4518
|
+
readonly name: "format";
|
|
4519
|
+
readonly description: "Output format";
|
|
4520
|
+
readonly example: "webp";
|
|
4521
|
+
}, {
|
|
4522
|
+
readonly name: "quality";
|
|
4523
|
+
readonly description: "Quality setting";
|
|
4524
|
+
readonly example: "80";
|
|
4525
|
+
}, {
|
|
4526
|
+
readonly name: "pageNumber";
|
|
4527
|
+
readonly description: "Page number (documents)";
|
|
4528
|
+
readonly example: "1";
|
|
4529
|
+
}];
|
|
4530
|
+
//#endregion
|
|
4531
|
+
//#region src/flow/utils/resolve-upload-metadata.d.ts
|
|
4532
|
+
type FileMetadata = UploadFile["metadata"];
|
|
4533
|
+
type ResolvedUploadMetadata = {
|
|
4534
|
+
type: string;
|
|
4535
|
+
fileName: string;
|
|
4536
|
+
metadata: FileMetadata;
|
|
4537
|
+
metadataJson: string | undefined;
|
|
4538
|
+
};
|
|
4539
|
+
declare function resolveUploadMetadata(metadata: FileMetadata): ResolvedUploadMetadata;
|
|
4540
|
+
//#endregion
|
|
4541
|
+
export { VideoPlugin as $, MemoryFlowQueueStore as $n, ImageAiPluginLayer as $t, isImageDescriptionOutput as A, ExtractLayerServices as An, DistributedCircuitBreaker as Ar, Transformation as At, describeImageParamsSchema as B, FlowQueueServiceShape as Bn, CircuitBreakerState as Br, rotateTransformSchema as Bt, createTypeGuard as C, StreamingTransformResult as Cn, FlowData as Cr, OverlayPosition as Ct, getSingleOutputByType as D, ExtractEffectError as Dn, FlowEdge as Dr, SharpenTransform as Dt, getOutputByNodeId as E, createTransformNode as En, getFlowData as Er, SepiaTransform as Et, isUploadOperation as F, createInputNode as Fn, memoryCircuitBreakerStoreLayer as Fr, contrastTransformSchema as Ft, ZipPlugin as G, FlowLifecycleHook as Gn, transformationSchema as Gt, PluginLayer as H, FlowEngineLayer as Hn, sharpenTransformSchema as Ht, isUrlOperation as I, inputDataSchema as In, CircuitBreakerConfig as Ir, flipTransformSchema as It, ScanMetadata as J, FlowWaitUntil as Jn, resizeParamsSchema as Jt, ZipPluginLayer as K, FlowProvider as Kn, watermarkTransformSchema as Kt, RemoveBackgroundParams as L, inputNodeParamsSchema as Ln, CircuitBreakerEvent as Lr, grayscaleTransformSchema as Lt, isOcrOutput as M, FlowCondition as Mn, kvCircuitBreakerStoreLayer as Mr, WatermarkTransform as Mt, isStorageOutput as N, InputData as Nn, makeKvCircuitBreakerStore as Nr, blurTransformSchema as Nt, hasOutputOfType as O, ExtractEffectRequirements as On, createFlowEdge as Or, TextTransform as Ot, isUploadFile as P, InputNodeParams as Pn, makeMemoryCircuitBreakerStore as Pr, brightnessTransformSchema as Pt, VirusScanPluginShape as Q, FlowQueueStore as Qn, ImageAiPlugin as Qt, removeBackgroundParamsSchema as R, FlowQueueDispatchMarker as Rn, CircuitBreakerEventHandler as Rr, logoTransformSchema as Rt, NarrowedTypedOutput as S, StreamingTransformFn as Sn, Flow as Sr, LogoTransform as St, getFirstOutputByType as T, TransformNodeConfig as Tn, createFlowWithSchema as Tr, RotateTransform as Tt, 
ZipInput as U, FlowEngineOptions as Un, textTransformSchema as Ut, Plugin as V, FlowEngine as Vn, DEFAULT_CIRCUIT_BREAKER_CONFIG as Vr, sepiaTransformSchema as Vt, ZipParams as W, FlowEngineShape as Wn, transformImageParamsSchema as Wt, VirusScanPlugin as X, createFlowEngine as Xn, optimizeParamsSchema as Xt, ScanResult as Y, WaitUntilCallback as Yn, OptimizeParams as Yt, VirusScanPluginLayer as Z, flowEngine as Zn, ImageAiContext as Zt, NodeDefinitionsRecord as _, CredentialProviderLayer as _n, InputTypeDefinition as _r, BlurTransform as _t, buildNamingContext as a, MergePdfParams as an, ImageDescriptionOutput as ar, trimVideoParamsSchema as at, TypedFlowEdge as b, ParallelScheduler as bn, inputTypeRegistry as br, FlipTransform as bt, interpolateFileName as c, DocumentAiContext as cn, STORAGE_OUTPUT_TYPE_ID as cr, ResizeVideoParams as ct, runArgsSchema as d, DocumentAiPluginShape as dn, ocrOutputSchema as dr, extractFrameVideoParamsSchema as dt, ImageAiPluginShape as en, DeadLetterQueueService as er, VideoPluginLayer as et, FlowInputMap as f, OcrParams as fn, OutputTypeDefinition as fr, DescribeVideoMetadata as ft, NodeDefinition as g, CredentialProvider as gn, validateFlowOutput as gr, ImagePluginShape as gt, FlowRequirements as h, OcrTaskType as hn, outputTypeRegistry as hr, ImagePluginLayer as ht, applyFileNaming as i, DocumentPluginShape as in, IMAGE_DESCRIPTION_OUTPUT_TYPE_ID as ir, TrimVideoParams as it, isInitOperation as j, ResolveEffect as jn, DistributedCircuitBreakerRegistry as jr, TransformationType as jt, isFinalizeOperation as k, ExtractLayerService as kn, AllowRequestResult as kr, TransformImageParams as kt, validatePattern as l, DocumentAiPlugin as ln, STREAMING_INPUT_TYPE_ID as lr, resizeVideoParamsSchema as lt, FlowPluginRequirements as m, OcrResult as mn, OutputValidationResult as mr, ImagePlugin as mt, resolveUploadMetadata as n, DocumentPlugin as nn, createDeadLetterQueueService as nr, VideoStreamInput as nt, getBaseName as o, SplitPdfParams 
as on, OCR_OUTPUT_TYPE_ID as or, TranscodeVideoParams as ot, FlowOutputMap as p, OcrResolution as pn, OutputTypeRegistry as pr, describeVideoMetadataSchema as pt, ZipPluginShape as q, FlowProviderShape as qn, ResizeParams as qt, AVAILABLE_TEMPLATE_VARIABLES as r, DocumentPluginLayer as rn, deadLetterQueueService as rr, VideoStreamOptions as rt, getExtension as s, SplitPdfResult as sn, OcrOutput as sr, transcodeVideoParamsSchema as st, ResolvedUploadMetadata as t, DocumentMetadata as tn, DeadLetterQueueServiceShape as tr, VideoPluginShape as tt, RunArgs as u, DocumentAiPluginLayer as un, imageDescriptionOutputSchema as ur, ExtractFrameVideoParams as ut, TypedFlow as v, CredentialProviderShape as vn, InputTypeRegistry as vr, BrightnessTransform as vt, filterOutputsByType as w, TransformMode as wn, FlowExecutionResult as wr, ResizeTransform as wt, createFlow as x, ParallelSchedulerConfig as xn, validateFlowInput as xr, GrayscaleTransform as xt, TypedFlowConfig as y, ExecutionLevel as yn, InputValidationResult as yr, ContrastTransform as yt, DescribeImageParams as z, FlowQueueService as zn, CircuitBreakerFallback as zr, resizeTransformSchema as zt };
|
|
4542
|
+
//# sourceMappingURL=resolve-upload-metadata-CYl2PHIs.d.mts.map
|