@uploadista/core 0.0.20-beta.7 → 0.0.20-beta.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/flow/index.cjs +1 -1
- package/dist/flow/index.d.cts +2 -2
- package/dist/flow/index.d.mts +2 -2
- package/dist/flow/index.mjs +1 -1
- package/dist/flow-BHVkk_6W.cjs +1 -0
- package/dist/{flow-_OmguvZm.mjs → flow-DlhHOlMk.mjs} +2 -2
- package/dist/flow-DlhHOlMk.mjs.map +1 -0
- package/dist/index-B9V5SSxl.d.mts.map +1 -1
- package/dist/{index-DjhpgW08.d.cts → index-DMqaf28W.d.cts} +1483 -1483
- package/dist/index-DMqaf28W.d.cts.map +1 -0
- package/dist/{index-BY620LiC.d.mts → index-RuQUCROH.d.mts} +1483 -1483
- package/dist/index-RuQUCROH.d.mts.map +1 -0
- package/dist/index.cjs +1 -1
- package/dist/index.d.cts +2 -2
- package/dist/index.d.mts +2 -2
- package/dist/index.mjs +1 -1
- package/dist/testing/index.cjs +2 -2
- package/dist/testing/index.d.cts +9 -9
- package/dist/testing/index.d.cts.map +1 -1
- package/dist/testing/index.d.mts +9 -9
- package/dist/testing/index.d.mts.map +1 -1
- package/dist/testing/index.mjs +2 -2
- package/dist/testing/index.mjs.map +1 -1
- package/dist/types/index.d.cts +1 -1
- package/dist/types/index.d.mts +1 -1
- package/dist/upload/index.cjs +1 -1
- package/dist/upload/index.d.cts +2 -2
- package/dist/upload/index.d.mts +2 -2
- package/dist/upload/index.mjs +1 -1
- package/dist/{upload-tLC7uR9U.mjs → upload-C-C7hn1-.mjs} +2 -2
- package/dist/{upload-tLC7uR9U.mjs.map → upload-C-C7hn1-.mjs.map} +1 -1
- package/dist/{upload-BHDuuJ80.cjs → upload-DWBlRXHh.cjs} +1 -1
- package/package.json +7 -7
- package/src/flow/{flow-server.ts → flow-engine.ts} +106 -106
- package/src/flow/index.ts +10 -10
- package/src/flow/nodes/input-node.ts +5 -5
- package/src/flow/nodes/transform-node.ts +11 -14
- package/src/flow/typed-flow.ts +22 -20
- package/src/testing/index.ts +1 -1
- package/src/testing/{mock-upload-server.ts → mock-upload-engine.ts} +10 -10
- package/src/upload/index.ts +1 -1
- package/src/upload/{upload-server.ts → upload-engine.ts} +44 -40
- package/dist/flow-Cv8vCBQ2.cjs +0 -1
- package/dist/flow-_OmguvZm.mjs.map +0 -1
- package/dist/index-BY620LiC.d.mts.map +0 -1
- package/dist/index-DjhpgW08.d.cts.map +0 -1
@@ -3520,6 +3520,534 @@ declare const imageDescriptionOutputSchema: z.ZodObject<{
 }, z.core.$strip>;
 type ImageDescriptionOutput = z.infer<typeof imageDescriptionOutputSchema>;
 //#endregion
+//#region src/flow/types/dead-letter-item.d.ts
+/**
+ * Dead Letter Queue item types and definitions.
+ *
+ * A DeadLetterItem represents a failed flow job that has been captured
+ * for later retry, debugging, or manual intervention.
+ *
+ * @module flow/types/dead-letter-item
+ * @see {@link DeadLetterQueueService} for DLQ operations
+ */
+/**
+ * Status of a Dead Letter Queue item.
+ *
+ * Item lifecycle: pending → retrying → (pending | exhausted | resolved)
+ *
+ * - `pending`: Awaiting retry (either scheduled or manual)
+ * - `retrying`: Currently being retried
+ * - `exhausted`: Max retries reached, requires manual intervention
+ * - `resolved`: Successfully retried or manually resolved
+ */
+type DeadLetterItemStatus = "pending" | "retrying" | "exhausted" | "resolved";
+/**
+ * Error details captured when a flow job fails.
+ *
+ * Contains comprehensive error information for debugging and retry decisions.
+ *
+ * @property code - Error code (e.g., "FLOW_NODE_ERROR", "VALIDATION_ERROR")
+ * @property message - Human-readable error message
+ * @property nodeId - ID of the node that failed (if applicable)
+ * @property stack - Stack trace (included in development mode)
+ */
+interface DeadLetterError {
+  /** Error code for categorization and retry filtering */
+  code: string;
+  /** Human-readable error message */
+  message: string;
+  /** Node that failed (if applicable) */
+  nodeId?: string;
+  /** Stack trace (in dev mode only) */
+  stack?: string;
+}
+/**
+ * Record of a single retry attempt.
+ *
+ * @property attemptedAt - When the retry was attempted
+ * @property error - Error message if the retry failed
+ * @property durationMs - How long the retry took
+ */
+interface DeadLetterRetryAttempt {
+  /** When the retry was attempted */
+  attemptedAt: Date;
+  /** Error message if the retry failed */
+  error: string;
+  /** Duration of the retry attempt in milliseconds */
+  durationMs: number;
+}
+/**
+ * Represents a failed flow job captured in the Dead Letter Queue.
+ *
+ * Contains all information needed to debug, retry, or manually resolve
+ * a failed flow execution.
+ *
+ * @property id - Unique DLQ item identifier
+ * @property jobId - Original flow job ID that failed
+ * @property flowId - Flow definition that was being executed
+ * @property storageId - Target storage for the flow
+ * @property clientId - Client who initiated the job
+ * @property error - Comprehensive error details
+ * @property inputs - Original inputs passed to the flow
+ * @property nodeResults - Partial results from nodes that completed before failure
+ * @property failedAtNodeId - Node where execution failed (if applicable)
+ * @property retryCount - Number of retry attempts made
+ * @property maxRetries - Maximum retries allowed from retry policy
+ * @property nextRetryAt - Scheduled time for next automatic retry
+ * @property retryHistory - History of all retry attempts
+ * @property createdAt - When the item was added to DLQ
+ * @property updatedAt - When the item was last modified
+ * @property expiresAt - TTL for automatic cleanup
+ * @property status - Current status of the DLQ item
+ *
+ * @example
+ * ```typescript
+ * const dlqItem: DeadLetterItem = {
+ *   id: "dlq_abc123",
+ *   jobId: "job_xyz789",
+ *   flowId: "image-resize-pipeline",
+ *   storageId: "s3-production",
+ *   clientId: "client_456",
+ *   error: {
+ *     code: "FLOW_NODE_ERROR",
+ *     message: "External service timeout",
+ *     nodeId: "resize-node"
+ *   },
+ *   inputs: { input: { uploadId: "upload_123" } },
+ *   nodeResults: { "input-node": { file: {...} } },
+ *   failedAtNodeId: "resize-node",
+ *   retryCount: 2,
+ *   maxRetries: 3,
+ *   nextRetryAt: new Date("2024-01-15T10:35:00Z"),
+ *   retryHistory: [
+ *     { attemptedAt: new Date("2024-01-15T10:30:00Z"), error: "Timeout", durationMs: 5000 },
+ *     { attemptedAt: new Date("2024-01-15T10:32:00Z"), error: "Timeout", durationMs: 5000 }
+ *   ],
+ *   createdAt: new Date("2024-01-15T10:30:00Z"),
+ *   updatedAt: new Date("2024-01-15T10:32:00Z"),
+ *   expiresAt: new Date("2024-01-22T10:30:00Z"),
+ *   status: "pending"
+ * };
+ * ```
+ */
+interface DeadLetterItem {
+  /** Unique DLQ item identifier */
+  id: string;
+  /** Original flow job ID that failed */
+  jobId: string;
+  /** Flow definition ID that was being executed */
+  flowId: string;
+  /** Target storage for the flow */
+  storageId: string;
+  /** Client who initiated the job (null for anonymous) */
+  clientId: string | null;
+  /** Comprehensive error details */
+  error: DeadLetterError;
+  /** Original inputs passed to the flow */
+  inputs: Record<string, unknown>;
+  /** Partial results from nodes that completed before failure */
+  nodeResults: Record<string, unknown>;
+  /** Node where execution failed (if applicable) */
+  failedAtNodeId?: string;
+  /** Number of retry attempts made */
+  retryCount: number;
+  /** Maximum retries allowed from retry policy */
+  maxRetries: number;
+  /** Scheduled time for next automatic retry */
+  nextRetryAt?: Date;
+  /** History of all retry attempts */
+  retryHistory: DeadLetterRetryAttempt[];
+  /** When the item was added to DLQ */
+  createdAt: Date;
+  /** When the item was last modified */
+  updatedAt: Date;
+  /** TTL for automatic cleanup */
+  expiresAt?: Date;
+  /** Current status of the DLQ item */
+  status: DeadLetterItemStatus;
+}
+/**
+ * Statistics about the Dead Letter Queue.
+ *
+ * Provides aggregate information for monitoring and alerting.
+ *
+ * @property totalItems - Total number of items in the DLQ
+ * @property byStatus - Count of items by status
+ * @property byFlow - Count of items by flow ID
+ * @property oldestItem - Timestamp of the oldest item
+ * @property averageRetryCount - Average number of retries across all items
+ */
+interface DeadLetterQueueStats {
+  /** Total number of items in the DLQ */
+  totalItems: number;
+  /** Count of items by status */
+  byStatus: Record<DeadLetterItemStatus, number>;
+  /** Count of items by flow ID */
+  byFlow: Record<string, number>;
+  /** Timestamp of the oldest item */
+  oldestItem?: Date;
+  /** Average number of retries across all items */
+  averageRetryCount: number;
+}
+/**
+ * Options for listing DLQ items.
+ *
+ * @property status - Filter by status
+ * @property flowId - Filter by flow ID
+ * @property clientId - Filter by client ID
+ * @property limit - Maximum items to return (default: 50)
+ * @property offset - Number of items to skip (default: 0)
+ */
+interface DeadLetterListOptions {
+  /** Filter by status */
+  status?: DeadLetterItemStatus;
+  /** Filter by flow ID */
+  flowId?: string;
+  /** Filter by client ID */
+  clientId?: string;
+  /** Maximum items to return (default: 50) */
+  limit?: number;
+  /** Number of items to skip for pagination (default: 0) */
+  offset?: number;
+}
+/**
+ * Result of a batch retry operation.
+ *
+ * @property retried - Number of items that were retried
+ * @property succeeded - Number of retries that succeeded
+ * @property failed - Number of retries that failed
+ */
+interface DeadLetterRetryAllResult {
+  /** Number of items that were retried */
+  retried: number;
+  /** Number of retries that succeeded */
+  succeeded: number;
+  /** Number of retries that failed */
+  failed: number;
+}
+/**
+ * Result of a cleanup operation.
+ *
+ * @property deleted - Number of items that were deleted
+ */
+interface DeadLetterCleanupResult {
+  /** Number of items that were deleted */
+  deleted: number;
+}
+/**
+ * Options for cleanup operation.
+ *
+ * @property olderThan - Delete items older than this date
+ * @property status - Only delete items with this status
+ */
+interface DeadLetterCleanupOptions {
+  /** Delete items older than this date */
+  olderThan?: Date;
+  /** Only delete items with this status */
+  status?: "exhausted" | "resolved";
+}
+/**
+ * Result of processing scheduled retries.
+ *
+ * @property processed - Total items processed
+ * @property succeeded - Number that succeeded
+ * @property failed - Number that failed
+ */
+interface DeadLetterProcessResult {
+  /** Total items processed */
+  processed: number;
+  /** Number of successful retries */
+  succeeded: number;
+  /** Number of failed retries */
+  failed: number;
+}
+//#endregion
+//#region src/flow/types/flow-job.d.ts
+/**
+ * Status of an individual node within a flow job.
+ *
+ * Node tasks follow this lifecycle:
+ * started → pending → running → (completed | paused | failed)
+ */
+type FlowJobTaskStatus = "started" | "pending" | "running" | "completed" | "paused" | "failed";
+/**
+ * Represents a single node's execution within a flow job.
+ *
+ * Tasks track individual node execution, storing results, errors, and retry information.
+ * They allow monitoring of which nodes have completed and accessing intermediate results.
+ *
+ * @property nodeId - Unique identifier of the node this task represents
+ * @property status - Current execution status of the node
+ * @property result - Node execution result data (can be partial data if paused, or complete data if finished)
+ * @property error - Error message if the node failed
+ * @property retryCount - Number of retry attempts made before success or final failure
+ * @property createdAt - When the task was created
+ * @property updatedAt - When the task was last updated
+ *
+ * @remarks
+ * The result field can contain:
+ * - Partial/intermediate data when status is "paused" (unknown type)
+ * - Complete data when status is "completed" (could be TypedOutput for output nodes)
+ * - undefined when status is "pending", "running", "started", or "failed"
+ *
+ * For type-safe access to final outputs, use FlowJob.result instead, which contains
+ * the array of TypedOutput from all output nodes.
+ */
+type FlowJobTask = {
+  nodeId: string;
+  status: FlowJobTaskStatus;
+  result?: unknown;
+  error?: string;
+  retryCount?: number;
+  createdAt: Date;
+  updatedAt: Date;
+};
+/**
+ * Represents a flow execution job with full state tracking.
+ *
+ * Jobs are created when a flow is executed and track the entire execution lifecycle.
+ * They store node results, handle paused states, and manage cleanup of intermediate files.
+ *
+ * @property id - Unique job identifier (UUID)
+ * @property flowId - The flow being executed
+ * @property storageId - Storage location for file outputs
+ * @property clientId - Client that initiated the job (for authorization)
+ * @property status - Overall job status
+ * @property createdAt - When the job was created
+ * @property updatedAt - When the job was last updated
+ * @property tasks - Array of node execution tasks
+ * @property error - Error message if the job failed
+ * @property endedAt - When the job completed or failed
+ * @property result - Array of typed outputs from all output nodes (only set when completed)
+ * @property pausedAt - Node ID where execution is paused (if applicable)
+ * @property executionState - State needed to resume a paused flow
+ * @property intermediateFiles - File IDs to cleanup after completion
+ *
+ * @remarks
+ * - Jobs can be paused at nodes that return `{ type: "waiting" }`
+ * - Paused jobs store execution state and can be resumed with new data
+ * - Intermediate files from non-output nodes are automatically cleaned up
+ * - Tasks are updated as nodes progress through their lifecycle
+ * - The result field now contains an array of TypedOutput for all output nodes
+ *
+ * @example
+ * ```typescript
+ * // Create and monitor a job
+ * const job = yield* flowServer.runFlow({
+ *   flowId: "image-pipeline",
+ *   storageId: "storage-1",
+ *   inputs: { input: myFile }
+ * });
+ *
+ * // Poll for status
+ * const status = yield* flowServer.getJobStatus(job.id);
+ * if (status.status === "completed") {
+ *   // Access typed outputs
+ *   console.log("Outputs:", status.result);
+ *   for (const output of status.result || []) {
+ *     console.log(`${output.nodeId} (${output.nodeType}):`, output.data);
+ *   }
+ * } else if (status.status === "paused") {
+ *   // Resume with additional data
+ *   yield* flowServer.resumeFlow({
+ *     jobId: job.id,
+ *     nodeId: status.pausedAt,
+ *     newData: additionalChunk
+ *   });
+ * }
+ * ```
+ */
+/**
+ * Trace context for distributed tracing.
+ * Allows flow operations to be linked under a single trace.
+ */
+type FlowJobTraceContext = {
+  /** 128-bit trace identifier (32 hex characters) */
+  traceId: string;
+  /** 64-bit span identifier (16 hex characters) */
+  spanId: string;
+  /** Trace flags (1 = sampled) */
+  traceFlags: number;
+};
+type FlowJob = {
+  id: string;
+  flowId: string;
+  storageId: string;
+  clientId: string | null;
+  status: FlowJobStatus;
+  createdAt: Date;
+  updatedAt: Date;
+  tasks: FlowJobTask[];
+  error?: string;
+  endedAt?: Date;
+  result?: TypedOutput[];
+  pausedAt?: string;
+  executionState?: {
+    executionOrder: string[];
+    currentIndex: number;
+    inputs: Record<string, unknown>;
+  };
+  intermediateFiles?: string[];
+  /**
+   * OpenTelemetry trace context for distributed tracing.
+   * When set, all flow operations (node executions, uploads) will be
+   * linked as children of this trace context. Enables end-to-end tracing
+   * of flow executions in observability tools like Grafana Tempo.
+   */
+  traceContext?: FlowJobTraceContext;
+};
+/**
+ * Overall status of a flow job.
+ *
+ * Job lifecycle: started → running → (completed | failed | cancelled)
+ * Or with pauses: started → running → paused → running → (completed | failed | cancelled)
+ * User actions: running → paused (via pauseFlow) or running → cancelled (via cancelFlow)
+ */
+type FlowJobStatus = "pending" | "running" | "completed" | "failed" | "started" | "paused" | "cancelled";
+//#endregion
+//#region src/flow/dead-letter-queue.d.ts
+/**
+ * Shape of the Dead Letter Queue service.
+ *
+ * Provides all operations for managing failed flow jobs including
+ * adding items, querying, retrying, and cleanup.
+ */
+interface DeadLetterQueueServiceShape {
+  /**
+   * Add a failed job to the DLQ with full failure context.
+   *
+   * @param job - The failed flow job
+   * @param error - The error that caused the failure
+   * @param retryPolicy - Optional retry policy (uses default if not provided)
+   * @returns The created DLQ item
+   */
+  add(job: FlowJob, error: UploadistaError, retryPolicy?: RetryPolicy): Effect.Effect<DeadLetterItem, UploadistaError>;
+  /**
+   * Get a specific DLQ item by ID.
+   *
+   * @param itemId - The DLQ item ID
+   * @returns The DLQ item
+   */
+  get(itemId: string): Effect.Effect<DeadLetterItem, UploadistaError>;
+  /**
+   * Get a DLQ item by ID, returning None if not found.
+   *
+   * @param itemId - The DLQ item ID
+   * @returns Option of the DLQ item
+   */
+  getOption(itemId: string): Effect.Effect<Option.Option<DeadLetterItem>, UploadistaError>;
+  /**
+   * Delete a DLQ item.
+   *
+   * @param itemId - The DLQ item ID to delete
+   */
+  delete(itemId: string): Effect.Effect<void, UploadistaError>;
+  /**
+   * List DLQ items with optional filtering and pagination.
+   *
+   * @param options - Filter and pagination options
+   * @returns List of items and total count
+   */
+  list(options?: DeadLetterListOptions): Effect.Effect<{
+    items: DeadLetterItem[];
+    total: number;
+  }, UploadistaError>;
+  /**
+   * Update a DLQ item.
+   *
+   * @param itemId - The DLQ item ID
+   * @param updates - Partial updates to apply
+   * @returns The updated item
+   */
+  update(itemId: string, updates: Partial<DeadLetterItem>): Effect.Effect<DeadLetterItem, UploadistaError>;
+  /**
+   * Mark a DLQ item as being retried.
+   *
+   * @param itemId - The DLQ item ID
+   * @returns The updated item with status "retrying"
+   */
+  markRetrying(itemId: string): Effect.Effect<DeadLetterItem, UploadistaError>;
+  /**
+   * Record a failed retry attempt.
+   *
+   * @param itemId - The DLQ item ID
+   * @param error - Error message from the failed retry
+   * @param durationMs - Duration of the retry attempt
+   * @returns The updated item
+   */
+  recordRetryFailure(itemId: string, error: string, durationMs: number): Effect.Effect<DeadLetterItem, UploadistaError>;
+  /**
+   * Mark a DLQ item as resolved (successfully retried or manually resolved).
+   *
+   * @param itemId - The DLQ item ID
+   * @returns The updated item with status "resolved"
+   */
+  markResolved(itemId: string): Effect.Effect<DeadLetterItem, UploadistaError>;
+  /**
+   * Get items that are due for scheduled retry.
+   *
+   * @param limit - Maximum number of items to return
+   * @returns List of items ready for retry
+   */
+  getScheduledRetries(limit?: number): Effect.Effect<DeadLetterItem[], UploadistaError>;
+  /**
+   * Cleanup old DLQ items based on options.
+   *
+   * @param options - Cleanup criteria
+   * @returns Number of items deleted
+   */
+  cleanup(options?: DeadLetterCleanupOptions): Effect.Effect<DeadLetterCleanupResult, UploadistaError>;
+  /**
+   * Get DLQ statistics.
+   *
+   * @returns Aggregate statistics about the DLQ
+   */
+  getStats(): Effect.Effect<DeadLetterQueueStats, UploadistaError>;
+}
+declare const DeadLetterQueueService_base: Context.TagClass<DeadLetterQueueService, "DeadLetterQueueService", DeadLetterQueueServiceShape>;
+/**
+ * Effect-TS context tag for the Dead Letter Queue service.
+ *
+ * @example
+ * ```typescript
+ * const effect = Effect.gen(function* () {
+ *   const dlq = yield* DeadLetterQueueService;
+ *   const stats = yield* dlq.getStats();
+ *   console.log(`DLQ has ${stats.totalItems} items`);
+ * });
+ * ```
+ */
+declare class DeadLetterQueueService extends DeadLetterQueueService_base {
+  /**
+   * Access the DLQ service optionally (for integration in FlowServer).
+   * Returns Option.none if the service is not provided.
+   */
+  static optional: Effect.Effect<Option.Option<DeadLetterQueueServiceShape>, never, never>;
+}
+/**
+ * Creates the Dead Letter Queue service implementation.
+ *
+ * @returns Effect that creates the DLQ service
+ */
+declare function createDeadLetterQueueService(): Effect.Effect<DeadLetterQueueServiceShape, never, DeadLetterQueueKVStore>;
+/**
+ * Effect Layer that creates the DeadLetterQueueService.
+ *
+ * @example
+ * ```typescript
+ * const program = Effect.gen(function* () {
+ *   const dlq = yield* DeadLetterQueueService;
+ *   const stats = yield* dlq.getStats();
+ *   return stats;
+ * }).pipe(
+ *   Effect.provide(deadLetterQueueService),
+ *   Effect.provide(deadLetterQueueKvStore),
+ *   Effect.provide(baseStoreLayer)
+ * );
+ * ```
+ */
+declare const deadLetterQueueService: Layer.Layer<DeadLetterQueueService, never, DeadLetterQueueKVStore>;
+//#endregion
 //#region src/types/event-broadcaster.d.ts
 /**
  * Event broadcaster interface for pub/sub messaging across distributed instances.
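
The Dead Letter Queue added in this hunk composes like any other Effect service. A minimal monitoring sketch, assuming the service is re-exported from the package's flow entry point (the import path is an assumption; the method signatures come from the declarations above, and the `deadLetterQueueService` layer from its JSDoc is expected to be provided):

```typescript
import { Effect } from "effect";
// Import path is an assumption based on the dist/flow entry point.
import { DeadLetterQueueService } from "@uploadista/core/flow";

const dlqMaintenance = Effect.gen(function* () {
  const dlq = yield* DeadLetterQueueService;

  // Aggregate stats for monitoring/alerting.
  const stats = yield* dlq.getStats();
  console.log(`DLQ holds ${stats.totalItems} items, avg ${stats.averageRetryCount} retries`);

  // Items that exhausted their retries need manual attention.
  const { items, total } = yield* dlq.list({ status: "exhausted", limit: 20 });
  console.log(`${total} exhausted items; inspecting the first ${items.length}`);

  // Prune resolved items older than seven days.
  const weekAgo = new Date(Date.now() - 7 * 24 * 60 * 60 * 1000);
  const { deleted } = yield* dlq.cleanup({ olderThan: weekAgo, status: "resolved" });
  console.log(`Cleaned up ${deleted} resolved items`);
});
```
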
@@ -4496,9 +5024,9 @@ declare const detectMimeType: (buffer: Uint8Array, filename?: string) => string;
  */
 declare function compareMimeTypes(declared: string, detected: string): boolean;
 //#endregion
-//#region src/upload/upload-server.d.ts
+//#region src/upload/upload-engine.d.ts
 /**
- * Legacy configuration options for UploadServer.
+ * Legacy configuration options for UploadEngine.
  *
  * @deprecated Use Effect Layers instead of this configuration object.
  * This type is kept for backward compatibility.
@@ -4510,7 +5038,7 @@ declare function compareMimeTypes(declared: string, detected: string): boolean;
  * @property middlewares - Optional request middlewares
  * @property withTracing - Enable Effect tracing for debugging
  */
-type UploadServerOptions = {
+type UploadEngineOptions = {
   dataStore: ((storageId: string) => Promise<DataStore<UploadFile>>) | DataStore<UploadFile>;
   kvStore: KvStore<UploadFile>;
   eventEmitter: EventEmitter<UploadEvent>;
@@ -4519,7 +5047,7 @@ type UploadServerOptions = {
   withTracing?: boolean;
 };
 /**
- * UploadServer service interface.
+ * UploadEngine service interface.
  *
  * This is the core upload handling service that provides all file upload operations.
  * It manages upload lifecycle, resumable uploads, progress tracking, and storage integration.
@@ -4541,7 +5069,7 @@ type UploadServerOptions = {
  * ```typescript
  * // Basic upload flow
  * const program = Effect.gen(function* () {
- *   const server = yield* UploadServer;
+ *   const server = yield* UploadEngine;
  *
  *   // 1. Create upload
  *   const inputFile: InputFile = {
@@ -4564,7 +5092,7 @@ type UploadServerOptions = {
  *
  * // Upload with WebSocket progress tracking
  * const uploadWithProgress = Effect.gen(function* () {
- *   const server = yield* UploadServer;
+ *   const server = yield* UploadEngine;
  *
  *   // Subscribe to progress events
  *   yield* server.subscribeToUploadEvents(uploadId, websocket);
@@ -4580,7 +5108,7 @@ type UploadServerOptions = {
  *
  * // Upload from URL
  * const urlUpload = Effect.gen(function* () {
- *   const server = yield* UploadServer;
+ *   const server = yield* UploadEngine;
  *
  *   const inputFile: InputFile = {
  *     storageId: "s3-production",
@@ -4599,7 +5127,7 @@ type UploadServerOptions = {
  * });
  * ```
  */
-type UploadServerShape = {
+type UploadEngineShape = {
   createUpload: (inputFile: InputFile, clientId: string | null) => Effect.Effect<UploadFile, UploadistaError>;
   uploadChunk: (uploadId: string, clientId: string | null, chunk: ReadableStream) => Effect.Effect<UploadFile, UploadistaError>;
   getCapabilities: (storageId: string, clientId: string | null) => Effect.Effect<DataStoreCapabilities, UploadistaError>;
@@ -4622,7 +5150,7 @@ type UploadServerShape = {
  *
  * @example
  * ```typescript
- * const server = yield* UploadServer;
+ * const server = yield* UploadEngine;
  * const stream = yield* server.readStream(uploadId, clientId, { chunkSize: 65536 });
  * // Process stream chunk by chunk with bounded memory
  * yield* Stream.runForEach(stream, (chunk) => processChunk(chunk));
@@ -4644,7 +5172,7 @@ type UploadServerShape = {
  *
  * @example
  * ```typescript
- * const server = yield* UploadServer;
+ * const server = yield* UploadEngine;
 * const result = yield* server.uploadStream(
 *   {
 *     storageId: "s3-production",
@@ -4666,38 +5194,38 @@ type UploadServerShape = {
   subscribeToUploadEvents: (uploadId: string, connection: WebSocketConnection) => Effect.Effect<void, UploadistaError>;
   unsubscribeFromUploadEvents: (uploadId: string) => Effect.Effect<void, UploadistaError>;
 };
-declare const UploadServer_base: Context.TagClass<UploadServer, "UploadServer", UploadServerShape>;
+declare const UploadEngine_base: Context.TagClass<UploadEngine, "UploadEngine", UploadEngineShape>;
 /**
- * Effect-TS context tag for the UploadServer service.
+ * Effect-TS context tag for the UploadEngine service.
  *
- * Use this tag to access the UploadServer in an Effect context.
+ * Use this tag to access the UploadEngine in an Effect context.
  * The server must be provided via a Layer or dependency injection.
  *
  * @example
  * ```typescript
- * // Access UploadServer in an Effect
+ * // Access UploadEngine in an Effect
  * const uploadEffect = Effect.gen(function* () {
- *   const server = yield* UploadServer;
+ *   const server = yield* UploadEngine;
  *   const upload = yield* server.createUpload(inputFile, clientId);
  *   return upload;
  * });
  *
- * // Provide UploadServer layer
+ * // Provide UploadEngine layer
  * const program = uploadEffect.pipe(
- *   Effect.provide(uploadServer),
+ *   Effect.provide(uploadEngine),
  *   Effect.provide(uploadFileKvStore),
  *   Effect.provide(dataStoreLayer),
  *   Effect.provide(eventEmitterLayer)
  * );
  * ```
  */
-declare class UploadServer extends UploadServer_base {}
+declare class UploadEngine extends UploadEngine_base {}
 /**
- * Creates the UploadServer implementation.
+ * Creates the UploadEngine implementation.
  *
- * This function constructs the UploadServer service by composing all required
+ * This function constructs the UploadEngine service by composing all required
  * dependencies (KV store, data stores, event emitter, ID generator). It implements
- * all upload operations defined in UploadServerShape.
+ * all upload operations defined in UploadEngineShape.
  *
  * The server automatically handles:
  * - Upload lifecycle management (create, resume, complete)
@@ -4705,24 +5233,24 @@ declare class UploadServer extends UploadServer_base {}
  * - Storage backend routing based on storageId
  * - Error handling with proper UploadistaError types
  *
- * @returns An Effect that yields the UploadServerShape implementation
+ * @returns An Effect that yields the UploadEngineShape implementation
  *
  * @example
  * ```typescript
- * // Create a custom UploadServer layer
- * const myUploadServer = Layer.effect(
- *   UploadServer,
- *   createUploadServer()
+ * // Create a custom UploadEngine layer
+ * const myUploadEngine = Layer.effect(
+ *   UploadEngine,
+ *   createUploadEngine()
  * );
  *
  * // Use in a program
  * const program = Effect.gen(function* () {
- *   const server = yield* UploadServer;
+ *   const server = yield* UploadEngine;
  *   // Use server operations...
- * }).pipe(Effect.provide(myUploadServer));
+ * }).pipe(Effect.provide(myUploadEngine));
  * ```
  */
-declare function createUploadServer(): Effect.Effect<{
+declare function createUploadEngine(): Effect.Effect<{
   upload: (inputFile: InputFile, clientId: string | null, stream: ReadableStream) => Effect.Effect<UploadFile, UploadistaError, never>;
   uploadFromUrl: (inputFile: InputFile, clientId: string | null, url: string) => Effect.Effect<UploadFile, UploadistaError, never>;
   createUpload: (inputFile: InputFile, clientId: string | null) => Effect.Effect<UploadFile, UploadistaError, never>;
@@ -4740,9 +5268,9 @@ declare function createUploadServer(): Effect.Effect<{
   unsubscribeFromUploadEvents: (uploadId: string) => Effect.Effect<void, UploadistaError, never>;
 }, never, GenerateId | UploadFileDataStores | UploadFileKVStore | UploadEventEmitter>;
 /**
- * Pre-built UploadServer Effect Layer.
+ * Pre-built UploadEngine Effect Layer.
  *
- * This layer provides a ready-to-use UploadServer implementation that can be
+ * This layer provides a ready-to-use UploadEngine implementation that can be
  * composed with other layers to build a complete upload system.
  *
  * Required dependencies:
@@ -4755,7 +5283,7 @@ declare function createUploadServer(): Effect.Effect<{
  * ```typescript
  * // Compose a complete upload system
  * const fullUploadSystem = Layer.mergeAll(
- *   uploadServer,
+ *   uploadEngine,
  *   uploadFileKvStore,
  *   dataStoreLayer,
  *   uploadEventEmitter,
@@ -4764,12 +5292,12 @@ declare function createUploadServer(): Effect.Effect<{
  *
  * // Use in application
  * const app = Effect.gen(function* () {
- *   const server = yield* UploadServer;
+ *   const server = yield* UploadEngine;
  *   // Perform uploads...
  * }).pipe(Effect.provide(fullUploadSystem));
  * ```
  */
-declare const uploadServer: Layer.Layer<UploadServer, never, GenerateId | UploadFileDataStores | UploadFileKVStore | UploadEventEmitter>;
+declare const uploadEngine: Layer.Layer<UploadEngine, never, GenerateId | UploadFileDataStores | UploadFileKVStore | UploadEventEmitter>;
 //#endregion
 //#region src/upload/upload-strategy-negotiator.d.ts
 /**
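
The hunks above are a mechanical UploadServer → UploadEngine rename; the service shape is unchanged, so migration is an identifier swap at the tag and layer level. A sketch under that assumption (the import path and the application-specific dependency layers are hypothetical; the tag, layer, and `getCapabilities` signature come from the declarations above):

```typescript
import { Effect } from "effect";
// Import path is an assumption; UploadEngine/uploadEngine are the renamed exports.
import { UploadEngine, uploadEngine } from "@uploadista/core/upload";
// Hypothetical application layers, mirroring the JSDoc example above.
import { uploadFileKvStore, dataStoreLayer, eventEmitterLayer } from "./layers";

const capabilities = Effect.gen(function* () {
  const engine = yield* UploadEngine; // beta.7: yield* UploadServer
  return yield* engine.getCapabilities("s3-production", null);
}).pipe(
  Effect.provide(uploadEngine), // beta.7: Effect.provide(uploadServer)
  Effect.provide(uploadFileKvStore),
  Effect.provide(dataStoreLayer),
  Effect.provide(eventEmitterLayer),
);
```
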
@@ -4796,273 +5324,130 @@ type UploadStrategyOptions = {
  * @property parallelUploads - The negotiated number of parallel uploads
  * @property reasoning - Array of reasoning strings explaining the decisions
  * @property warnings - Array of warning messages about adjustments made
- */
-type NegotiatedStrategy = {
-  strategy: UploadStrategy;
-  chunkSize: number;
-  parallelUploads: number;
-  reasoning: string[];
-  warnings: string[];
-};
-/**
- * Negotiates the optimal upload strategy based on data store capabilities and file characteristics.
- *
- * This class analyzes data store capabilities, file size, and user preferences to determine
- * the best upload strategy (single, parallel, resumable) and optimal parameters like chunk size
- * and parallel connection count.
- *
- * The negotiator considers:
- * - Data store capabilities (parallel uploads, resumable uploads, concatenation)
- * - File size and chunk size constraints
- * - User preferences and requirements
- * - Performance optimization opportunities
- *
- * @example
- * ```typescript
- * // Create negotiator for S3 data store
- * const negotiator = new UploadStrategyNegotiator(
- *   s3Capabilities,
- *   (strategy) => s3Capabilities.supportsStrategy(strategy)
- * );
- *
- * // Negotiate strategy for large file
- * const result = negotiator.negotiateStrategy({
- *   fileSize: 100_000_000, // 100MB
- *   preferredStrategy: "parallel",
- *   preferredChunkSize: 5_000_000, // 5MB chunks
- *   parallelUploads: 4
- * });
- *
- * console.log(result.strategy); // "parallel"
- * console.log(result.chunkSize); // 5_000_000
- * console.log(result.reasoning); // ["Using preferred strategy: parallel", ...]
- * ```
- */
-declare class UploadStrategyNegotiator {
-  private capabilities;
-  private validateUploadStrategy;
-  /**
-   * Creates a new upload strategy negotiator.
-   *
-   * @param capabilities - Data store capabilities and constraints
-   * @param validateUploadStrategy - Function to validate if a strategy is supported
-   */
-  constructor(capabilities: DataStoreCapabilities, validateUploadStrategy: (strategy: UploadStrategy) => boolean);
-  /**
-   * Negotiates the optimal upload strategy based on options and data store capabilities.
-   *
-   * This method analyzes the provided options and data store capabilities to determine
-   * the best upload strategy, chunk size, and parallel upload settings. It considers
-   * user preferences, file size, and data store constraints to make optimal decisions.
-   *
-   * The negotiation process:
-   * 1. Validates preferred strategy against data store capabilities
-   * 2. Automatically selects strategy based on file size and capabilities
-   * 3. Adjusts chunk size to fit within data store constraints
-   * 4. Validates parallel upload settings
-   * 5. Ensures final strategy is supported by the data store
-   *
-   * @param options - Upload strategy options including file size and preferences
-   * @returns Negotiated strategy with reasoning and warnings
-   *
-   * @example
-   * ```typescript
-   * const result = negotiator.negotiateStrategy({
-   *   fileSize: 50_000_000, // 50MB
-   *   preferredStrategy: "parallel",
-   *   preferredChunkSize: 5_000_000, // 5MB
-   *   parallelUploads: 3
-   * });
-   *
-   * console.log(result.strategy); // "parallel"
-   * console.log(result.chunkSize); // 5_000_000
-   * console.log(result.parallelUploads); // 3
-   * console.log(result.reasoning); // ["Using preferred strategy: parallel", ...]
-   * console.log(result.warnings); // [] (no warnings)
-   * ```
-   */
-  negotiateStrategy(options: UploadStrategyOptions): NegotiatedStrategy;
-  /**
-   * Gets the data store capabilities used by this negotiator.
-   *
-   * @returns The data store capabilities and constraints
-   */
-  getDataStoreCapabilities(): DataStoreCapabilities;
-  /**
-   * Validates upload strategy configuration against data store capabilities.
-   *
-   * This method checks if the provided configuration is valid for the current
-   * data store capabilities without performing the actual negotiation. It's
-   * useful for pre-validation before attempting to negotiate a strategy.
-   *
-   * @param options - Upload strategy options to validate
-   * @returns Validation result with validity flag and error messages
-   *
-   * @example
-   * ```typescript
-   * const validation = negotiator.validateConfiguration({
-   *   fileSize: 10_000_000,
-   *   preferredStrategy: "parallel",
-   *   preferredChunkSize: 1_000_000,
-   *   parallelUploads: 5
-   * });
-   *
-   * if (!validation.valid) {
-   *   console.log("Configuration errors:", validation.errors);
-   *   // Handle validation errors
-   * }
-   * ```
-   */
-  validateConfiguration(options: UploadStrategyOptions): {
-    valid: boolean;
-    errors: string[];
-  };
-}
-//#endregion
-//#region src/flow/types/flow-job.d.ts
-/**
- * Status of an individual node within a flow job.
- *
- * Node tasks follow this lifecycle:
- * started → pending → running → (completed | paused | failed)
- */
-type FlowJobTaskStatus = "started" | "pending" | "running" | "completed" | "paused" | "failed";
-/**
- * Represents a single node's execution within a flow job.
- *
- * Tasks track individual node execution, storing results, errors, and retry information.
- * They allow monitoring of which nodes have completed and accessing intermediate results.
- *
- * @property nodeId - Unique identifier of the node this task represents
- * @property status - Current execution status of the node
- * @property result - Node execution result data (can be partial data if paused, or complete data if finished)
- * @property error - Error message if the node failed
- * @property retryCount - Number of retry attempts made before success or final failure
- * @property createdAt - When the task was created
- * @property updatedAt - When the task was last updated
- *
- * @remarks
- * The result field can contain:
- * - Partial/intermediate data when status is "paused" (unknown type)
- * - Complete data when status is "completed" (could be TypedOutput for output nodes)
- * - undefined when status is "pending", "running", "started", or "failed"
- *
- * For type-safe access to final outputs, use FlowJob.result instead, which contains
- * the array of TypedOutput from all output nodes.
- */
-type FlowJobTask = {
-  nodeId: string;
-  status: FlowJobTaskStatus;
-  result?: unknown;
-  error?: string;
-  retryCount?: number;
-  createdAt: Date;
-  updatedAt: Date;
-};
-/**
- * Represents a flow execution job with full state tracking.
- *
- * Jobs are created when a flow is executed and track the entire execution lifecycle.
- * They store node results, handle paused states, and manage cleanup of intermediate files.
- *
- * @property id - Unique job identifier (UUID)
- * @property flowId - The flow being executed
- * @property storageId - Storage location for file outputs
- * @property clientId - Client that initiated the job (for authorization)
- * @property status - Overall job status
- * @property createdAt - When the job was created
- * @property updatedAt - When the job was last updated
- * @property tasks - Array of node execution tasks
- * @property error - Error message if the job failed
- * @property endedAt - When the job completed or failed
- * @property result - Array of typed outputs from all output nodes (only set when completed)
- * @property pausedAt - Node ID where execution is paused (if applicable)
- * @property executionState - State needed to resume a paused flow
- * @property intermediateFiles - File IDs to cleanup after completion
- *
- * @remarks
- * - Jobs can be paused at nodes that return `{ type: "waiting" }`
- * - Paused jobs store execution state and can be resumed with new data
- * - Intermediate files from non-output nodes are automatically cleaned up
- * - Tasks are updated as nodes progress through their lifecycle
- * - The result field now contains an array of TypedOutput for all output nodes
- *
- * @example
- * ```typescript
- * // Create and monitor a job
- * const job = yield* flowServer.runFlow({
- *   flowId: "image-pipeline",
- *   storageId: "storage-1",
- *   inputs: { input: myFile }
- * });
- *
- * // Poll for status
- * const status = yield* flowServer.getJobStatus(job.id);
- * if (status.status === "completed") {
- *   // Access typed outputs
- *   console.log("Outputs:", status.result);
- *   for (const output of status.result || []) {
- *     console.log(`${output.nodeId} (${output.nodeType}):`, output.data);
- *   }
- * } else if (status.status === "paused") {
- *   // Resume with additional data
- *   yield* flowServer.resumeFlow({
- *     jobId: job.id,
- *     nodeId: status.pausedAt,
- *     newData: additionalChunk
- *   });
- * }
- * ```
- */
-/**
- * Trace context for distributed tracing.
- * Allows flow operations to be linked under a single trace.
- */
-type FlowJobTraceContext = {
-  /** 128-bit trace identifier (32 hex characters) */
-  traceId: string;
-  /** 64-bit span identifier (16 hex characters) */
-  spanId: string;
-  /** Trace flags (1 = sampled) */
-  traceFlags: number;
-};
-type FlowJob = {
-  id: string;
-  flowId: string;
-  storageId: string;
-  clientId: string | null;
-  status: FlowJobStatus;
-  createdAt: Date;
-  updatedAt: Date;
-  tasks: FlowJobTask[];
-  error?: string;
-  endedAt?: Date;
-  result?: TypedOutput[];
-  pausedAt?: string;
-  executionState?: {
-    executionOrder: string[];
-    currentIndex: number;
-    inputs: Record<string, unknown>;
-  };
-  intermediateFiles?: string[];
-  /**
-   * OpenTelemetry trace context for distributed tracing.
-   * When set, all flow operations (node executions, uploads) will be
-   * linked as children of this trace context. Enables end-to-end tracing
-   * of flow executions in observability tools like Grafana Tempo.
-   */
-  traceContext?: FlowJobTraceContext;
+ */
+type NegotiatedStrategy = {
+  strategy: UploadStrategy;
+  chunkSize: number;
+  parallelUploads: number;
+  reasoning: string[];
+  warnings: string[];
 };
 /**
- * Overall status of a flow job.
+ * Negotiates the optimal upload strategy based on data store capabilities and file characteristics.
  *
- * Job lifecycle: started → running → (completed | failed | cancelled)
- * Or with pauses: started → running → paused → running → (completed | failed | cancelled)
- * User actions: running → paused (via pauseFlow) or running → cancelled (via cancelFlow)
+ * This class analyzes data store capabilities, file size, and user preferences to determine
+ * the best upload strategy (single, parallel, resumable) and optimal parameters like chunk size
+ * and parallel connection count.
+ *
+ * The negotiator considers:
+ * - Data store capabilities (parallel uploads, resumable uploads, concatenation)
+ * - File size and chunk size constraints
+ * - User preferences and requirements
+ * - Performance optimization opportunities
+ *
+ * @example
+ * ```typescript
+ * // Create negotiator for S3 data store
+ * const negotiator = new UploadStrategyNegotiator(
+ *   s3Capabilities,
+ *   (strategy) => s3Capabilities.supportsStrategy(strategy)
+ * );
+ *
+ * // Negotiate strategy for large file
+ * const result = negotiator.negotiateStrategy({
+ *   fileSize: 100_000_000, // 100MB
+ *   preferredStrategy: "parallel",
+ *   preferredChunkSize: 5_000_000, // 5MB chunks
+ *   parallelUploads: 4
+ * });
+ *
+ * console.log(result.strategy); // "parallel"
+ * console.log(result.chunkSize); // 5_000_000
+ * console.log(result.reasoning); // ["Using preferred strategy: parallel", ...]
+ * ```
  */
-type FlowJobStatus = "pending" | "running" | "completed" | "failed" | "started" | "paused" | "cancelled";
+declare class UploadStrategyNegotiator {
+  private capabilities;
+  private validateUploadStrategy;
+  /**
+   * Creates a new upload strategy negotiator.
+   *
+   * @param capabilities - Data store capabilities and constraints
+   * @param validateUploadStrategy - Function to validate if a strategy is supported
+   */
+  constructor(capabilities: DataStoreCapabilities, validateUploadStrategy: (strategy: UploadStrategy) => boolean);
+  /**
+   * Negotiates the optimal upload strategy based on options and data store capabilities.
+   *
+   * This method analyzes the provided options and data store capabilities to determine
+   * the best upload strategy, chunk size, and parallel upload settings. It considers
+   * user preferences, file size, and data store constraints to make optimal decisions.
+   *
+   * The negotiation process:
+   * 1. Validates preferred strategy against data store capabilities
+   * 2. Automatically selects strategy based on file size and capabilities
+   * 3. Adjusts chunk size to fit within data store constraints
+   * 4. Validates parallel upload settings
+   * 5. Ensures final strategy is supported by the data store
+   *
+   * @param options - Upload strategy options including file size and preferences
+   * @returns Negotiated strategy with reasoning and warnings
+   *
+   * @example
+   * ```typescript
+   * const result = negotiator.negotiateStrategy({
+   *   fileSize: 50_000_000, // 50MB
+   *   preferredStrategy: "parallel",
+   *   preferredChunkSize: 5_000_000, // 5MB
+   *   parallelUploads: 3
+   * });
+   *
+   * console.log(result.strategy); // "parallel"
+   * console.log(result.chunkSize); // 5_000_000
+   * console.log(result.parallelUploads); // 3
+   * console.log(result.reasoning); // ["Using preferred strategy: parallel", ...]
+   * console.log(result.warnings); // [] (no warnings)
+   * ```
+   */
+  negotiateStrategy(options: UploadStrategyOptions): NegotiatedStrategy;
+  /**
+   * Gets the data store capabilities used by this negotiator.
+   *
+   * @returns The data store capabilities and constraints
+   */
+  getDataStoreCapabilities(): DataStoreCapabilities;
+  /**
+   * Validates upload strategy configuration against data store capabilities.
+   *
+   * This method checks if the provided configuration is valid for the current
+   * data store capabilities without performing the actual negotiation. It's
+   * useful for pre-validation before attempting to negotiate a strategy.
+   *
+   * @param options - Upload strategy options to validate
+   * @returns Validation result with validity flag and error messages
+   *
+   * @example
+   * ```typescript
+   * const validation = negotiator.validateConfiguration({
+   *   fileSize: 10_000_000,
+   *   preferredStrategy: "parallel",
+   *   preferredChunkSize: 1_000_000,
+   *   parallelUploads: 5
+   * });
+   *
+   * if (!validation.valid) {
+   *   console.log("Configuration errors:", validation.errors);
+   *   // Handle validation errors
+   * }
+   * ```
+   */
+  validateConfiguration(options: UploadStrategyOptions): {
+    valid: boolean;
+    errors: string[];
+  };
+}
 //#endregion
-//#region src/flow/flow-server.d.ts
+//#region src/flow/flow-engine.d.ts
 /**
  * WaitUntil callback type for keeping background tasks alive.
  * Used in serverless environments like Cloudflare Workers to prevent
@@ -5095,7 +5480,7 @@ declare class FlowWaitUntil extends FlowWaitUntil_base {
|
|
|
5095
5480
|
/**
|
|
5096
5481
|
* Flow provider interface that applications must implement.
|
|
5097
5482
|
*
|
|
5098
|
-
* This interface defines how the
|
|
5483
|
+
* This interface defines how the FlowEngine retrieves flow definitions.
|
|
5099
5484
|
* Applications provide their own implementation to load flows from a database,
|
|
5100
5485
|
* configuration files, or any other source.
|
|
5101
5486
|
*
|
|
@@ -5123,7 +5508,7 @@ declare class FlowWaitUntil extends FlowWaitUntil_base {
|
|
|
5123
5508
|
* })
|
|
5124
5509
|
* };
|
|
5125
5510
|
*
|
|
5126
|
-
* // Provide to
|
|
5511
|
+
* // Provide to FlowEngine
|
|
5127
5512
|
* const flowProviderLayer = Layer.succeed(FlowProvider, dbFlowProvider);
|
|
5128
5513
|
* ```
|
|
5129
5514
|
*/
|
|
@@ -5135,7 +5520,7 @@ declare const FlowProvider_base: Context.TagClass<FlowProvider, "FlowProvider",
  * Effect-TS context tag for the FlowProvider service.
  *
  * Applications must provide an implementation of FlowProviderShape
- * to enable the FlowServer to retrieve flow definitions.
+ * to enable the FlowEngine to retrieve flow definitions.
  *
  * @example
  * ```typescript
@@ -5171,7 +5556,7 @@ declare class FlowProvider extends FlowProvider_base {}
  * ```typescript
  * // Execute a flow
  * const program = Effect.gen(function* () {
- * const server = yield* FlowServer;
+ * const server = yield* FlowEngine;
  *
  * // Start flow execution (returns immediately)
  * const job = yield* server.runFlow({
@@ -5198,7 +5583,7 @@ declare class FlowProvider extends FlowProvider_base {}
  *
  * // Resume a paused flow
  * const resume = Effect.gen(function* () {
- * const server = yield* FlowServer;
+ * const server = yield* FlowEngine;
  *
  * // Flow paused waiting for user input at node "approval_1"
  * const job = yield* server.resumeFlow({
@@ -5213,7 +5598,7 @@ declare class FlowProvider extends FlowProvider_base {}
  *
  * // Cancel a flow
  * const cancel = Effect.gen(function* () {
- * const server = yield* FlowServer;
+ * const server = yield* FlowEngine;
  *
  * // Cancel flow and cleanup intermediate files
  * const job = yield* server.cancelFlow("job123", "client123");
@@ -5223,7 +5608,7 @@ declare class FlowProvider extends FlowProvider_base {}
  *
  * // Check flow structure before execution
  * const inspect = Effect.gen(function* () {
- * const server = yield* FlowServer;
+ * const server = yield* FlowEngine;
  *
  * const flowData = yield* server.getFlowData("resize-optimize", "client123");
  * console.log("Nodes:", flowData.nodes);
@@ -5233,7 +5618,7 @@ declare class FlowProvider extends FlowProvider_base {}
  * });
  * ```
  */
- type FlowServerShape = {
+ type FlowEngineShape = {
  getFlow: <TRequirements>(flowId: string, clientId: string | null) => Effect.Effect<Flow<any, any, TRequirements>, UploadistaError>;
  getFlowData: (flowId: string, clientId: string | null) => Effect.Effect<FlowData, UploadistaError>;
  runFlow: <TRequirements>({
@@ -5264,18 +5649,18 @@ type FlowServerShape = {
  subscribeToFlowEvents: (jobId: string, connection: WebSocketConnection) => Effect.Effect<void, UploadistaError>;
  unsubscribeFromFlowEvents: (jobId: string) => Effect.Effect<void, UploadistaError>;
  };
- declare const FlowServer_base: Context.TagClass<FlowServer, "FlowServer", FlowServerShape>;
+ declare const FlowEngine_base: Context.TagClass<FlowEngine, "FlowEngine", FlowEngineShape>;
  /**
- * Effect-TS context tag for the FlowServer service.
+ * Effect-TS context tag for the FlowEngine service.
  *
- * Use this tag to access the FlowServer in an Effect context.
+ * Use this tag to access the FlowEngine in an Effect context.
  * The server must be provided via a Layer or dependency injection.
  *
  * @example
  * ```typescript
- * // Access FlowServer in an Effect
+ * // Access FlowEngine in an Effect
  * const flowEffect = Effect.gen(function* () {
- * const server = yield* FlowServer;
+ * const server = yield* FlowEngine;
  * const job = yield* server.runFlow({
  * flowId: "my-flow",
  * storageId: "s3",
@@ -5285,7 +5670,7 @@ declare const FlowServer_base: Context.TagClass<FlowServer, "FlowServer", FlowSe
  * return job;
  * });
  *
- * // Provide FlowServer layer
+ * // Provide FlowEngine layer
  * const program = flowEffect.pipe(
  * Effect.provide(flowServer),
  * Effect.provide(flowProviderLayer),
@@ -5293,9 +5678,9 @@ declare const FlowServer_base: Context.TagClass<FlowServer, "FlowServer", FlowSe
  * );
  * ```
  */
- declare class FlowServer extends FlowServer_base {}
+ declare class FlowEngine extends FlowEngine_base {}
  /**
- * Legacy configuration options for FlowServer.
+ * Legacy configuration options for FlowEngine.
  *
  * @deprecated Use Effect Layers and FlowProvider instead.
  * This type is kept for backward compatibility.
@@ -5303,7 +5688,7 @@ declare class FlowServer extends FlowServer_base {}
  * @property getFlow - Function to retrieve flow definitions
  * @property kvStore - KV store for flow job metadata
  */
- type FlowServerOptions = {
+ type FlowEngineOptions = {
  getFlow: <TRequirements>({
  flowId,
  storageId
@@ -5313,7 +5698,7 @@ type FlowServerOptions = {
  }) => Promise<Flow<any, any, TRequirements>>;
  kvStore: KvStore<FlowJob>;
  };
- declare function createFlowServer(): Effect.Effect<{
+ declare function createFlowEngine(): Effect.Effect<{
  getFlow: <TRequirements>(flowId: string, clientId: string | null) => Effect.Effect<Flow<any, any, any>, UploadistaError, never>;
  getFlowData: (flowId: string, clientId: string | null) => Effect.Effect<FlowData, UploadistaError, never>;
  runFlow: ({
@@ -5343,9 +5728,9 @@ declare function createFlowServer(): Effect.Effect<{
  cancelFlow: (jobId: string, clientId: string | null) => Effect.Effect<FlowJob, UploadistaError, never>;
  subscribeToFlowEvents: (jobId: string, connection: WebSocketConnection) => Effect.Effect<void, UploadistaError, never>;
  unsubscribeFromFlowEvents: (jobId: string) => Effect.Effect<void, UploadistaError, never>;
- }, never, UploadServer | FlowEventEmitter | FlowJobKVStore | FlowProvider>;
- declare const flowServer: Layer.Layer<FlowServer, never, UploadServer | FlowEventEmitter | FlowJobKVStore | FlowProvider>;
- type FlowServerLayer = typeof flowServer;
+ }, never, UploadEngine | FlowEventEmitter | FlowJobKVStore | FlowProvider>;
+ declare const flowEngine: Layer.Layer<FlowEngine, never, UploadEngine | FlowEventEmitter | FlowJobKVStore | FlowProvider>;
+ type FlowEngineLayer = typeof flowEngine;
  //#endregion
  //#region src/flow/nodes/input-node.d.ts
  /**
@@ -5473,7 +5858,7 @@ declare function createInputNode(id: string, params?: InputNodeParams, options?:
  circuitBreaker?: FlowCircuitBreakerConfig;
  } & {
  type: NodeType.input;
- }, UploadistaError, UploadServer>;
+ }, UploadistaError, UploadEngine>;
  //#endregion
  //#region src/flow/types/flow-file.d.ts
  /**
@@ -5865,7 +6250,7 @@ declare function createTransformNode({
  circuitBreaker?: FlowCircuitBreakerConfig;
  } & {
  type: NodeType;
- }, UploadistaError, UploadServer>;
+ }, UploadistaError, UploadEngine>;
  //#endregion
  //#region src/flow/parallel-scheduler.d.ts
  /**
@@ -6375,8 +6760,8 @@ type ImageAiPluginLayer = Layer.Layer<ImageAiPlugin, never, never>;
  declare const optimizeParamsSchema: z.ZodObject<{
  quality: z.ZodNumber;
  format: z.ZodEnum<{
- webp: "webp";
  jpeg: "jpeg";
+ webp: "webp";
  png: "png";
  avif: "avif";
  }>;
@@ -7123,1354 +7508,959 @@ type VideoPluginShape = {
  /**
  * Resizes a video using streaming for memory-efficient processing of large files.
  *
- * This method outputs the resized video as a stream, reducing peak memory usage.
- * For input, it accepts either a buffered Uint8Array or a Stream. Streaming input
- * is only supported for MPEG-TS format; other formats will be buffered internally.
- *
- * @param input - The input video as Uint8Array or Stream (MPEG-TS only for streaming)
- * @param options - Resize parameters including width, height, and aspect ratio
- * @param streamOptions - Optional streaming configuration including input format hint
- * @returns An Effect that resolves to a Stream of the resized video bytes
- * @throws {UploadistaError} When video resizing fails
- *
- * @example
- * ```typescript
- * const program = Effect.gen(function* () {
- * const videoPlugin = yield* VideoPlugin;
- * const inputStream = yield* dataStore.readStream(fileId);
- * const outputStream = yield* videoPlugin.resizeStream(inputStream, {
- * width: 1280,
- * height: 720,
- * aspectRatio: "keep"
- * });
- * return outputStream;
- * });
- * ```
- */
- resizeStream?: (input: VideoStreamInput, options: ResizeVideoParams, streamOptions?: VideoStreamOptions) => Effect.Effect<Stream.Stream<Uint8Array, UploadistaError>, UploadistaError>;
- /**
- * Trims a video using streaming for memory-efficient processing of large files.
- *
- * This method outputs the trimmed video as a stream, reducing peak memory usage.
- * For input, it accepts either a buffered Uint8Array or a Stream. Streaming input
- * is only supported for MPEG-TS format; other formats will be buffered internally.
- *
- * @param input - The input video as Uint8Array or Stream (MPEG-TS only for streaming)
- * @param options - Trim parameters including start time and end time/duration
- * @param streamOptions - Optional streaming configuration including input format hint
- * @returns An Effect that resolves to a Stream of the trimmed video bytes
- * @throws {UploadistaError} When video trimming fails
- *
- * @example
- * ```typescript
- * const program = Effect.gen(function* () {
- * const videoPlugin = yield* VideoPlugin;
- * const inputStream = yield* dataStore.readStream(fileId);
- * const outputStream = yield* videoPlugin.trimStream(inputStream, {
- * startTime: 10,
- * endTime: 30
- * });
- * return outputStream;
- * });
- * ```
- */
- trimStream?: (input: VideoStreamInput, options: TrimVideoParams, streamOptions?: VideoStreamOptions) => Effect.Effect<Stream.Stream<Uint8Array, UploadistaError>, UploadistaError>;
- /**
- * Indicates whether this plugin supports streaming operations.
- * Returns true if streaming methods are available and functional.
- */
- supportsStreaming?: boolean;
- };
- declare const VideoPlugin_base: Context.TagClass<VideoPlugin, "VideoPlugin", VideoPluginShape>;
- /**
- * Context tag for the Video Plugin.
- *
- * This tag provides a type-safe way to access video processing functionality
- * throughout the application using Effect's dependency injection system.
- *
- * @example
- * ```typescript
- * import { VideoPlugin } from "@uploadista/core/flow/plugins";
- *
- * // In your flow node
- * const program = Effect.gen(function* () {
- * const videoPlugin = yield* VideoPlugin;
- * const transcoded = yield* videoPlugin.transcode(videoData, { format: "webm", codec: "vp9" });
- * const resized = yield* videoPlugin.resize(transcoded, { width: 1280, height: 720, aspectRatio: "keep" });
- * return resized;
- * });
- * ```
- */
- declare class VideoPlugin extends VideoPlugin_base {}
- type VideoPluginLayer = Layer.Layer<VideoPlugin, never, never>;
- //#endregion
- //#region src/flow/plugins/virus-scan-plugin.d.ts
- /**
- * Result of a virus scan operation.
- */
- type ScanResult = {
- /**
- * Whether the file is clean (no viruses detected)
- */
- isClean: boolean;
- /**
- * Array of detected virus/malware names (empty if clean)
- */
- detectedViruses: string[];
- };
- /**
- * Comprehensive metadata about a virus scan operation.
- */
- type ScanMetadata = {
- /**
- * Whether the file was scanned
- */
- scanned: boolean;
- /**
- * Whether the file is clean (no viruses detected)
- */
- isClean: boolean;
- /**
- * Array of detected virus/malware names (empty if clean)
- */
- detectedViruses: string[];
- /**
- * ISO 8601 timestamp of when the scan was performed
- */
- scanDate: string;
- /**
- * Version of the antivirus engine used
- */
- engineVersion: string;
- /**
- * ISO 8601 timestamp of when virus definitions were last updated
- */
- definitionsDate: string;
- };
- /**
- * Shape definition for the Virus Scan Plugin interface.
- * Defines the contract that all virus scanning implementations must follow.
- */
- type VirusScanPluginShape = {
- /**
- * Scans a file for viruses and malware.
+ * This method outputs the resized video as a stream, reducing peak memory usage.
+ * For input, it accepts either a buffered Uint8Array or a Stream. Streaming input
+ * is only supported for MPEG-TS format; other formats will be buffered internally.
  *
- * @param input - The input
- * @
- * @
+ * @param input - The input video as Uint8Array or Stream (MPEG-TS only for streaming)
+ * @param options - Resize parameters including width, height, and aspect ratio
+ * @param streamOptions - Optional streaming configuration including input format hint
+ * @returns An Effect that resolves to a Stream of the resized video bytes
+ * @throws {UploadistaError} When video resizing fails
  *
  * @example
  * ```typescript
  * const program = Effect.gen(function* () {
- * const
- * const
- *
- *
- *
+ * const videoPlugin = yield* VideoPlugin;
+ * const inputStream = yield* dataStore.readStream(fileId);
+ * const outputStream = yield* videoPlugin.resizeStream(inputStream, {
+ * width: 1280,
+ * height: 720,
+ * aspectRatio: "keep"
+ * });
+ * return outputStream;
  * });
  * ```
  */
-
+ resizeStream?: (input: VideoStreamInput, options: ResizeVideoParams, streamOptions?: VideoStreamOptions) => Effect.Effect<Stream.Stream<Uint8Array, UploadistaError>, UploadistaError>;
  /**
- *
+ * Trims a video using streaming for memory-efficient processing of large files.
  *
- *
- *
+ * This method outputs the trimmed video as a stream, reducing peak memory usage.
+ * For input, it accepts either a buffered Uint8Array or a Stream. Streaming input
+ * is only supported for MPEG-TS format; other formats will be buffered internally.
+ *
+ * @param input - The input video as Uint8Array or Stream (MPEG-TS only for streaming)
+ * @param options - Trim parameters including start time and end time/duration
+ * @param streamOptions - Optional streaming configuration including input format hint
+ * @returns An Effect that resolves to a Stream of the trimmed video bytes
+ * @throws {UploadistaError} When video trimming fails
  *
  * @example
  * ```typescript
  * const program = Effect.gen(function* () {
- * const
- * const
- *
+ * const videoPlugin = yield* VideoPlugin;
+ * const inputStream = yield* dataStore.readStream(fileId);
+ * const outputStream = yield* videoPlugin.trimStream(inputStream, {
+ * startTime: 10,
+ * endTime: 30
+ * });
+ * return outputStream;
  * });
  * ```
  */
-
- };
- declare const VirusScanPlugin_base: Context.TagClass<VirusScanPlugin, "VirusScanPlugin", VirusScanPluginShape>;
- /**
- * Context tag for the Virus Scan Plugin.
- *
- * This tag provides a type-safe way to access virus scanning functionality
- * throughout the application using Effect's dependency injection system.
- *
- * @example
- * ```typescript
- * import { VirusScanPlugin } from "@uploadista/core/flow/plugins";
- *
- * // In your flow node
- * const program = Effect.gen(function* () {
- * const virusScanPlugin = yield* VirusScanPlugin;
- * const result = yield* virusScanPlugin.scan(fileData);
- *
- * if (!result.isClean) {
- * // Handle infected file
- * return Effect.fail(new UploadistaError({
- * code: "VIRUS_DETECTED",
- * message: `Viruses detected: ${result.detectedViruses.join(', ')}`
- * }));
- * }
- *
- * return fileData;
- * });
- * ```
- */
- declare class VirusScanPlugin extends VirusScanPlugin_base {}
- type VirusScanPluginLayer = Layer.Layer<VirusScanPlugin, never, never>;
- //#endregion
- //#region src/flow/plugins/zip-plugin.d.ts
- /**
- * Parameters for creating a ZIP archive.
- */
- type ZipParams = {
- /** Name of the ZIP file to create */
- zipName: string;
- /** Whether to include file metadata in the ZIP archive */
- includeMetadata: boolean;
- };
- /**
- * Input data structure for ZIP operations.
- * Represents a single file to be included in the ZIP archive.
- */
- type ZipInput = {
- /** Unique identifier for the file */
- id: string;
- /** Binary data of the file */
- data: Uint8Array;
- /** File metadata including name, size, type, etc. */
- metadata: UploadFile["metadata"];
- };
- /**
- * Shape definition for the ZIP Plugin interface.
- * Defines the contract that all ZIP implementations must follow.
- */
- type ZipPluginShape = {
+ trimStream?: (input: VideoStreamInput, options: TrimVideoParams, streamOptions?: VideoStreamOptions) => Effect.Effect<Stream.Stream<Uint8Array, UploadistaError>, UploadistaError>;
  /**
- *
- *
- * @param inputs - Array of files to include in the ZIP archive
- * @param options - Configuration options for the ZIP creation
- * @returns An Effect that resolves to the ZIP file as a Uint8Array
- * @throws {UploadistaError} When ZIP creation fails
+ * Indicates whether this plugin supports streaming operations.
+ * Returns true if streaming methods are available and functional.
  */
-
- };
- declare const ZipPlugin_base: Context.TagClass<ZipPlugin, "ZipPlugin", ZipPluginShape>;
- /**
- * Context tag for the ZIP Plugin.
- *
- * This tag provides a type-safe way to access ZIP functionality
- * throughout the application using Effect's dependency injection system.
- *
- * @example
- * ```typescript
- * import { ZipPlugin } from "@uploadista/core/flow/plugins";
- *
- * // In your flow node
- * const program = Effect.gen(function* () {
- * const zipPlugin = yield* ZipPlugin;
- * const zipData = yield* zipPlugin.zip(files, { zipName: "archive.zip", includeMetadata: true });
- * return zipData;
- * });
- * ```
- */
- declare class ZipPlugin extends ZipPlugin_base {}
- type ZipPluginLayer = Layer.Layer<ZipPlugin, never, never>;
- //#endregion
- //#region src/flow/plugins/plugins.d.ts
- type Plugin = ImagePlugin | ImageAiPlugin | VideoPlugin | DocumentPlugin | DocumentAiPlugin | VirusScanPlugin | CredentialProvider | ZipPlugin;
- type PluginLayer = ImagePluginLayer | ImageAiPluginLayer | VideoPluginLayer | DocumentPluginLayer | DocumentAiPluginLayer | VirusScanPluginLayer | CredentialProviderLayer | ZipPluginLayer;
- //#endregion
- //#region src/flow/plugins/types/describe-image-node.d.ts
- /**
- * Zod schema for validating describe image node parameters.
- * Defines the structure and validation rules for image description requests.
- */
- declare const describeImageParamsSchema: z.ZodObject<{
- serviceType: z.ZodOptional<z.ZodEnum<{
- replicate: "replicate";
- }>>;
- }, z.core.$strip>;
- /**
- * Parameters for the describe image node.
- * Controls which AI service to use for generating image descriptions.
- */
- type DescribeImageParams = z.infer<typeof describeImageParamsSchema>;
- //#endregion
- //#region src/flow/plugins/types/remove-background-node.d.ts
- /**
- * Zod schema for validating remove background node parameters.
- * Defines the structure and validation rules for background removal requests.
- */
- declare const removeBackgroundParamsSchema: z.ZodObject<{
- serviceType: z.ZodOptional<z.ZodEnum<{
- replicate: "replicate";
- }>>;
- }, z.core.$strip>;
- /**
- * Parameters for the remove background node.
- * Controls which AI service to use for background removal processing.
- */
- type RemoveBackgroundParams = z.infer<typeof removeBackgroundParamsSchema>;
- //#endregion
- //#region src/flow/type-guards.d.ts
- /**
- * A narrowed typed output with a specific node type and data type.
- * Unlike TypedOutput<T>, this type has a required nodeType field and
- * excludes BuiltInTypedOutput from the union, providing better type narrowing.
- *
- * @template T - The TypeScript type of the output data
- * @template TNodeType - The literal string type of the node type ID
- */
- type NarrowedTypedOutput<T, TNodeType extends string = string> = {
- nodeType: TNodeType;
- data: T;
- nodeId: string;
- timestamp: string;
+ supportsStreaming?: boolean;
  };
+ declare const VideoPlugin_base: Context.TagClass<VideoPlugin, "VideoPlugin", VideoPluginShape>;
  /**
- *
- *
- * Creates a TypeScript type guard that validates both the type tag and
- * the data structure against the registered schema. This enables type-safe
- * narrowing of TypedOutput objects in TypeScript.
- *
- * @template T - The expected TypeScript type after narrowing
- * @template TNodeType - The literal string type of the node type ID
- * @param typeId - The registered type ID to check against (e.g., "storage-output-v1")
- * @returns A type guard function that narrows TypedOutput to NarrowedTypedOutput<T, TNodeType>
- *
- * @example
- * ```typescript
- * import { createTypeGuard } from "@uploadista/core/flow";
- * import { z } from "zod";
- *
- * const descriptionSchema = z.object({
- * description: z.string(),
- * confidence: z.number(),
- * });
- *
- * type DescriptionOutput = z.infer<typeof descriptionSchema>;
- *
- * const isDescriptionOutput = createTypeGuard<DescriptionOutput>(
- * "description-output-v1"
- * );
- *
- * // Use in code
- * if (isDescriptionOutput(output)) {
- * // output.data is typed as DescriptionOutput
- * console.log(output.data.description);
- * }
- * ```
- */
- declare function createTypeGuard<T, TNodeType extends string = string>(typeId: TNodeType): (output: TypedOutput) => output is NarrowedTypedOutput<T, TNodeType>;
- /**
- * Type guard for UploadFile objects.
- *
- * Validates that a value is a valid UploadFile by checking its structure against the schema.
- * This is useful for determining if a node result is an UploadFile, which affects
- * auto-persistence and intermediate file tracking.
- *
- * @param value - The value to check
- * @returns True if the value is a valid UploadFile
- *
- * @example
- * ```typescript
- * import { isUploadFile } from "@uploadista/core/flow";
- *
- * if (isUploadFile(nodeResult)) {
- * // nodeResult is typed as UploadFile
- * console.log("File ID:", nodeResult.id);
- * console.log("Storage:", nodeResult.storage.id);
- * }
- * ```
- */
- declare function isUploadFile(value: unknown): value is UploadFile;
- /**
- * Type guard for storage output nodes.
- *
- * Validates that an output is from a storage node and contains valid UploadFile data.
+ * Context tag for the Video Plugin.
  *
- *
- *
+ * This tag provides a type-safe way to access video processing functionality
+ * throughout the application using Effect's dependency injection system.
  *
  * @example
  * ```typescript
- * import {
+ * import { VideoPlugin } from "@uploadista/core/flow/plugins";
  *
- *
- *
- *
- *
- * }
+ * // In your flow node
+ * const program = Effect.gen(function* () {
+ * const videoPlugin = yield* VideoPlugin;
+ * const transcoded = yield* videoPlugin.transcode(videoData, { format: "webm", codec: "vp9" });
+ * const resized = yield* videoPlugin.resize(transcoded, { width: 1280, height: 720, aspectRatio: "keep" });
+ * return resized;
+ * });
  * ```
  */
- declare
+ declare class VideoPlugin extends VideoPlugin_base {}
+ type VideoPluginLayer = Layer.Layer<VideoPlugin, never, never>;
+ //#endregion
+ //#region src/flow/plugins/virus-scan-plugin.d.ts
  /**
- *
- *
- * Validates that an output is from an OCR node and contains valid structured OCR data.
- *
- * @param output - The output to check
- * @returns True if the output is an OCR output with valid structured text data
- *
- * @example
- * ```typescript
- * import { isOcrOutput } from "@uploadista/core/flow";
- *
- * if (isOcrOutput(output)) {
- * // output.data is typed as OcrOutput
- * console.log("Extracted text:", output.data.extractedText);
- * console.log("Format:", output.data.format);
- * console.log("Task type:", output.data.taskType);
- * }
- * ```
+ * Result of a virus scan operation.
  */
-
-
-
-
-
-
+ type ScanResult = {
+ /**
+ * Whether the file is clean (no viruses detected)
+ */
+ isClean: boolean;
+ /**
+ * Array of detected virus/malware names (empty if clean)
+ */
+ detectedViruses: string[];
+ };
  /**
- *
- *
- * Validates that an output is from an image description node and contains valid description data.
- *
- * @param output - The output to check
- * @returns True if the output is an image description output with valid description data
- *
- * @example
- * ```typescript
- * import { isImageDescriptionOutput } from "@uploadista/core/flow";
- *
- * if (isImageDescriptionOutput(output)) {
- * // output.data is typed as ImageDescriptionOutput
- * console.log("Description:", output.data.description);
- * console.log("Confidence:", output.data.confidence);
- * }
- * ```
+ * Comprehensive metadata about a virus scan operation.
  */
-
-
-
-
-
+ type ScanMetadata = {
+ /**
+ * Whether the file was scanned
+ */
+ scanned: boolean;
+ /**
+ * Whether the file is clean (no viruses detected)
+ */
+ isClean: boolean;
+ /**
+ * Array of detected virus/malware names (empty if clean)
+ */
+ detectedViruses: string[];
+ /**
+ * ISO 8601 timestamp of when the scan was performed
+ */
+ scanDate: string;
+ /**
+ * Version of the antivirus engine used
+ */
+ engineVersion: string;
+ /**
+ * ISO 8601 timestamp of when virus definitions were last updated
+ */
+ definitionsDate: string;
+ };
  /**
- *
- *
- * This helper function filters outputs using a type guard and returns a
- * properly typed array of results. It's useful for extracting specific
- * output types from multi-output flows.
- *
- * @template TOutput - The expected narrowed output type
- * @param outputs - Array of typed outputs to filter
- * @param typeGuard - Type guard function to use for filtering
- * @returns Array of outputs that match the type guard, properly typed
- *
- * @example
- * ```typescript
- * import { filterOutputsByType, isStorageOutput } from "@uploadista/core/flow";
- *
- * // Get all storage outputs from a multi-output flow
- * const storageOutputs = filterOutputsByType(
- * flowResult.outputs,
- * isStorageOutput
- * );
- *
- * for (const output of storageOutputs) {
- * // Each output.data is typed as UploadFile
- * console.log("Saved file:", output.data.url);
- * }
- * ```
+ * Shape definition for the Virus Scan Plugin interface.
+ * Defines the contract that all virus scanning implementations must follow.
  */
-
+ type VirusScanPluginShape = {
+ /**
+ * Scans a file for viruses and malware.
+ *
+ * @param input - The input file as a Uint8Array
+ * @returns An Effect that resolves to ScanResult with detection information
+ * @throws {UploadistaError} When virus scanning fails or ClamAV is unavailable
+ *
+ * @example
+ * ```typescript
+ * const program = Effect.gen(function* () {
+ * const virusScanPlugin = yield* VirusScanPlugin;
+ * const result = yield* virusScanPlugin.scan(fileData);
+ * if (!result.isClean) {
+ * console.log('Viruses detected:', result.detectedViruses);
+ * }
+ * });
+ * ```
+ */
+ scan: (input: Uint8Array) => Effect.Effect<ScanResult, UploadistaError>;
+ /**
+ * Retrieves the version of the antivirus engine.
+ *
+ * @returns An Effect that resolves to the engine version string
+ * @throws {UploadistaError} When version retrieval fails
+ *
+ * @example
+ * ```typescript
+ * const program = Effect.gen(function* () {
+ * const virusScanPlugin = yield* VirusScanPlugin;
+ * const version = yield* virusScanPlugin.getVersion();
+ * console.log('ClamAV version:', version);
+ * });
+ * ```
+ */
+ getVersion: () => Effect.Effect<string, UploadistaError>;
+ };
+ declare const VirusScanPlugin_base: Context.TagClass<VirusScanPlugin, "VirusScanPlugin", VirusScanPluginShape>;
  /**
- *
- *
- * This helper function finds exactly one output matching the type guard.
- * It throws an error if no outputs match or if multiple outputs match,
- * ensuring the caller receives exactly the expected result.
+ * Context tag for the Virus Scan Plugin.
  *
- *
- *
- * @param typeGuard - Type guard function to use for matching
- * @returns The single matching output, properly typed
- * @throws {UploadistaError} If no outputs match (OUTPUT_NOT_FOUND)
- * @throws {UploadistaError} If multiple outputs match (MULTIPLE_OUTPUTS_FOUND)
+ * This tag provides a type-safe way to access virus scanning functionality
+ * throughout the application using Effect's dependency injection system.
  *
  * @example
  * ```typescript
- * import {
+ * import { VirusScanPlugin } from "@uploadista/core/flow/plugins";
  *
- *
- *
- *
- *
- *
- *
- *
- *
- *
- *
- *
- * console.error("Multiple storage outputs found, expected one");
+ * // In your flow node
+ * const program = Effect.gen(function* () {
+ * const virusScanPlugin = yield* VirusScanPlugin;
+ * const result = yield* virusScanPlugin.scan(fileData);
+ *
+ * if (!result.isClean) {
+ * // Handle infected file
+ * return Effect.fail(new UploadistaError({
+ * code: "VIRUS_DETECTED",
+ * message: `Viruses detected: ${result.detectedViruses.join(', ')}`
+ * }));
  * }
- *
+ *
+ * return fileData;
+ * });
  * ```
  */
- declare
+ declare class VirusScanPlugin extends VirusScanPlugin_base {}
+ type VirusScanPluginLayer = Layer.Layer<VirusScanPlugin, never, never>;
+ //#endregion
+ //#region src/flow/plugins/zip-plugin.d.ts
+ /**
+ * Parameters for creating a ZIP archive.
+ */
+ type ZipParams = {
+ /** Name of the ZIP file to create */
+ zipName: string;
+ /** Whether to include file metadata in the ZIP archive */
+ includeMetadata: boolean;
+ };
+ /**
+ * Input data structure for ZIP operations.
+ * Represents a single file to be included in the ZIP archive.
+ */
+ type ZipInput = {
+ /** Unique identifier for the file */
+ id: string;
+ /** Binary data of the file */
+ data: Uint8Array;
+ /** File metadata including name, size, type, etc. */
+ metadata: UploadFile["metadata"];
+ };
+ /**
+ * Shape definition for the ZIP Plugin interface.
+ * Defines the contract that all ZIP implementations must follow.
+ */
+ type ZipPluginShape = {
+ /**
+ * Creates a ZIP archive from multiple input files.
+ *
+ * @param inputs - Array of files to include in the ZIP archive
+ * @param options - Configuration options for the ZIP creation
+ * @returns An Effect that resolves to the ZIP file as a Uint8Array
+ * @throws {UploadistaError} When ZIP creation fails
+ */
+ zip: (inputs: ZipInput[], options: ZipParams) => Effect.Effect<Uint8Array, UploadistaError>;
+ };
+ declare const ZipPlugin_base: Context.TagClass<ZipPlugin, "ZipPlugin", ZipPluginShape>;
  /**
- *
- *
- * Unlike getSingleOutputByType, this function returns undefined if no outputs
- * match, and returns the first match if multiple outputs exist. This is useful
- * when you want a more lenient matching strategy.
+ * Context tag for the ZIP Plugin.
  *
- *
- *
- * @param typeGuard - Type guard function to use for matching
- * @returns The first matching output, or undefined if none match
+ * This tag provides a type-safe way to access ZIP functionality
+ * throughout the application using Effect's dependency injection system.
  *
  * @example
  * ```typescript
- * import {
- *
- * const storageOutput = getFirstOutputByType(
- * flowResult.outputs,
- * isStorageOutput
- * );
+ * import { ZipPlugin } from "@uploadista/core/flow/plugins";
  *
- *
- *
- *
- *
- *
+ * // In your flow node
+ * const program = Effect.gen(function* () {
+ * const zipPlugin = yield* ZipPlugin;
+ * const zipData = yield* zipPlugin.zip(files, { zipName: "archive.zip", includeMetadata: true });
+ * return zipData;
+ * });
  * ```
  */
- declare
+ declare class ZipPlugin extends ZipPlugin_base {}
+ type ZipPluginLayer = Layer.Layer<ZipPlugin, never, never>;
+ //#endregion
+ //#region src/flow/plugins/plugins.d.ts
+ type Plugin = ImagePlugin | ImageAiPlugin | VideoPlugin | DocumentPlugin | DocumentAiPlugin | VirusScanPlugin | CredentialProvider | ZipPlugin;
+ type PluginLayer = ImagePluginLayer | ImageAiPluginLayer | VideoPluginLayer | DocumentPluginLayer | DocumentAiPluginLayer | VirusScanPluginLayer | CredentialProviderLayer | ZipPluginLayer;
+ //#endregion
+ //#region src/flow/plugins/types/describe-image-node.d.ts
  /**
- *
- *
- * This helper finds an output produced by a specific node instance,
- * regardless of its type. Useful when you know the specific node ID
- * you're looking for.
- *
- * @param outputs - Array of typed outputs to search
- * @param nodeId - The node ID to match
- * @returns The output from the specified node, or undefined if not found
- *
- * @example
- * ```typescript
- * import { getOutputByNodeId } from "@uploadista/core/flow";
- *
- * const cdnOutput = getOutputByNodeId(flowResult.outputs, "cdn-storage");
- * if (cdnOutput) {
- * console.log("CDN output:", cdnOutput.data);
- * }
- * ```
+ * Zod schema for validating describe image node parameters.
+ * Defines the structure and validation rules for image description requests.
  */
- declare
+ declare const describeImageParamsSchema: z.ZodObject<{
+ serviceType: z.ZodOptional<z.ZodEnum<{
+ replicate: "replicate";
+ }>>;
+ }, z.core.$strip>;
  /**
- *
- *
- * Simple predicate function to check if at least one output of a given
- * type exists in the results.
- *
- * @template TOutput - The expected narrowed output type
- * @param outputs - Array of typed outputs to check
- * @param typeGuard - Type guard function to use for checking
- * @returns True if at least one output matches the type guard
- *
- * @example
- * ```typescript
- * import { hasOutputOfType, isStorageOutput } from "@uploadista/core/flow";
- *
- * if (hasOutputOfType(flowResult.outputs, isStorageOutput)) {
- * console.log("Flow produced at least one storage output");
- * } else {
- * console.log("No storage outputs in this flow");
- * }
- * ```
+ * Parameters for the describe image node.
+ * Controls which AI service to use for generating image descriptions.
  */
-
+ type DescribeImageParams = z.infer<typeof describeImageParamsSchema>;
+ //#endregion
+ //#region src/flow/plugins/types/remove-background-node.d.ts
  /**
- *
- *
- * Checks if the input data is an init operation that starts a streaming
- * file upload session.
- *
- * @param data - Input data to check
- * @returns True if data is an init operation
- *
- * @example
- * ```typescript
- * if (isInitOperation(inputData)) {
- * console.log("Storage ID:", inputData.storageId);
- * console.log("Metadata:", inputData.metadata);
- * }
- * ```
+ * Zod schema for validating remove background node parameters.
+ * Defines the structure and validation rules for background removal requests.
  */
- declare
-
-
+ declare const removeBackgroundParamsSchema: z.ZodObject<{
+ serviceType: z.ZodOptional<z.ZodEnum<{
+ replicate: "replicate";
+ }>>;
+ }, z.core.$strip>;
  /**
- *
- *
-
-
-
-
-
+ * Parameters for the remove background node.
+ * Controls which AI service to use for background removal processing.
+ */
+ type RemoveBackgroundParams = z.infer<typeof removeBackgroundParamsSchema>;
+ //#endregion
+ //#region src/flow/type-guards.d.ts
+ /**
+ * A narrowed typed output with a specific node type and data type.
+ * Unlike TypedOutput<T>, this type has a required nodeType field and
+ * excludes BuiltInTypedOutput from the union, providing better type narrowing.
  *
- * @
- *
- * if (isFinalizeOperation(inputData)) {
- * console.log("Upload ID:", inputData.uploadId);
- * }
- * ```
+ * @template T - The TypeScript type of the output data
+ * @template TNodeType - The literal string type of the node type ID
  */
-
-
-
+ type NarrowedTypedOutput<T, TNodeType extends string = string> = {
+ nodeType: TNodeType;
+ data: T;
+ nodeId: string;
+ timestamp: string;
+ };
  /**
- *
+ * Factory function to create type guards for specific node types.
  *
- *
- *
+ * Creates a TypeScript type guard that validates both the type tag and
+ * the data structure against the registered schema. This enables type-safe
+ * narrowing of TypedOutput objects in TypeScript.
  *
- * @
- * @
+ * @template T - The expected TypeScript type after narrowing
+ * @template TNodeType - The literal string type of the node type ID
+ * @param typeId - The registered type ID to check against (e.g., "storage-output-v1")
+ * @returns A type guard function that narrows TypedOutput to NarrowedTypedOutput<T, TNodeType>
  *
  * @example
  * ```typescript
- *
- *
- * console.log("Optional storage:", inputData.storageId);
- * }
- * ```
- */
- declare function isUrlOperation(data: InputData): data is Extract<InputData, {
- operation: "url";
- }>;
- /**
- * Type guard for upload operations (init or url).
+ * import { createTypeGuard } from "@uploadista/core/flow";
+ * import { z } from "zod";
  *
- *
- *
+ * const descriptionSchema = z.object({
+ * description: z.string(),
+ * confidence: z.number(),
+ * });
  *
- *
- * @returns True if data is an init or URL operation
+ * type DescriptionOutput = z.infer<typeof descriptionSchema>;
  *
- *
- *
- *
- *
- *
- *
- *
- *
- * }
+ * const isDescriptionOutput = createTypeGuard<DescriptionOutput>(
+ * "description-output-v1"
+ * );
+ *
+ * // Use in code
+ * if (isDescriptionOutput(output)) {
+ * // output.data is typed as DescriptionOutput
+ * console.log(output.data.description);
  * }
  * ```
  */
- declare function
- operation: "init" | "url";
- }>;
- //#endregion
- //#region src/flow/typed-flow.d.ts
+ declare function createTypeGuard<T, TNodeType extends string = string>(typeId: TNodeType): (output: TypedOutput) => output is NarrowedTypedOutput<T, TNodeType>;
  /**
- *
- *
- * A node definition can be either:
- * - A plain FlowNode object
- * - An Effect that resolves to a FlowNode (for nodes requiring dependencies)
+ * Type guard for UploadFile objects.
  *
- *
- *
-
- type NodeDefinition<TNodeError = never, TNodeRequirements = never> = FlowNode<any, any, UploadistaError> | Effect.Effect<FlowNode<any, any, UploadistaError>, TNodeError, TNodeRequirements>;
- /**
- * A record mapping node IDs to their definitions.
+ * Validates that a value is a valid UploadFile by checking its structure against the schema.
+ * This is useful for determining if a node result is an UploadFile, which affects
+ * auto-persistence and intermediate file tracking.
  *
- *
- *
+ * @param value - The value to check
+ * @returns True if the value is a valid UploadFile
  *
  * @example
  * ```typescript
- *
- * input: fileInputNode,
- * resize: Effect.succeed(imageResizeNode),
- * output: s3OutputNode
- * } satisfies NodeDefinitionsRecord;
- * ```
- */
- type NodeDefinitionsRecord = Record<string, NodeDefinition<any, any>>;
- /**
- * Extracts the error type from a NodeDefinition.
- *
- * If the node is an Effect, extracts its error type.
- * If the node is a plain FlowNode, returns never (no errors).
- */
- type NodeDefinitionError<T> = T extends Effect.Effect<FlowNode<any, any, UploadistaError>, infer TError, any> ? TError : never;
- /**
- * Extracts the requirements (dependencies) from a NodeDefinition.
+ * import { isUploadFile } from "@uploadista/core/flow";
  *
- *
+ * if (isUploadFile(nodeResult)) {
+ * // nodeResult is typed as UploadFile
+ * console.log("File ID:", nodeResult.id);
+ * console.log("Storage:", nodeResult.storage.id);
+ * }
+ * ```
  */
-
+ declare function isUploadFile(value: unknown): value is UploadFile;
  /**
- *
+ * Type guard for storage output nodes.
  *
- *
- *
+ * Validates that an output is from a storage node and contains valid UploadFile data.
+ *
+ * @param output - The output to check
+ * @returns True if the output is a storage output with valid UploadFile data
+ *
+ * @example
+ * ```typescript
+ * import { isStorageOutput } from "@uploadista/core/flow";
+ *
+ * if (isStorageOutput(output)) {
+ * // output.data is typed as UploadFile
+ * console.log("File URL:", output.data.url);
+ * console.log("File size:", output.data.size);
+ * }
+ * ```
  */
-
+ declare const isStorageOutput: (output: TypedOutput) => output is NarrowedTypedOutput<UploadFile, string>;
  /**
- *
+ * Type guard for OCR output nodes.
  *
- *
- * requirement types into a single union type representing all services
- * needed by the flow.
+ * Validates that an output is from an OCR node and contains valid structured OCR data.
  *
- * @
+ * @param output - The output to check
+ * @returns True if the output is an OCR output with valid structured text data
  *
  * @example
  * ```typescript
- *
- *
- *
- *
- *
- *
+ * import { isOcrOutput } from "@uploadista/core/flow";
+ *
+ * if (isOcrOutput(output)) {
+ * // output.data is typed as OcrOutput
+ * console.log("Extracted text:", output.data.extractedText);
+ * console.log("Format:", output.data.format);
+ * console.log("Task type:", output.data.taskType);
+ * }
  * ```
  */
-
+ declare const isOcrOutput: (output: TypedOutput) => output is NarrowedTypedOutput<{
+ extractedText: string;
+ format: "markdown" | "plain" | "structured";
+ taskType: "convertToMarkdown" | "freeOcr" | "parseFigure" | "locateObject";
+ confidence?: number | undefined;
+ }, string>;
  /**
- *
+ * Type guard for image description output nodes.
  *
- *
- * including UploadServer (which is provided by the runtime).
+ * Validates that an output is from an image description node and contains valid description data.
  *
- * @
+ * @param output - The output to check
+ * @returns True if the output is an image description output with valid description data
  *
  * @example
  * ```typescript
- *
- *
- *
- *
- *
- *
- * }
- * type AllRequirements = FlowRequirements<typeof myFlow.nodes>;
- * // AllRequirements = ImagePlugin | UploadServer
+ * import { isImageDescriptionOutput } from "@uploadista/core/flow";
+ *
+ * if (isImageDescriptionOutput(output)) {
+ * // output.data is typed as ImageDescriptionOutput
+ * console.log("Description:", output.data.description);
+ * console.log("Confidence:", output.data.confidence);
+ * }
  * ```
  */
-
+ declare const isImageDescriptionOutput: (output: TypedOutput) => output is NarrowedTypedOutput<{
+ description: string;
+ confidence?: number | undefined;
+ metadata?: Record<string, unknown> | undefined;
+ }, string>;
  /**
- *
+ * Filter an array of outputs to only those matching a specific type.
  *
- * This
- *
- *
+ * This helper function filters outputs using a type guard and returns a
+ * properly typed array of results. It's useful for extracting specific
+ * output types from multi-output flows.
  *
- * @template
+ * @template TOutput - The expected narrowed output type
+ * @param outputs - Array of typed outputs to filter
+ * @param typeGuard - Type guard function to use for filtering
+ * @returns Array of outputs that match the type guard, properly typed
  *
  * @example
  * ```typescript
- *
- *
- *
- *
- *
- *
- *
- *
- *
+ * import { filterOutputsByType, isStorageOutput } from "@uploadista/core/flow";
+ *
+ * // Get all storage outputs from a multi-output flow
+ * const storageOutputs = filterOutputsByType(
+ * flowResult.outputs,
+ * isStorageOutput
+ * );
+ *
+ * for (const output of storageOutputs) {
+ * // Each output.data is typed as UploadFile
+ * console.log("Saved file:", output.data.url);
+ * }
  * ```
  */
-
+ declare function filterOutputsByType<TOutput extends TypedOutput>(outputs: TypedOutput[], typeGuard: (output: TypedOutput) => output is TOutput): TOutput[];
  /**
- *
+ * Get a single output of a specific type from an array of outputs.
  *
- *
- *
+ * This helper function finds exactly one output matching the type guard.
+ * It throws an error if no outputs match or if multiple outputs match,
+ * ensuring the caller receives exactly the expected result.
  *
- *
+ * @template TOutput - The expected narrowed output type
+ * @param outputs - Array of typed outputs to search
+ * @param typeGuard - Type guard function to use for matching
+ * @returns The single matching output, properly typed
+ * @throws {UploadistaError} If no outputs match (OUTPUT_NOT_FOUND)
+ * @throws {UploadistaError} If multiple outputs match (MULTIPLE_OUTPUTS_FOUND)
+ *
+ * @example
+ * ```typescript
+ * import { getSingleOutputByType, isStorageOutput } from "@uploadista/core/flow";
+ *
+ * try {
+ * const storageOutput = getSingleOutputByType(
+ * flowResult.outputs,
+ * isStorageOutput
+ * );
+ * // storageOutput.data is typed as UploadFile
+ * console.log("File saved at:", storageOutput.data.url);
+ * } catch (error) {
+ * if (error.code === "OUTPUT_NOT_FOUND") {
+ * console.error("No storage output found");
+ * } else if (error.code === "MULTIPLE_OUTPUTS_FOUND") {
+ * console.error("Multiple storage outputs found, expected one");
+ * }
+ * }
+ * ```
  */
-
- type ExtractKeysByNodeType<TNodes extends NodeDefinitionsRecord, TType extends NodeType> = { [K in keyof TNodes]: InferNode<TNodes[K]>["type"] extends TType ? K : never }[keyof TNodes];
- type SchemaInfer<T> = T extends z.ZodTypeAny ? z.infer<T> : never;
- type FlowInputMap<TNodes extends NodeDefinitionsRecord> = { [K in Extract<ExtractKeysByNodeType<TNodes, NodeType.input>, string>]: SchemaInfer<InferNode<TNodes[K]>["inputSchema"]> };
- type FlowOutputMap<TNodes extends NodeDefinitionsRecord> = { [K in Extract<keyof TNodes, string>]: SchemaInfer<InferNode<TNodes[K]>["outputSchema"]> };
- type FlowInputUnion<TNodes extends NodeDefinitionsRecord> = { [K in Extract<ExtractKeysByNodeType<TNodes, NodeType.input>, string>]: SchemaInfer<InferNode<TNodes[K]>["inputSchema"]> }[Extract<ExtractKeysByNodeType<TNodes, NodeType.input>, string>];
- type FlowOutputUnion<TNodes extends NodeDefinitionsRecord> = { [K in Extract<keyof TNodes, string>]: SchemaInfer<InferNode<TNodes[K]>["outputSchema"]> }[Extract<keyof TNodes, string>];
- type NodeKey<TNodes extends NodeDefinitionsRecord> = Extract<keyof TNodes, string>;
- type TypedFlowEdge<TNodes extends NodeDefinitionsRecord> = {
- source: NodeKey<TNodes>;
- target: NodeKey<TNodes>;
- sourcePort?: string;
- targetPort?: string;
- };
- type TypedFlowConfig<TNodes extends NodeDefinitionsRecord> = {
- flowId: string;
- name: string;
- nodes: TNodes;
- edges: Array<TypedFlowEdge<TNodes>>;
- typeChecker?: TypeCompatibilityChecker;
- onEvent?: (event: FlowEvent) => Effect.Effect<{
- eventId: string | null;
- }, UploadistaError>;
- parallelExecution?: {
- enabled?: boolean;
- maxConcurrency?: number;
- };
- inputSchema?: z.ZodTypeAny;
- outputSchema?: z.ZodTypeAny;
- hooks?: {
- /**
- * Called when a sink node (terminal node with no outgoing edges) produces an output.
- * This hook runs after auto-persistence for UploadFile outputs.
- *
- * Use this hook to perform additional post-processing such as:
- * - Saving output metadata to a database
- * - Tracking outputs in external systems
- * - Adding custom metadata to outputs
- * - Triggering downstream workflows
- *
- * **Important**: The hook must not have any service requirements (Effect requirements must be `never`).
- * All necessary services should be captured in the closure when defining the hook.
|
|
7945
|
-
*
|
|
7946
|
-
* @example
|
|
7947
|
-
* ```typescript
|
|
7948
|
-
* // Using Promise (simpler for most users)
|
|
7949
|
-
* hooks: {
|
|
7950
|
-
* onNodeOutput: async ({ output }) => {
|
|
7951
|
-
* await db.save(output);
|
|
7952
|
-
* return output;
|
|
7953
|
-
* }
|
|
7954
|
-
* }
|
|
7955
|
-
* ```
|
|
7956
|
-
*/
|
|
7957
|
-
onNodeOutput?: <TOutput>(context: {
|
|
7958
|
-
output: TOutput;
|
|
7959
|
-
nodeId: string;
|
|
7960
|
-
flowId: string;
|
|
7961
|
-
jobId: string;
|
|
7962
|
-
storageId: string;
|
|
7963
|
-
clientId: string | null;
|
|
7964
|
-
}) => Effect.Effect<TOutput, UploadistaError, never> | Promise<TOutput>;
|
|
7965
|
-
};
|
|
7966
|
-
/**
|
|
7967
|
-
* Circuit breaker configuration for resilience against external service failures.
|
|
7968
|
-
*
|
|
7969
|
-
* @example
|
|
7970
|
-
* ```typescript
|
|
7971
|
-
* circuitBreaker: {
|
|
7972
|
-
* defaults: { enabled: false },
|
|
7973
|
-
* nodeTypeOverrides: {
|
|
7974
|
-
* "Describe Image": {
|
|
7975
|
-
* enabled: true,
|
|
7976
|
-
* failureThreshold: 5,
|
|
7977
|
-
* resetTimeout: 60000,
|
|
7978
|
-
* fallback: { type: "skip", passThrough: true }
|
|
7979
|
-
* }
|
|
7980
|
-
* }
|
|
7981
|
-
* }
|
|
7982
|
-
* ```
|
|
7983
|
-
*/
|
|
7984
|
-
circuitBreaker?: {
|
|
7985
|
-
/** Default circuit breaker config for all nodes */
|
|
7986
|
-
defaults?: FlowCircuitBreakerConfig;
|
|
7987
|
-
/** Override circuit breaker config per node type (node name) */
|
|
7988
|
-
nodeTypeOverrides?: Record<string, FlowCircuitBreakerConfig>;
|
|
7989
|
-
};
|
|
7990
|
-
};
|
|
7991
|
-
declare const typedFlowInputsSymbol: unique symbol;
|
|
7992
|
-
declare const typedFlowOutputsSymbol: unique symbol;
|
|
7993
|
-
declare const typedFlowPluginsSymbol: unique symbol;
|
|
8008
|
+
declare function getSingleOutputByType<TOutput extends TypedOutput>(outputs: TypedOutput[], typeGuard: (output: TypedOutput) => output is TOutput): Effect.Effect<TOutput, UploadistaError>;
|
|
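Note on the declaration just added: `getSingleOutputByType` now returns an `Effect.Effect<TOutput, UploadistaError>` rather than throwing, while its `@example` above still shows `try`/`catch`. A minimal sketch of Effect-style consumption, assuming `flowResult.outputs` and the `isStorageOutput` guard from the surrounding examples are in scope:

```typescript
import { Effect } from "effect";
import { getSingleOutputByType, isStorageOutput } from "@uploadista/core/flow";

// Failures surface in the Effect error channel as UploadistaError values
// (OUTPUT_NOT_FOUND or MULTIPLE_OUTPUTS_FOUND) instead of thrown exceptions.
const fileUrl = getSingleOutputByType(flowResult.outputs, isStorageOutput).pipe(
  Effect.map((output) => output.data.url),
  Effect.catchAll((error) => Effect.succeed(`lookup failed: ${error.code}`)),
);
```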
 /**
- *
- *
- * TypedFlow extends the base Flow type with additional type information that
- * allows TypeScript to verify inputs, outputs, and plugin requirements at compile time.
+ * Get the first output of a specific type, if any exists.
 *
- *
- *
- *
+ * Unlike getSingleOutputByType, this function returns undefined if no outputs
+ * match, and returns the first match if multiple outputs exist. This is useful
+ * when you want a more lenient matching strategy.
 *
- * @template
- * @
- * @
+ * @template TOutput - The expected narrowed output type
+ * @param outputs - Array of typed outputs to search
+ * @param typeGuard - Type guard function to use for matching
+ * @returns The first matching output, or undefined if none match
 *
 * @example
 * ```typescript
- *
- *   nodes: {
- *     input: fileInputNode,
- *     resize: imageResizeNode,
- *     output: s3OutputNode
- *   },
- *   edges: [
- *     { source: 'input', target: 'resize' },
- *     { source: 'resize', target: 'output' }
- *   ]
- * });
+ * import { getFirstOutputByType, isStorageOutput } from "@uploadista/core/flow";
 *
- *
- *
- *
- *
+ * const storageOutput = getFirstOutputByType(
+ *   flowResult.outputs,
+ *   isStorageOutput
+ * );
+ *
+ * if (storageOutput) {
+ *   console.log("First storage output:", storageOutput.data.url);
+ * } else {
+ *   console.log("No storage outputs found");
+ * }
 * ```
 */
-
-  run: (args: {
-    inputs?: Partial<FlowInputMap<TNodes>>;
-    storageId: string;
-    jobId: string;
-  }) => Effect.Effect<FlowExecutionResult<FlowOutputMap<TNodes>>, UploadistaError, FlowRequirements<TNodes>>;
-  resume: (args: {
-    jobId: string;
-    storageId: string;
-    nodeResults: Record<string, unknown>;
-    executionState: {
-      executionOrder: string[];
-      currentIndex: number;
-      inputs: Partial<FlowInputMap<TNodes>>;
-    };
-  }) => Effect.Effect<FlowExecutionResult<FlowOutputMap<TNodes>>, UploadistaError, FlowRequirements<TNodes>>;
-  readonly [typedFlowInputsSymbol]?: FlowInputMap<TNodes>;
-  readonly [typedFlowOutputsSymbol]?: FlowOutputMap<TNodes>;
-  readonly [typedFlowPluginsSymbol]?: FlowPluginRequirements<TNodes>;
-};
-declare function createFlow<TNodes extends NodeDefinitionsRecord>(config: TypedFlowConfig<TNodes>): Effect.Effect<TypedFlow<TNodes, z.ZodType<FlowInputUnion<TNodes>>, z.ZodType<FlowOutputUnion<TNodes>>>, NodesErrorUnion<TNodes> | UploadistaError, FlowRequirements<TNodes>>;
-//#endregion
-//#region src/flow/types/run-args.d.ts
+declare function getFirstOutputByType<TOutput extends TypedOutput>(outputs: TypedOutput[], typeGuard: (output: TypedOutput) => output is TOutput): TOutput | undefined;
 /**
- *
+ * Get an output by its node ID.
 *
- *
+ * This helper finds an output produced by a specific node instance,
+ * regardless of its type. Useful when you know the specific node ID
+ * you're looking for.
+ *
+ * @param outputs - Array of typed outputs to search
+ * @param nodeId - The node ID to match
+ * @returns The output from the specified node, or undefined if not found
 *
 * @example
 * ```typescript
- *
- * inputs: {
- *   "input-node-1": { file: myFile, metadata: { ... } },
- *   "input-node-2": { file: anotherFile }
- * }
- * };
+ * import { getOutputByNodeId } from "@uploadista/core/flow";
 *
- *
- *
+ * const cdnOutput = getOutputByNodeId(flowResult.outputs, "cdn-storage");
+ * if (cdnOutput) {
+ *   console.log("CDN output:", cdnOutput.data);
+ * }
 * ```
 */
-declare
-  inputs: z.ZodRecord<z.ZodString, z.ZodAny>;
-}, z.core.$strip>;
-/**
- * Type representing validated flow run arguments.
- *
- * This type is inferred from the runArgsSchema and ensures type safety
- * when passing inputs to flow execution.
- */
-type RunArgs = z.infer<typeof runArgsSchema>;
-//#endregion
-//#region src/flow/types/dead-letter-item.d.ts
+declare function getOutputByNodeId(outputs: TypedOutput[], nodeId: string): TypedOutput | undefined;
 /**
- *
+ * Check if any outputs match a specific type.
 *
- *
- *
+ * Simple predicate function to check if at least one output of a given
+ * type exists in the results.
 *
- * @
- * @
-
-
- * Status of a Dead Letter Queue item.
+ * @template TOutput - The expected narrowed output type
+ * @param outputs - Array of typed outputs to check
+ * @param typeGuard - Type guard function to use for checking
+ * @returns True if at least one output matches the type guard
 *
- *
+ * @example
+ * ```typescript
+ * import { hasOutputOfType, isStorageOutput } from "@uploadista/core/flow";
 *
- *
- *
- *
- *
+ * if (hasOutputOfType(flowResult.outputs, isStorageOutput)) {
+ *   console.log("Flow produced at least one storage output");
+ * } else {
+ *   console.log("No storage outputs in this flow");
+ * }
+ * ```
 */
-
+declare function hasOutputOfType<TOutput extends TypedOutput>(outputs: TypedOutput[], typeGuard: (output: TypedOutput) => output is TOutput): boolean;
 /**
- *
+ * Type guard for init operation (streaming file upload initialization).
 *
- *
+ * Checks if the input data is an init operation that starts a streaming
+ * file upload session.
 *
- * @
- * @
- * @property nodeId - ID of the node that failed (if applicable)
- * @property stack - Stack trace (included in development mode)
- */
-interface DeadLetterError {
-  /** Error code for categorization and retry filtering */
-  code: string;
-  /** Human-readable error message */
-  message: string;
-  /** Node that failed (if applicable) */
-  nodeId?: string;
-  /** Stack trace (in dev mode only) */
-  stack?: string;
-}
-/**
- * Record of a single retry attempt.
+ * @param data - Input data to check
+ * @returns True if data is an init operation
 *
- * @
- *
- *
+ * @example
+ * ```typescript
+ * if (isInitOperation(inputData)) {
+ *   console.log("Storage ID:", inputData.storageId);
+ *   console.log("Metadata:", inputData.metadata);
+ * }
+ * ```
 */
-
-
-
-  /** Error message if the retry failed */
-  error: string;
-  /** Duration of the retry attempt in milliseconds */
-  durationMs: number;
-}
+declare function isInitOperation(data: InputData): data is Extract<InputData, {
+  operation: "init";
+}>;
 /**
- *
+ * Type guard for finalize operation (complete streaming upload).
 *
- *
- *
+ * Checks if the input data is a finalize operation that completes a
+ * previously initialized streaming upload.
 *
- * @
- * @
- * @property flowId - Flow definition that was being executed
- * @property storageId - Target storage for the flow
- * @property clientId - Client who initiated the job
- * @property error - Comprehensive error details
- * @property inputs - Original inputs passed to the flow
- * @property nodeResults - Partial results from nodes that completed before failure
- * @property failedAtNodeId - Node where execution failed (if applicable)
- * @property retryCount - Number of retry attempts made
- * @property maxRetries - Maximum retries allowed from retry policy
- * @property nextRetryAt - Scheduled time for next automatic retry
- * @property retryHistory - History of all retry attempts
- * @property createdAt - When the item was added to DLQ
- * @property updatedAt - When the item was last modified
- * @property expiresAt - TTL for automatic cleanup
- * @property status - Current status of the DLQ item
+ * @param data - Input data to check
+ * @returns True if data is a finalize operation
 *
 * @example
 * ```typescript
- *
- *
- *
- * flowId: "image-resize-pipeline",
- * storageId: "s3-production",
- * clientId: "client_456",
- * error: {
- *   code: "FLOW_NODE_ERROR",
- *   message: "External service timeout",
- *   nodeId: "resize-node"
- * },
- * inputs: { input: { uploadId: "upload_123" } },
- * nodeResults: { "input-node": { file: {...} } },
- * failedAtNodeId: "resize-node",
- * retryCount: 2,
- * maxRetries: 3,
- * nextRetryAt: new Date("2024-01-15T10:35:00Z"),
- * retryHistory: [
- *   { attemptedAt: new Date("2024-01-15T10:30:00Z"), error: "Timeout", durationMs: 5000 },
- *   { attemptedAt: new Date("2024-01-15T10:32:00Z"), error: "Timeout", durationMs: 5000 }
- * ],
- * createdAt: new Date("2024-01-15T10:30:00Z"),
- * updatedAt: new Date("2024-01-15T10:32:00Z"),
- * expiresAt: new Date("2024-01-22T10:30:00Z"),
- * status: "pending"
- * };
+ * if (isFinalizeOperation(inputData)) {
+ *   console.log("Upload ID:", inputData.uploadId);
+ * }
 * ```
 */
-
-
-
-  /** Original flow job ID that failed */
-  jobId: string;
-  /** Flow definition ID that was being executed */
-  flowId: string;
-  /** Target storage for the flow */
-  storageId: string;
-  /** Client who initiated the job (null for anonymous) */
-  clientId: string | null;
-  /** Comprehensive error details */
-  error: DeadLetterError;
-  /** Original inputs passed to the flow */
-  inputs: Record<string, unknown>;
-  /** Partial results from nodes that completed before failure */
-  nodeResults: Record<string, unknown>;
-  /** Node where execution failed (if applicable) */
-  failedAtNodeId?: string;
-  /** Number of retry attempts made */
-  retryCount: number;
-  /** Maximum retries allowed from retry policy */
-  maxRetries: number;
-  /** Scheduled time for next automatic retry */
-  nextRetryAt?: Date;
-  /** History of all retry attempts */
-  retryHistory: DeadLetterRetryAttempt[];
-  /** When the item was added to DLQ */
-  createdAt: Date;
-  /** When the item was last modified */
-  updatedAt: Date;
-  /** TTL for automatic cleanup */
-  expiresAt?: Date;
-  /** Current status of the DLQ item */
-  status: DeadLetterItemStatus;
-}
+declare function isFinalizeOperation(data: InputData): data is Extract<InputData, {
+  operation: "finalize";
+}>;
 /**
- *
+ * Type guard for URL operation (direct file fetch from URL).
 *
- *
+ * Checks if the input data is a URL operation that fetches a file
+ * directly from an external URL.
 *
- * @
- * @
- *
- * @
- *
+ * @param data - Input data to check
+ * @returns True if data is a URL operation
+ *
+ * @example
+ * ```typescript
+ * if (isUrlOperation(inputData)) {
+ *   console.log("Fetching from:", inputData.url);
+ *   console.log("Optional storage:", inputData.storageId);
+ * }
+ * ```
 */
-
-
-
-  /** Count of items by status */
-  byStatus: Record<DeadLetterItemStatus, number>;
-  /** Count of items by flow ID */
-  byFlow: Record<string, number>;
-  /** Timestamp of the oldest item */
-  oldestItem?: Date;
-  /** Average number of retries across all items */
-  averageRetryCount: number;
-}
+declare function isUrlOperation(data: InputData): data is Extract<InputData, {
+  operation: "url";
+}>;
 /**
- *
+ * Type guard for upload operations (init or url).
 *
- *
- *
- *
- * @
- * @
+ * Checks if the input data is either an init or URL operation (i.e., operations
+ * that trigger new uploads, as opposed to finalize which completes an existing upload).
+ *
+ * @param data - Input data to check
+ * @returns True if data is an init or URL operation
+ *
+ * @example
+ * ```typescript
+ * if (isUploadOperation(inputData)) {
+ *   // This is a new upload, not a finalization
+ *   if (isInitOperation(inputData)) {
+ *     console.log("Streaming upload");
+ *   } else {
+ *     console.log("URL fetch");
+ *   }
+ * }
+ * ```
 */
-
-
-
-
-
-  /** Filter by client ID */
-  clientId?: string;
-  /** Maximum items to return (default: 50) */
-  limit?: number;
-  /** Number of items to skip for pagination (default: 0) */
-  offset?: number;
-}
+declare function isUploadOperation(data: InputData): data is Extract<InputData, {
+  operation: "init" | "url";
+}>;
+//#endregion
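Taken together, the four guards above partition `InputData` by its `operation` discriminant. A minimal dispatch sketch, assuming `InputData` and the guards are importable from `@uploadista/core/flow` as in the examples above; the field accesses (`storageId`, `url`, `uploadId`) follow those same examples:

```typescript
import {
  isFinalizeOperation,
  isInitOperation,
  isUrlOperation,
  type InputData,
} from "@uploadista/core/flow";

function describeOperation(data: InputData): string {
  if (isInitOperation(data)) {
    // Narrowed to { operation: "init"; ... }: start a streaming upload session.
    return `init upload into storage ${data.storageId}`;
  }
  if (isUrlOperation(data)) {
    // Narrowed to { operation: "url"; ... }: fetch the file from an external URL.
    return `fetch from ${data.url}`;
  }
  if (isFinalizeOperation(data)) {
    // Narrowed to { operation: "finalize"; ... }: complete an existing session.
    return `finalize upload ${data.uploadId}`;
  }
  return "unknown operation";
}
```

`isUploadOperation` is the union of the first two branches, which is why it narrows to `operation: "init" | "url"`.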
+//#region src/flow/typed-flow.d.ts
 /**
- *
+ * Defines a node that can be used in a typed flow.
 *
- *
- *
- *
+ * A node definition can be either:
+ * - A plain FlowNode object
+ * - An Effect that resolves to a FlowNode (for nodes requiring dependencies)
+ *
+ * @template TNodeError - The error types that the node can produce
+ * @template TNodeRequirements - The services/dependencies the node requires
 */
-
-  /** Number of items that were retried */
-  retried: number;
-  /** Number of retries that succeeded */
-  succeeded: number;
-  /** Number of retries that failed */
-  failed: number;
-}
+type NodeDefinition<TNodeError = never, TNodeRequirements = never> = FlowNode<any, any, UploadistaError> | Effect.Effect<FlowNode<any, any, UploadistaError>, TNodeError, TNodeRequirements>;
 /**
- *
+ * A record mapping node IDs to their definitions.
 *
- *
+ * This is the primary type used for defining the nodes in a typed flow,
+ * allowing TypeScript to infer input/output schemas and requirements.
+ *
+ * @example
+ * ```typescript
+ * const nodes = {
+ *   input: fileInputNode,
+ *   resize: Effect.succeed(imageResizeNode),
+ *   output: s3OutputNode
+ * } satisfies NodeDefinitionsRecord;
+ * ```
 */
-
-  /** Number of items that were deleted */
-  deleted: number;
-}
+type NodeDefinitionsRecord = Record<string, NodeDefinition<any, any>>;
 /**
- *
+ * Extracts the error type from a NodeDefinition.
 *
- *
- *
+ * If the node is an Effect, extracts its error type.
+ * If the node is a plain FlowNode, returns never (no errors).
 */
-
-  /** Delete items older than this date */
-  olderThan?: Date;
-  /** Only delete items with this status */
-  status?: "exhausted" | "resolved";
-}
+type NodeDefinitionError<T> = T extends Effect.Effect<FlowNode<any, any, UploadistaError>, infer TError, any> ? TError : never;
 /**
- *
+ * Extracts the requirements (dependencies) from a NodeDefinition.
 *
- *
- * @property succeeded - Number that succeeded
- * @property failed - Number that failed
+ * Uses the shared ExtractEffectRequirements utility for consistency.
 */
-
-  /** Total items processed */
-  processed: number;
-  /** Number of successful retries */
-  succeeded: number;
-  /** Number of failed retries */
-  failed: number;
-}
-//#endregion
-//#region src/flow/dead-letter-queue.d.ts
+type NodeDefinitionRequirements<T> = ExtractEffectRequirements<T>;
 /**
- *
+ * Extracts all possible errors from all nodes in a flow as a union.
 *
- *
- *
+ * This iterates through all nodes in the record and combines their
+ * error types into a single union type.
 */
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- * @returns Option of the DLQ item
- */
-  getOption(itemId: string): Effect.Effect<Option.Option<DeadLetterItem>, UploadistaError>;
-  /**
-   * Delete a DLQ item.
-   *
-   * @param itemId - The DLQ item ID to delete
-   */
-  delete(itemId: string): Effect.Effect<void, UploadistaError>;
-  /**
-   * List DLQ items with optional filtering and pagination.
-   *
-   * @param options - Filter and pagination options
-   * @returns List of items and total count
-   */
-  list(options?: DeadLetterListOptions): Effect.Effect<{
-    items: DeadLetterItem[];
-    total: number;
-  }, UploadistaError>;
-  /**
-   * Update a DLQ item.
-   *
-   * @param itemId - The DLQ item ID
-   * @param updates - Partial updates to apply
-   * @returns The updated item
-   */
-  update(itemId: string, updates: Partial<DeadLetterItem>): Effect.Effect<DeadLetterItem, UploadistaError>;
-  /**
-   * Mark a DLQ item as being retried.
-   *
-   * @param itemId - The DLQ item ID
-   * @returns The updated item with status "retrying"
-   */
-  markRetrying(itemId: string): Effect.Effect<DeadLetterItem, UploadistaError>;
-  /**
-   * Record a failed retry attempt.
-   *
-   * @param itemId - The DLQ item ID
-   * @param error - Error message from the failed retry
-   * @param durationMs - Duration of the retry attempt
-   * @returns The updated item
-   */
-  recordRetryFailure(itemId: string, error: string, durationMs: number): Effect.Effect<DeadLetterItem, UploadistaError>;
-  /**
-   * Mark a DLQ item as resolved (successfully retried or manually resolved).
-   *
-   * @param itemId - The DLQ item ID
-   * @returns The updated item with status "resolved"
-   */
-  markResolved(itemId: string): Effect.Effect<DeadLetterItem, UploadistaError>;
-  /**
-   * Get items that are due for scheduled retry.
-   *
-   * @param limit - Maximum number of items to return
-   * @returns List of items ready for retry
-   */
-  getScheduledRetries(limit?: number): Effect.Effect<DeadLetterItem[], UploadistaError>;
-  /**
-   * Cleanup old DLQ items based on options.
-   *
-   * @param options - Cleanup criteria
-   * @returns Number of items deleted
-   */
-  cleanup(options?: DeadLetterCleanupOptions): Effect.Effect<DeadLetterCleanupResult, UploadistaError>;
-  /**
-   * Get DLQ statistics.
-   *
-   * @returns Aggregate statistics about the DLQ
-   */
-  getStats(): Effect.Effect<DeadLetterQueueStats, UploadistaError>;
-}
-declare const DeadLetterQueueService_base: Context.TagClass<DeadLetterQueueService, "DeadLetterQueueService", DeadLetterQueueServiceShape>;
+type NodesErrorUnion<TNodes extends NodeDefinitionsRecord> = { [K in keyof TNodes]: NodeDefinitionError<TNodes[K]> }[keyof TNodes];
+/**
+ * Extracts all service requirements from all nodes in a flow as a union.
+ *
+ * This iterates through all nodes in the record and combines their
+ * requirement types into a single union type representing all services
+ * needed by the flow.
+ *
+ * @template TNodes - The record of node definitions
+ *
+ * @example
+ * ```typescript
+ * const nodes = {
+ *   resize: imageResizeNode, // requires ImagePlugin
+ *   zip: zipNode, // requires ZipPlugin
+ * };
+ * type Requirements = NodesRequirementsUnion<typeof nodes>;
+ * // Requirements = ImagePlugin | ZipPlugin
+ * ```
+ */
+type NodesRequirementsUnion<TNodes extends NodeDefinitionsRecord> = { [K in keyof TNodes]: NodeDefinitionRequirements<TNodes[K]> }[keyof TNodes];
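Both unions above use the same mapped-type trick: map each node key to an extracted type, then index by `keyof TNodes` so the per-key results collapse into one union. A self-contained type-level sketch with placeholder services and errors (none of these names are package exports):

```typescript
import { Effect } from "effect";

// Placeholder services and error, standing in for real plugin types.
interface ImagePlugin { readonly _tag: "ImagePlugin" }
interface ZipPlugin { readonly _tag: "ZipPlugin" }
class ResizeError { readonly _tag = "ResizeError" }

// A plain node contributes never; an Effect-wrapped node contributes its
// error and requirement channels, as NodeDefinitionError /
// NodeDefinitionRequirements do above.
type PlainNode = { type: "input" };
type EffectNode = Effect.Effect<{ type: "transform" }, ResizeError, ImagePlugin | ZipPlugin>;

type NodeError<T> = T extends Effect.Effect<any, infer E, any> ? E : never;
type NodeRequirements<T> = T extends Effect.Effect<any, any, infer R> ? R : never;

type Nodes = { input: PlainNode; resize: EffectNode };

// Indexing the mapped type by its own keys collapses it into a union.
type Errors = { [K in keyof Nodes]: NodeError<Nodes[K]> }[keyof Nodes];
// => ResizeError (the plain node's never vanishes in the union)
type Requirements = { [K in keyof Nodes]: NodeRequirements<Nodes[K]> }[keyof Nodes];
// => ImagePlugin | ZipPlugin
```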
 /**
- *
+ * Extracts all service requirements from a flow's nodes.
+ *
+ * This includes all services required by any node in the flow,
+ * including UploadEngine (which is provided by the runtime).
+ *
+ * @template TNodes - The record of node definitions
 *
 * @example
 * ```typescript
- * const
- *
- *
- *
+ * const myFlow = createFlow({
+ *   nodes: {
+ *     input: fileInputNode,
+ *     process: imageProcessNode, // requires ImagePlugin
+ *   },
+ *   edges: [...]
 * });
+ * type AllRequirements = FlowRequirements<typeof myFlow.nodes>;
+ * // AllRequirements = ImagePlugin | UploadEngine
 * ```
 */
-
+type FlowRequirements<TNodes extends NodeDefinitionsRecord> = NodesRequirementsUnion<TNodes>;
+/**
+ * Extracts plugin service requirements from a flow, excluding UploadEngine.
+ *
+ * This type is useful for determining which plugin layers need to be
+ * provided when creating a server, as UploadEngine is automatically
+ * provided by the runtime.
+ *
+ * @template TNodes - The record of node definitions
+ *
+ * @example
+ * ```typescript
+ * const myFlow = createFlow({
+ *   nodes: {
+ *     resize: imageResizeNode, // requires ImagePlugin
+ *     upload: s3OutputNode, // requires UploadEngine
+ *   },
+ *   edges: [...]
+ * });
+ * type PluginRequirements = FlowPluginRequirements<typeof myFlow.nodes>;
+ * // PluginRequirements = ImagePlugin (UploadEngine excluded)
+ * ```
+ */
+type FlowPluginRequirements<TNodes extends NodeDefinitionsRecord> = Exclude<FlowRequirements<TNodes>, UploadEngine>;
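The `Exclude` in `FlowPluginRequirements` is plain TypeScript set subtraction on the requirement union: the runtime supplies `UploadEngine`, so only the remaining plugin services are left for the caller to provide. A tiny type-level illustration with stand-in service types (not package exports):

```typescript
// Stand-in services for illustration only.
interface UploadEngine { readonly _tag: "UploadEngine" }
interface ImagePlugin { readonly _tag: "ImagePlugin" }

// All services a flow's nodes require, engine included.
type AllRequirements = UploadEngine | ImagePlugin;

// Mirrors FlowPluginRequirements: subtract what the runtime provides.
type PluginRequirements = Exclude<AllRequirements, UploadEngine>; // ImagePlugin
```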
+/**
+ * Infers the concrete FlowNode type from a NodeDefinition.
+ *
+ * If the definition is already a FlowNode, returns it as-is.
+ * If the definition is an Effect, extracts the FlowNode from the Effect's success type.
+ *
+ * Uses the shared ResolveEffect utility for consistency.
+ */
+type InferNode<T> = T extends FlowNode<any, any, UploadistaError> ? T : ResolveEffect<T> extends FlowNode<any, any, UploadistaError> ? ResolveEffect<T> : never;
+type ExtractKeysByNodeType<TNodes extends NodeDefinitionsRecord, TType extends NodeType> = { [K in keyof TNodes]: InferNode<TNodes[K]>["type"] extends TType ? K : never }[keyof TNodes];
+type SchemaInfer<T> = T extends z.ZodTypeAny ? z.infer<T> : never;
+type FlowInputMap<TNodes extends NodeDefinitionsRecord> = { [K in Extract<ExtractKeysByNodeType<TNodes, NodeType.input>, string>]: SchemaInfer<InferNode<TNodes[K]>["inputSchema"]> };
+type FlowOutputMap<TNodes extends NodeDefinitionsRecord> = { [K in Extract<keyof TNodes, string>]: SchemaInfer<InferNode<TNodes[K]>["outputSchema"]> };
+type FlowInputUnion<TNodes extends NodeDefinitionsRecord> = { [K in Extract<ExtractKeysByNodeType<TNodes, NodeType.input>, string>]: SchemaInfer<InferNode<TNodes[K]>["inputSchema"]> }[Extract<ExtractKeysByNodeType<TNodes, NodeType.input>, string>];
+type FlowOutputUnion<TNodes extends NodeDefinitionsRecord> = { [K in Extract<keyof TNodes, string>]: SchemaInfer<InferNode<TNodes[K]>["outputSchema"]> }[Extract<keyof TNodes, string>];
+type NodeKey<TNodes extends NodeDefinitionsRecord> = Extract<keyof TNodes, string>;
+type TypedFlowEdge<TNodes extends NodeDefinitionsRecord> = {
+  source: NodeKey<TNodes>;
+  target: NodeKey<TNodes>;
+  sourcePort?: string;
+  targetPort?: string;
+};
+type TypedFlowConfig<TNodes extends NodeDefinitionsRecord> = {
+  flowId: string;
+  name: string;
+  nodes: TNodes;
+  edges: Array<TypedFlowEdge<TNodes>>;
+  typeChecker?: TypeCompatibilityChecker;
+  onEvent?: (event: FlowEvent) => Effect.Effect<{
+    eventId: string | null;
+  }, UploadistaError>;
+  parallelExecution?: {
+    enabled?: boolean;
+    maxConcurrency?: number;
+  };
+  inputSchema?: z.ZodTypeAny;
+  outputSchema?: z.ZodTypeAny;
+  hooks?: {
+    /**
+     * Called when a sink node (terminal node with no outgoing edges) produces an output.
+     * This hook runs after auto-persistence for UploadFile outputs.
+     *
+     * Use this hook to perform additional post-processing such as:
+     * - Saving output metadata to a database
+     * - Tracking outputs in external systems
+     * - Adding custom metadata to outputs
+     * - Triggering downstream workflows
+     *
+     * **Important**: The hook must not have any service requirements (Effect requirements must be `never`).
+     * All necessary services should be captured in the closure when defining the hook.
+     *
+     * @example
+     * ```typescript
+     * // Using Promise (simpler for most users)
+     * hooks: {
+     *   onNodeOutput: async ({ output }) => {
+     *     await db.save(output);
+     *     return output;
+     *   }
+     * }
+     * ```
+     */
+    onNodeOutput?: <TOutput>(context: {
+      output: TOutput;
+      nodeId: string;
+      flowId: string;
+      jobId: string;
+      storageId: string;
+      clientId: string | null;
+    }) => Effect.Effect<TOutput, UploadistaError, never> | Promise<TOutput>;
+  };
   /**
-   *
-   *
+   * Circuit breaker configuration for resilience against external service failures.
+   *
+   * @example
+   * ```typescript
+   * circuitBreaker: {
+   *   defaults: { enabled: false },
+   *   nodeTypeOverrides: {
+   *     "Describe Image": {
+   *       enabled: true,
+   *       failureThreshold: 5,
+   *       resetTimeout: 60000,
+   *       fallback: { type: "skip", passThrough: true }
+   *     }
+   *   }
+   * }
+   * ```
   */
-
-
+  circuitBreaker?: {
+    /** Default circuit breaker config for all nodes */
+    defaults?: FlowCircuitBreakerConfig;
+    /** Override circuit breaker config per node type (node name) */
+    nodeTypeOverrides?: Record<string, FlowCircuitBreakerConfig>;
+  };
+};
+declare const typedFlowInputsSymbol: unique symbol;
+declare const typedFlowOutputsSymbol: unique symbol;
+declare const typedFlowPluginsSymbol: unique symbol;
 /**
- *
+ * A type-safe Flow that infers input/output types and requirements from its nodes.
 *
- *
+ * TypedFlow extends the base Flow type with additional type information that
+ * allows TypeScript to verify inputs, outputs, and plugin requirements at compile time.
+ *
+ * The phantom type properties (using unique symbols) enable type-level metadata
+ * without affecting runtime behavior, allowing other type utilities to extract
+ * this information for validation purposes.
+ *
+ * @template TNodes - Record of node definitions used in the flow
+ * @template TInputSchema - Zod schema for flow inputs (inferred from input nodes)
+ * @template TOutputSchema - Zod schema for flow outputs (inferred from output nodes)
+ *
+ * @example
+ * ```typescript
+ * const myFlow = createFlow({
+ *   nodes: {
+ *     input: fileInputNode,
+ *     resize: imageResizeNode,
+ *     output: s3OutputNode
+ *   },
+ *   edges: [
+ *     { source: 'input', target: 'resize' },
+ *     { source: 'resize', target: 'output' }
+ *   ]
+ * });
+ *
+ * // TypeScript infers:
+ * // - Input types from fileInputNode.inputSchema
+ * // - Output types from s3OutputNode.outputSchema
+ * // - Requirements: ImagePlugin (from resize node)
+ * ```
 */
-
+type TypedFlow<TNodes extends NodeDefinitionsRecord, TInputSchema extends z.ZodTypeAny, TOutputSchema extends z.ZodTypeAny> = Flow<TInputSchema, TOutputSchema, FlowRequirements<TNodes>> & {
+  run: (args: {
+    inputs?: Partial<FlowInputMap<TNodes>>;
+    storageId: string;
+    jobId: string;
+  }) => Effect.Effect<FlowExecutionResult<FlowOutputMap<TNodes>>, UploadistaError, FlowRequirements<TNodes>>;
+  resume: (args: {
+    jobId: string;
+    storageId: string;
+    nodeResults: Record<string, unknown>;
+    executionState: {
+      executionOrder: string[];
+      currentIndex: number;
+      inputs: Partial<FlowInputMap<TNodes>>;
+    };
+  }) => Effect.Effect<FlowExecutionResult<FlowOutputMap<TNodes>>, UploadistaError, FlowRequirements<TNodes>>;
+  readonly [typedFlowInputsSymbol]?: FlowInputMap<TNodes>;
+  readonly [typedFlowOutputsSymbol]?: FlowOutputMap<TNodes>;
+  readonly [typedFlowPluginsSymbol]?: FlowPluginRequirements<TNodes>;
+};
+declare function createFlow<TNodes extends NodeDefinitionsRecord>(config: TypedFlowConfig<TNodes>): Effect.Effect<TypedFlow<TNodes, z.ZodType<FlowInputUnion<TNodes>>, z.ZodType<FlowOutputUnion<TNodes>>>, NodesErrorUnion<TNodes> | UploadistaError, FlowRequirements<TNodes>>;
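Note that `createFlow` itself returns an `Effect` (so Effect-wrapped node definitions can be resolved and the graph validated), which means building and running a typed flow composes into one pipeline. A minimal sketch, assuming the `fileInputNode` / `imageResizeNode` / `s3OutputNode` definitions from the JSDoc examples are in scope:

```typescript
import { Effect } from "effect";
import { createFlow } from "@uploadista/core/flow";

const program = Effect.gen(function* () {
  // Resolves node Effects, validates edges, and infers input/output maps.
  const flow = yield* createFlow({
    flowId: "resize-pipeline",
    name: "Resize pipeline",
    nodes: { input: fileInputNode, resize: imageResizeNode, output: s3OutputNode },
    edges: [
      { source: "input", target: "resize" },
      { source: "resize", target: "output" },
    ],
  });

  // run() is keyed by input-node ids, matching the TypedFlow["run"]
  // signature declared above; storageId/jobId identify the execution.
  return yield* flow.run({
    inputs: { input: { /* shaped by fileInputNode's inputSchema */ } },
    storageId: "s3-production",
    jobId: "job_123",
  });
});
```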
+//#endregion
+//#region src/flow/types/run-args.d.ts
 /**
- *
+ * Zod schema for validating flow run arguments.
+ *
+ * @property inputs - Record mapping input node IDs to their input data
 *
 * @example
 * ```typescript
- * const
- *
- *
- *
- *
- *
- *
- *
- * );
+ * const args = {
+ *   inputs: {
+ *     "input-node-1": { file: myFile, metadata: { ... } },
+ *     "input-node-2": { file: anotherFile }
+ *   }
+ * };
+ *
+ * // Validate before running
+ * const validated = runArgsSchema.parse(args);
 * ```
 */
-declare const
-
-
-
-
-
-
-
-
-
-declare function resolveUploadMetadata(metadata: FileMetadata): ResolvedUploadMetadata;
+declare const runArgsSchema: z.ZodObject<{
+  inputs: z.ZodRecord<z.ZodString, z.ZodAny>;
+}, z.core.$strip>;
+/**
+ * Type representing validated flow run arguments.
+ *
+ * This type is inferred from the runArgsSchema and ensures type safety
+ * when passing inputs to flow execution.
+ */
+type RunArgs = z.infer<typeof runArgsSchema>;
 //#endregion
 //#region src/flow/utils/file-naming.d.ts
 /**
@@ -8669,5 +8659,15 @@ declare const AVAILABLE_TEMPLATE_VARIABLES: readonly [{
   readonly example: "1";
 }];
 //#endregion
-
-
+//#region src/flow/utils/resolve-upload-metadata.d.ts
+type FileMetadata = UploadFile["metadata"];
+type ResolvedUploadMetadata = {
+  type: string;
+  fileName: string;
+  metadata: FileMetadata;
+  metadataJson: string | undefined;
+};
+declare function resolveUploadMetadata(metadata: FileMetadata): ResolvedUploadMetadata;
+//#endregion
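`resolveUploadMetadata` lands without JSDoc in this release. Reading only the declarations above: it takes an `UploadFile["metadata"]` bag and returns the normalized content `type` and `fileName`, the original `metadata`, and an optional JSON rendering. A hedged usage sketch; the metadata fields passed in are assumptions for illustration, not a documented shape:

```typescript
import { resolveUploadMetadata } from "@uploadista/core/flow";

// Hypothetical metadata bag; the real shape is UploadFile["metadata"].
const resolved = resolveUploadMetadata({
  type: "image/png",
  fileName: "avatar.png",
});

// Per ResolvedUploadMetadata, these fields are guaranteed, with
// metadataJson possibly undefined when there is nothing to serialize.
console.log(resolved.type, resolved.fileName, resolved.metadataJson);
```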
+export { VideoPlugin as $, memoryCircuitBreakerStoreLayer as $a, FlowDeadLetterQueueConfig as $i, UploadEngineShape as $n, DeadLetterListOptions as $r, ImageAiPluginLayer as $t, isImageDescriptionOutput as A, FlowEventFlowError as Aa, DataStore as Ai, ExtractLayerServices as An, uploadEventEmitter as Ar, Transformation as At, describeImageParamsSchema as B, FlowEventNodeStart as Ba, UploadStrategy as Bi, FlowEngineOptions as Bn, DeadLetterQueueServiceShape as Br, rotateTransformSchema as Bt, createTypeGuard as C, FlowEventDlqItemExhausted as Ca, FlowData as Ci, StreamingTransformResult as Cn, CircuitBreakerEventHandler as Co, BaseEventEmitterService as Cr, OverlayPosition as Ct, getSingleOutputByType as D, FlowEventDlqRetrySuccess as Da, BufferedUploadFileDataStore as Di, ExtractEffectError as Dn, UploadEventEmitter as Dr, SharpenTransform as Dt, getOutputByNodeId as E, FlowEventDlqRetryStart as Ea, getFlowData as Ei, createTransformNode as En, DEFAULT_CIRCUIT_BREAKER_CONFIG as Eo, TypedEventEmitter as Er, SepiaTransform as Et, isUploadOperation as F, FlowEventNodeEnd as Fa, StreamWriteResult as Fi, createInputNode as Fn, UploadEventType as Fr, contrastTransformSchema as Ft, ZipPlugin as G, createFlowNode as Ga, AutoNamingSuffixGenerator as Gi, WaitUntilCallback as Gn, FlowJobTask as Gr, transformationSchema as Gt, PluginLayer as H, ConditionOperator as Ha, isDataStore as Hi, FlowProvider as Hn, deadLetterQueueService as Hr, sharpenTransformSchema as Ht, isUrlOperation as I, FlowEventNodeError as Ia, StreamingConfig as Ii, inputDataSchema as In, uploadEventSchema as Ir, flipTransformSchema as It, ScanMetadata as J, DistributedCircuitBreaker as Ja, FileNamingConfig as Ji, NegotiatedStrategy as Jn, DeadLetterCleanupOptions as Jr, resizeParamsSchema as Jt, ZipPluginLayer as K, getNodeData as Ka, BuiltInTypedOutput as Ki, createFlowEngine as Kn, FlowJobTaskStatus as Kr, watermarkTransformSchema as Kt, RemoveBackgroundParams as L, FlowEventNodePause as La, UploadFileDataStore as Li, inputNodeParamsSchema as Ln, EventBroadcaster as Lr, grayscaleTransformSchema as Lt, isOcrOutput as M, FlowEventFlowStart as Ma, DataStoreConfig as Mi, FlowCondition as Mn, WebSocketMessage as Mr, WatermarkTransform as Mt, isStorageOutput as N, FlowEventJobEnd as Na, DataStoreWriteOptions as Ni, InputData as Nn, webSocketMessageSchema as Nr, blurTransformSchema as Nt, hasOutputOfType as O, FlowEventFlowCancel as Oa, DEFAULT_MULTIPART_PART_SIZE as Oi, ExtractEffectRequirements as On, eventToMessageSerializer as Or, TextTransform as Ot, isUploadFile as P, FlowEventJobStart as Pa, StreamWriteOptions as Pi, InputNodeParams as Pn, UploadEvent as Pr, brightnessTransformSchema as Pt, VirusScanPluginShape as Q, makeMemoryCircuitBreakerStore as Qa, FlowConfig as Qi, UploadEngineOptions as Qn, DeadLetterItemStatus as Qr, ImageAiPlugin as Qt, removeBackgroundParamsSchema as R, FlowEventNodeResponse as Ra, UploadFileDataStores as Ri, FlowEngine as Rn, EventBroadcasterService as Rr, logoTransformSchema as Rt, NarrowedTypedOutput as S, FlowEventDlqItemAdded as Sa, Flow as Si, StreamingTransformFn as Sn, CircuitBreakerEvent as So, BaseEventEmitter as Sr, LogoTransform as St, getFirstOutputByType as T, FlowEventDlqRetryFailed as Ta, createFlowWithSchema as Ti, TransformNodeConfig as Tn, CircuitBreakerState as To, FlowEventEmitter as Tr, RotateTransform as Tt, ZipInput as U, ConditionValue as Ua, FlowEdge as Ui, FlowProviderShape as Un, FlowJob as Ur, textTransformSchema as Ut, Plugin as V, ConditionField as Va, createDataStoreLayer as Vi, FlowEngineShape as Vn, createDeadLetterQueueService as Vr, sepiaTransformSchema as Vt, ZipParams as W, NodeType as Wa, createFlowEdge as Wi, FlowWaitUntil as Wn, FlowJobStatus as Wr, transformImageParamsSchema as Wt, VirusScanPlugin as X, kvCircuitBreakerStoreLayer as Xa, FlowCircuitBreakerConfig as Xi, UploadStrategyOptions as Xn, DeadLetterError as Xr, optimizeParamsSchema as Xt, ScanResult as Y, DistributedCircuitBreakerRegistry as Ya, FileNamingFunction as Yi, UploadStrategyNegotiator as Yn, DeadLetterCleanupResult as Yr, OptimizeParams as Yt, VirusScanPluginLayer as Z, makeKvCircuitBreakerStore as Za, FlowCircuitBreakerFallback as Zi, UploadEngine as Zn, DeadLetterItem as Zr, ImageAiContext as Zt, NodeDefinitionsRecord as _, calculateExpirationDate as _a, InputTypeDefinition as _i, CredentialProviderLayer as _n, UploadFile as _o, HealthResponse as _r, BlurTransform as _t, buildNamingContext as a, NodeTypeMap as aa, ImageDescriptionOutput as ai, MergePdfParams as an, createInitialCircuitBreakerState as ao, MiddlewareContext as ar, trimVideoParamsSchema as at, TypedFlowEdge as b, EventType as ba, inputTypeRegistry as bi, ParallelScheduler as bn, uploadFileSchema as bo, formatHealthAsText as br, FlipTransform as bt, interpolateFileName as c, completeNodeExecution as ca, STORAGE_OUTPUT_TYPE_ID as ci, DocumentAiContext as cn, DeadLetterQueueKVStore as co, MiddlewareServiceLive as cr, ResizeVideoParams as ct, runArgsSchema as d, DEFAULT_RETRY_POLICY as da, ocrOutputSchema as di, DocumentAiPluginShape as dn, TypedKvStore as do, CircuitBreakerHealthSummary as dr, extractFrameVideoParamsSchema as dt, FlowNode as ea, DeadLetterProcessResult as ei, ImageAiPluginShape as en, CircuitBreakerStateData as eo, createUploadEngine as er, VideoPluginLayer as et, FlowInputMap as f, ExponentialBackoff as fa, OutputTypeDefinition as fi, OcrParams as fn, UploadFileKVStore as fo, ComponentHealth as fr, DescribeVideoMetadata as ft, NodeDefinition as g, calculateBackoffDelay as ga, validateFlowOutput as gi, CredentialProvider as gn, uploadFileKvStore as go, HealthComponents as gr, ImagePluginShape as gt, FlowRequirements as h, RetryPolicy as ha, outputTypeRegistry as hi, OcrTaskType as hn, jsonSerializer as ho, HealthCheckConfig as hr, ImagePluginLayer as ht, applyFileNaming as i, NodeExecutionResult as ia, IMAGE_DESCRIPTION_OUTPUT_TYPE_ID as ii, DocumentPluginShape as in, CircuitBreakerStoreService as io, Middleware as ir, TrimVideoParams as it, isInitOperation as j, FlowEventFlowPause as ja, DataStoreCapabilities as ji, ResolveEffect as jn, WebSocketConnection as jr, TransformationType as jt, isFinalizeOperation as k, FlowEventFlowEnd as ka, DEFAULT_STREAMING_CONFIG as ki, ExtractLayerService as kn, flowEventEmitter as kr, TransformImageParams as kt, validatePattern as l, waitingNodeExecution as la, STREAMING_INPUT_TYPE_ID as li, DocumentAiPlugin as ln, FlowJobKVStore as lo, InputFile as lr, resizeVideoParamsSchema as lt, FlowPluginRequirements as m, ImmediateBackoff as ma, OutputValidationResult as mi, OcrResult as mn, flowJobKvStore as mo, DlqHealthSummary as mr, ImagePlugin as mt, resolveUploadMetadata as n, NamingContext as na, DeadLetterRetryAllResult as ni, DocumentPlugin as nn, CircuitBreakerStats as no, compareMimeTypes as nr, VideoStreamInput as nt, getBaseName as o, TypeCompatibilityChecker as oa, OCR_OUTPUT_TYPE_ID as oi, SplitPdfParams as on, BaseKvStore as oo, MiddlewareNext as or, TranscodeVideoParams as ot, FlowOutputMap as p, FixedBackoff as pa, OutputTypeRegistry as pi, OcrResolution as pn, deadLetterQueueKvStore as po, DEFAULT_HEALTH_CHECK_CONFIG as pr, describeVideoMetadataSchema as pt, ZipPluginShape as q, AllowRequestResult as qa, CustomTypedOutput as qi, flowEngine as qn, FlowJobTraceContext as qr, ResizeParams as qt, AVAILABLE_TEMPLATE_VARIABLES as r, NodeConnectionValidator as ra, DeadLetterRetryAttempt as ri, DocumentPluginLayer as rn, CircuitBreakerStore as ro, detectMimeType as rr, VideoStreamOptions as rt, getExtension as s, TypedOutput as sa, OcrOutput as si, SplitPdfResult as sn, BaseKvStoreService as so, MiddlewareService as sr, transcodeVideoParamsSchema as st, ResolvedUploadMetadata as t, FlowNodeData as ta, DeadLetterQueueStats as ti, DocumentMetadata as tn, CircuitBreakerStateValue as to, uploadEngine as tr, VideoPluginShape as tt, RunArgs as u, BackoffStrategy as ua, imageDescriptionOutputSchema as ui, DocumentAiPluginLayer as un, KvStore as uo, inputFileSchema as ur, ExtractFrameVideoParams as ut, TypedFlow as v, isErrorRetryable as va, InputTypeRegistry as vi, CredentialProviderShape as vn, UploadFileTraceContext as vo, HealthResponseFormat as vr, BrightnessTransform as vt, filterOutputsByType as w, FlowEventDlqItemResolved as wa, FlowExecutionResult as wi, TransformMode as wn, CircuitBreakerFallback as wo, EventEmitter as wr, ResizeTransform as wt, createFlow as x, FlowEvent as xa, validateFlowInput as xi, ParallelSchedulerConfig as xn, CircuitBreakerConfig as xo, getHealthResponseFormat as xr, GrayscaleTransform as xt, TypedFlowConfig as y, DlqEvent as ya, InputValidationResult as yi, ExecutionLevel as yn, traceContextSchema as yo, HealthStatus as yr, ContrastTransform as yt, DescribeImageParams as z, FlowEventNodeResume as za, UploadFileDataStoresShape as zi, FlowEngineLayer as zn, DeadLetterQueueService as zr, resizeTransformSchema as zt };
+//# sourceMappingURL=index-DMqaf28W.d.cts.map