@blinkdotnew/dev-sdk 2.1.2 → 2.1.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.mts CHANGED
@@ -61,6 +61,39 @@ interface BlinkClientConfig {
  projectId: string;
  authRequired?: boolean;
  auth?: BlinkAuthConfig;
+ /**
+  * Publishable key (client-safe).
+  *
+  * Used for **public endpoints** when no user JWT is present (e.g. analytics ingest, storage upload,
+  * optional public DB reads). Never use for privileged operations.
+  */
+ publishableKey?: string;
+ /**
+  * Secret key (server-only, privileged). Permanent, never expires.
+  *
+  * Used in **server runtimes** (Edge Functions, Workers) for privileged operations that require
+  * service-role access (e.g. raw SQL, bypassing row-level security).
+  *
+  * Format: `blnk_sk_{projectId-last-8}_{random}` (similar to Stripe's `sk_live_...`)
+  *
+  * **Security**: Never expose this key in client-side code. It is injected by the platform
+  * into edge function environments as `BLINK_SECRET_KEY`.
+  *
+  * When present, this key takes precedence over user JWTs for all requests.
+  *
+  * @example
+  * // Edge function (Deno)
+  * const blink = createClient({
+  *   projectId: Deno.env.get('BLINK_PROJECT_ID')!,
+  *   secretKey: Deno.env.get('BLINK_SECRET_KEY'),
+  * })
+  */
+ secretKey?: string;
+ /**
+  * @deprecated Use `secretKey` instead. Service tokens are JWT-based and expire after 365 days.
+  * Secret keys are permanent and never expire.
+  */
+ serviceToken?: string;
  /**
  * Storage adapter for cross-platform token persistence
  *
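The new `publishableKey` drops into the same `createClient` config object as the `secretKey` example above. A minimal client-side sketch, assuming placeholder values (the publishable key's format is not documented in this diff):

```ts
import { createClient } from '@blinkdotnew/dev-sdk'

// Client-safe setup: the publishable key may ship in browser bundles; the secret key must not.
const blink = createClient({
  projectId: 'my-project-id',       // placeholder
  publishableKey: 'pk-placeholder', // placeholder; covers public endpoints when no user JWT is present
})
```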
@@ -281,6 +314,9 @@ declare class BlinkError extends Error {
  constructor(message: string, code?: string | undefined, status?: number | undefined, details?: any);
  }
  interface StorageUploadOptions {
+ /**
+  * @deprecated Blink storage uploads are add-only by default. This option is ignored.
+  */
  upsert?: boolean;
  onProgress?: (percent: number) => void;
  }
@@ -445,6 +481,45 @@ interface TranscriptionRequest {
  response_format?: 'json' | 'text' | 'srt' | 'verbose_json' | 'vtt';
  signal?: AbortSignal;
  }
+ interface VideoGenerationRequest {
+   prompt: string;
+   model?: string;
+   image_url?: string;
+   duration?: string;
+   aspect_ratio?: string;
+   resolution?: string;
+   negative_prompt?: string;
+   generate_audio?: boolean;
+   seed?: number;
+   cfg_scale?: number;
+   signal?: AbortSignal;
+ }
+ interface VideoGenerationResponse {
+   result: {
+     video: {
+       url: string;
+       content_type?: string;
+       file_name?: string;
+       file_size?: number;
+     };
+     seed?: number;
+     video_id?: string;
+     thumbnail?: {
+       url: string;
+     };
+   };
+   metadata?: {
+     projectId: string;
+     timestamp: string;
+     provider: string;
+     model: string;
+   };
+   usage?: {
+     creditsCharged: number;
+     costUSD: number;
+     model: string;
+   };
+ }
  interface TranscriptionResponse {
  text: string;
  transcript?: string;
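Note that `VideoGenerationRequest` and `VideoGenerationResponse` do not appear in the updated export statement at the end of this diff, so a consuming sketch relies on the return type inferred from `blink.ai.generateVideo` (documented further down); the prompt and options are illustrative:

```ts
// Assumes `blink` was created with createClient().
const response = await blink.ai.generateVideo({
  prompt: 'Waves rolling onto a beach at dusk', // illustrative prompt
  duration: '5s',
  aspect_ratio: '16:9',
})

// `response` is inferred as VideoGenerationResponse.
console.log(response.result.video.url)
console.log('Credits charged:', response.usage?.creditsCharged)
```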
@@ -483,6 +558,7 @@ interface BlinkAI {
  background?: "auto" | "transparent" | "opaque";
  signal?: AbortSignal;
  }): Promise<ImageGenerationResponse>;
+ generateVideo(options: VideoGenerationRequest): Promise<VideoGenerationResponse>;
  generateSpeech(options: SpeechGenerationRequest): Promise<SpeechGenerationResponse>;
  transcribeAudio(options: TranscriptionRequest): Promise<TranscriptionResponse>;
  }
@@ -731,13 +807,61 @@ interface BlinkNotifications {
  email(params: SendEmailRequest): Promise<SendEmailResponse>;
  }

+ type ConnectorProvider = 'discord' | 'notion' | 'google_drive' | 'google_calendar' | 'ai';
+ type ConnectorAuthMode = 'oauth' | 'api_key' | 'blink_managed' | 'hybrid';
+ interface ConnectorStatusData {
+   connected: boolean;
+   provider: ConnectorProvider;
+   auth_mode?: ConnectorAuthMode;
+   account_id?: string;
+   metadata?: Record<string, unknown>;
+   expires_at?: any;
+   scopes?: string[];
+ }
+ interface ConnectorStatusResponse {
+   success: boolean;
+   data: ConnectorStatusData;
+ }
+ interface ConnectorExecuteRequest<TParams = Record<string, unknown>> {
+   method: string;
+   params?: TParams;
+   account_id?: string;
+   http_method?: string;
+ }
+ interface ConnectorExecuteResponse<TData = any> {
+   success: boolean;
+   data: TData;
+ }
+ interface ConnectorApiKeyRequest<TMetadata = Record<string, unknown>> {
+   api_key: string;
+   account_id?: string;
+   metadata?: TMetadata;
+ }
+ interface ConnectorApiKeyResponse {
+   success: boolean;
+   data: {
+     id: string;
+     account_id?: string;
+   };
+ }
+ interface BlinkConnectors {
+   status(provider: ConnectorProvider, options?: {
+     account_id?: string;
+   }): Promise<ConnectorStatusResponse>;
+   execute<TParams = Record<string, unknown>, TData = any>(provider: ConnectorProvider, request: ConnectorExecuteRequest<TParams>): Promise<ConnectorExecuteResponse<TData>>;
+   saveApiKey<TMetadata = Record<string, unknown>>(provider: ConnectorProvider, request: ConnectorApiKeyRequest<TMetadata>): Promise<ConnectorApiKeyResponse>;
+ }
+ declare class BlinkConnectorError extends BlinkError {
+   constructor(message: string, status?: number, details?: any);
+ }
+
  /**
  * HTTP client for Blink API requests
  * Handles authentication, error handling, and request/response processing
  */

  interface RequestOptions {
- method?: 'GET' | 'POST' | 'PATCH' | 'DELETE';
+ method?: 'GET' | 'POST' | 'PUT' | 'PATCH' | 'DELETE';
  headers?: Record<string, string>;
  body?: any;
  searchParams?: Record<string, string>;
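Taken together, the connector types above describe a small status / execute / saveApiKey surface. A hedged usage sketch; the `'notion'` provider is real per `ConnectorProvider`, but the `method` string, params, and API key value are hypothetical:

```ts
// Assumes `blink` was created with createClient().
const status = await blink.connectors.status('notion')

if (status.data.connected) {
  // ConnectorExecuteRequest only requires `method`; the name and params here are illustrative.
  const res = await blink.connectors.execute<{ query: string }, { results: unknown[] }>('notion', {
    method: 'search',
    params: { query: 'launch checklist' },
  })
  console.log(res.data.results)
} else {
  // For api_key-style connectors, credentials can be stored up front.
  await blink.connectors.saveApiKey('notion', { api_key: 'placeholder-api-key' })
}
```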
@@ -752,9 +876,14 @@ declare class HttpClient {
  private readonly authUrl;
  private readonly coreUrl;
  readonly projectId: string;
+ private readonly publishableKey?;
+ private readonly secretKey?;
  private getToken;
  private getValidToken?;
  constructor(config: BlinkClientConfig, getToken: () => string | null, getValidToken?: () => Promise<string | null>);
+ private shouldAttachPublishableKey;
+ private shouldSkipSecretKey;
+ private getAuthorizationHeader;
  /**
  * Make an authenticated request to the Blink API
  */
@@ -836,7 +965,7 @@ declare class HttpClient {
  signal?: AbortSignal;
  }): Promise<BlinkResponse<any>>;
  /**
-  * Stream AI text generation with Vercel AI SDK data stream format
+  * Stream AI text generation - uses Vercel AI SDK's pipeUIMessageStreamToResponse (Data Stream Protocol)
  */
  streamAiText(prompt: string, options: {
  model?: string | undefined;
@@ -860,7 +989,7 @@ declare class HttpClient {
  signal?: AbortSignal;
  }): Promise<BlinkResponse<any>>;
  /**
-  * Stream AI object generation with Vercel AI SDK data stream format
+  * Stream AI object generation - uses Vercel AI SDK's pipeTextStreamToResponse
  */
  streamAiObject(prompt: string, options: {
  model?: string | undefined;
@@ -895,6 +1024,18 @@ declare class HttpClient {
  response_format?: string;
  signal?: AbortSignal;
  }): Promise<BlinkResponse<any>>;
+ aiVideo(prompt: string, options?: {
+   model?: string;
+   image_url?: string;
+   duration?: string;
+   aspect_ratio?: string;
+   resolution?: string;
+   negative_prompt?: string;
+   generate_audio?: boolean;
+   seed?: number;
+   cfg_scale?: number;
+   signal?: AbortSignal;
+ }): Promise<BlinkResponse<any>>;
  /**
  * Data-specific requests
  */
@@ -904,6 +1045,13 @@ declare class HttpClient {
  dataScreenshot(projectId: string, request: ScreenshotRequest): Promise<BlinkResponse<ScreenshotResponse>>;
  dataFetch(projectId: string, request: FetchRequest): Promise<BlinkResponse<FetchResponse | AsyncFetchResponse>>;
  dataSearch(projectId: string, request: SearchRequest): Promise<BlinkResponse<SearchResponse>>;
+ /**
+  * Connector requests
+  */
+ private formatProviderForPath;
+ connectorStatus(provider: ConnectorProvider): Promise<BlinkResponse<ConnectorStatusResponse>>;
+ connectorExecute<TParams = Record<string, unknown>, TData = any>(provider: ConnectorProvider, request: ConnectorExecuteRequest<TParams>): Promise<BlinkResponse<ConnectorExecuteResponse<TData>>>;
+ connectorSaveApiKey<TMetadata = Record<string, unknown>>(provider: ConnectorProvider, request: ConnectorApiKeyRequest<TMetadata>): Promise<BlinkResponse<ConnectorApiKeyResponse>>;
  /**
  * Realtime-specific requests
  */
@@ -941,17 +1089,17 @@ declare class HttpClient {
  private parseResponse;
  private handleErrorResponse;
  /**
-  * Parse Vercel AI SDK data stream format
-  * Handles text chunks (0:"text"), partial objects (2:[...]), and metadata (d:, e:)
+  * Parse Vercel AI SDK v5 Data Stream Protocol (Server-Sent Events)
+  * Supports all event types from the UI Message Stream protocol
  */
- private parseDataStream;
+ private parseDataStreamProtocol;
  }

  /**
  * Platform detection for cross-platform compatibility
-  * Detects whether code is running on web, React Native, or Node.js
+  * Detects whether code is running on web, React Native, Node.js, or Deno
  */
- type Platform = 'web' | 'react-native' | 'node';
+ type Platform = 'web' | 'react-native' | 'node' | 'deno';
  /**
  * Current platform
  */
@@ -962,7 +1110,9 @@ declare const platform: Platform;
  declare const isWeb: boolean;
  declare const isReactNative: boolean;
  declare const isNode: boolean;
+ declare const isDeno: boolean;
  declare const isBrowser: boolean;
+ declare const isServer: boolean;

  /**
  * Blink Auth Module - Client-side authentication management
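The new `isDeno` and `isServer` flags pair naturally with the server-only `secretKey` added to `BlinkClientConfig` earlier in this diff. A minimal sketch, assuming a Node-style `process.env` is available when `isServer` is true:

```ts
import { createClient, isServer } from '@blinkdotnew/dev-sdk'

const blink = createClient({
  projectId: 'my-project-id', // placeholder
  // Attach the privileged key only in server runtimes; browser bundles never evaluate this branch.
  ...(isServer ? { secretKey: process.env.BLINK_SECRET_KEY } : {}),
})
```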
@@ -1175,6 +1325,11 @@ declare class BlinkAuth {
  * Uses expo-web-browser to open auth URL and polls for completion
  */
  private signInWithProviderUniversal;
+ /**
+  * OAuth flow via parent window (for iframe context)
+  * Delegates OAuth to parent window since OAuth providers block flows inside iframes
+  */
+ private signInWithProviderViaParent;
  /**
  * Generic provider sign-in method (headless mode)
  *
@@ -1633,6 +1788,50 @@ declare class BlinkAnalyticsImpl implements BlinkAnalytics {
  private detectChannel;
  }

+ /**
+  * Blink Functions - Edge function invocation helper
+  * Provides a simple interface for calling Blink Edge Functions with automatic JWT attachment
+  */
+
+ interface FunctionsInvokeOptions {
+   method?: 'GET' | 'POST' | 'PUT' | 'PATCH' | 'DELETE';
+   body?: any;
+   headers?: Record<string, string>;
+   searchParams?: Record<string, string>;
+ }
+ interface FunctionsInvokeResponse<T = any> {
+   data: T;
+   status: number;
+   headers: Headers;
+ }
+ interface BlinkFunctions {
+   /**
+    * Invoke a Blink Edge Function.
+    *
+    * Automatically attaches the user's JWT for authenticated requests.
+    * The function URL is constructed as: https://{projectSuffix}--{functionSlug}.functions.blink.new
+    *
+    * @param functionSlug - The slug of the edge function to invoke
+    * @param options - Request options (method, body, headers, etc.)
+    * @returns The function response
+    *
+    * @example
+    * // Simple POST request
+    * const { data } = await blink.functions.invoke('my-function', {
+    *   method: 'POST',
+    *   body: { message: 'Hello' }
+    * })
+    *
+    * @example
+    * // GET request with query params
+    * const { data } = await blink.functions.invoke('my-function', {
+    *   method: 'GET',
+    *   searchParams: { limit: '10' }
+    * })
+    */
+   invoke<T = any>(functionSlug: string, options?: FunctionsInvokeOptions): Promise<FunctionsInvokeResponse<T>>;
+ }
+
  /**
  * Blink Client - Main SDK entry point
  * Factory function and client class for the Blink SDK
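The `invoke` signature above is generic over the response payload, which the TSDoc examples leave untyped. A short sketch; the `greet-user` slug and `GreetResult` shape are hypothetical:

```ts
// Hypothetical response shape for a hypothetical 'greet-user' edge function.
interface GreetResult {
  greeting: string
}

// Assumes `blink` was created with createClient(); the user's JWT is attached automatically.
const { data, status } = await blink.functions.invoke<GreetResult>('greet-user', {
  method: 'POST',
  body: { name: 'Ada' },
})

if (status === 200) {
  console.log(data.greeting) // typed as GreetResult
}
```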
@@ -1647,6 +1846,8 @@ interface BlinkClient {
  realtime: BlinkRealtime;
  notifications: BlinkNotifications;
  analytics: BlinkAnalytics;
+ connectors: BlinkConnectors;
+ functions: BlinkFunctions;
  }
  /**
  * Create a new Blink client instance
@@ -2121,6 +2322,82 @@ declare class BlinkAIImpl implements BlinkAI {
  n?: number;
  signal?: AbortSignal;
  }): Promise<ImageGenerationResponse>;
+ /**
+  * Generates videos from text prompts or images using AI video generation models.
+  *
+  * @param options - Object containing:
+  * - `prompt`: Text description of the video to generate (required)
+  * - `model`: Video model to use (optional). Available models:
+  *   **Text-to-Video Models:**
+  *   - `"fal-ai/veo3.1"` - Google Veo 3.1 (best quality)
+  *   - `"fal-ai/veo3.1/fast"` (default) - Veo 3.1 fast mode (faster, cheaper)
+  *   - `"fal-ai/sora-2/text-to-video/pro"` - OpenAI Sora 2
+  *   - `"fal-ai/kling-video/v2.6/pro/text-to-video"` - Kling 2.6
+  *   **Image-to-Video Models:**
+  *   - `"fal-ai/veo3.1/image-to-video"` - Veo 3.1 I2V
+  *   - `"fal-ai/veo3.1/fast/image-to-video"` - Veo 3.1 fast I2V
+  *   - `"fal-ai/sora-2/image-to-video/pro"` - Sora 2 I2V
+  *   - `"fal-ai/kling-video/v2.6/pro/image-to-video"` - Kling 2.6 I2V
+  * - `image_url`: Source image URL for image-to-video (required for I2V models)
+  * - `duration`: Video duration ("4s", "5s", "6s", "8s", "10s", "12s")
+  * - `aspect_ratio`: Aspect ratio ("16:9", "9:16", "1:1")
+  * - `resolution`: Resolution ("720p", "1080p") - Veo/Sora only
+  * - `negative_prompt`: What to avoid in generation - Veo/Kling only
+  * - `generate_audio`: Generate audio with video (default: true)
+  * - `seed`: For reproducibility - Veo only
+  * - `cfg_scale`: Guidance scale (0-1) - Kling only
+  * - Plus optional signal parameter
+  *
+  * @example
+  * ```ts
+  * // Basic text-to-video generation (uses default fast model)
+  * const { result } = await blink.ai.generateVideo({
+  *   prompt: "A serene sunset over the ocean with gentle waves"
+  * });
+  * console.log("Video URL:", result.video.url);
+  *
+  * // High quality with Veo 3.1
+  * const { result } = await blink.ai.generateVideo({
+  *   prompt: "A cinematic shot of a futuristic city at night",
+  *   model: "fal-ai/veo3.1",
+  *   resolution: "1080p",
+  *   aspect_ratio: "16:9"
+  * });
+  *
+  * // Image-to-video animation
+  * const { result } = await blink.ai.generateVideo({
+  *   prompt: "Animate this image with gentle camera movement",
+  *   model: "fal-ai/veo3.1/fast/image-to-video",
+  *   image_url: "https://example.com/my-image.jpg",
+  *   duration: "5s"
+  * });
+  *
+  * // Using Sora 2 for creative videos
+  * const { result } = await blink.ai.generateVideo({
+  *   prompt: "A magical forest with glowing fireflies",
+  *   model: "fal-ai/sora-2/text-to-video/pro",
+  *   duration: "8s"
+  * });
+  *
+  * // Using Kling for detailed videos
+  * const { result, usage } = await blink.ai.generateVideo({
+  *   prompt: "A professional cooking tutorial scene",
+  *   model: "fal-ai/kling-video/v2.6/pro/text-to-video",
+  *   negative_prompt: "blur, distort, low quality",
+  *   cfg_scale: 0.7
+  * });
+  * console.log("Credits charged:", usage?.creditsCharged);
+  * ```
+  *
+  * @returns Promise<VideoGenerationResponse> - Object containing:
+  * - `result.video.url`: URL to the generated video
+  * - `result.video.content_type`: MIME type (video/mp4)
+  * - `result.video.file_name`: Generated filename
+  * - `result.video.file_size`: File size in bytes
+  * - `metadata`: Generation metadata (projectId, timestamp, model)
+  * - `usage`: Credits charged and cost information
+  */
+ generateVideo(options: VideoGenerationRequest): Promise<VideoGenerationResponse>;
  /**
  * Converts text to speech using AI voice synthesis models.
  *
@@ -2296,4 +2573,14 @@ declare class BlinkRealtimeImpl implements BlinkRealtime {
  onPresence(channelName: string, callback: (users: PresenceUser[]) => void): () => void;
  }

- export { type AnalyticsEvent, AsyncStorageAdapter, type AuthState, type AuthStateChangeCallback, type AuthTokens, type BlinkAI, BlinkAIImpl, type BlinkAnalytics, BlinkAnalyticsImpl, type BlinkClient, type BlinkClientConfig, type BlinkData, BlinkDataImpl, BlinkDatabase, type BlinkRealtime, BlinkRealtimeChannel, BlinkRealtimeError, BlinkRealtimeImpl, type BlinkStorage, BlinkStorageImpl, BlinkTable, type BlinkUser, type CreateOptions, type DataExtraction, type FileObject, type FilterCondition, type ImageGenerationRequest, type ImageGenerationResponse, type Message, NoOpStorageAdapter, type ObjectGenerationRequest, type ObjectGenerationResponse, type PresenceUser, type QueryOptions, type RealtimeChannel, type RealtimeGetMessagesOptions, type RealtimeMessage, type RealtimePublishOptions, type RealtimeSubscribeOptions, type SearchRequest, type SearchResponse, type SpeechGenerationRequest, type SpeechGenerationResponse, type StorageAdapter, type StorageUploadOptions, type StorageUploadResponse, type TableOperations, type TextGenerationRequest, type TextGenerationResponse, type TokenUsage, type TranscriptionRequest, type TranscriptionResponse, type UpdateOptions, type UpsertOptions, type WebBrowserModule, WebStorageAdapter, createClient, getDefaultStorageAdapter, isBrowser, isNode, isReactNative, isWeb, platform };
+ declare class BlinkConnectorsImpl implements BlinkConnectors {
+   private httpClient;
+   constructor(httpClient: HttpClient);
+   status(provider: ConnectorProvider, options?: {
+     account_id?: string;
+   }): Promise<ConnectorStatusResponse>;
+   execute<TParams = Record<string, unknown>, TData = any>(provider: ConnectorProvider, request: ConnectorExecuteRequest<TParams>): Promise<ConnectorExecuteResponse<TData>>;
+   saveApiKey<TMetadata = Record<string, unknown>>(provider: ConnectorProvider, request: ConnectorApiKeyRequest<TMetadata>): Promise<ConnectorApiKeyResponse>;
+ }
+
+ export { type AnalyticsEvent, AsyncStorageAdapter, type AuthState, type AuthStateChangeCallback, type AuthTokens, type BlinkAI, BlinkAIImpl, type BlinkAnalytics, BlinkAnalyticsImpl, type BlinkClient, type BlinkClientConfig, BlinkConnectorError, type BlinkConnectors, BlinkConnectorsImpl, type BlinkData, BlinkDataImpl, BlinkDatabase, type BlinkRealtime, BlinkRealtimeChannel, BlinkRealtimeError, BlinkRealtimeImpl, type BlinkStorage, BlinkStorageImpl, BlinkTable, type BlinkUser, type ConnectorApiKeyRequest, type ConnectorApiKeyResponse, type ConnectorAuthMode, type ConnectorExecuteRequest, type ConnectorExecuteResponse, type ConnectorProvider, type ConnectorStatusResponse, type CreateOptions, type DataExtraction, type FileObject, type FilterCondition, type ImageGenerationRequest, type ImageGenerationResponse, type Message, NoOpStorageAdapter, type ObjectGenerationRequest, type ObjectGenerationResponse, type PresenceUser, type QueryOptions, type RealtimeChannel, type RealtimeGetMessagesOptions, type RealtimeMessage, type RealtimePublishOptions, type RealtimeSubscribeOptions, type SearchRequest, type SearchResponse, type SpeechGenerationRequest, type SpeechGenerationResponse, type StorageAdapter, type StorageUploadOptions, type StorageUploadResponse, type TableOperations, type TextGenerationRequest, type TextGenerationResponse, type TokenUsage, type TranscriptionRequest, type TranscriptionResponse, type UpdateOptions, type UpsertOptions, type WebBrowserModule, WebStorageAdapter, createClient, getDefaultStorageAdapter, isBrowser, isDeno, isNode, isReactNative, isServer, isWeb, platform };