voice-router-dev 0.1.9 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/index.ts","../src/router/voice-router.ts","../src/generated/deepgram/schema/listenV1EncodingParameter.ts","../src/generated/deepgram/schema/speakV1EncodingParameter.ts","../src/generated/deepgram/schema/speakV1ContainerParameter.ts","../src/generated/deepgram/schema/speakV1SampleRateParameter.ts","../src/generated/gladia/schema/streamingSupportedEncodingEnum.ts","../src/generated/gladia/schema/streamingSupportedSampleRateEnum.ts","../src/generated/gladia/schema/streamingSupportedBitDepthEnum.ts","../src/constants/defaults.ts","../src/utils/errors.ts","../src/adapters/base-adapter.ts","../src/adapters/gladia-adapter.ts","../src/router/audio-encoding-types.ts","../src/utils/websocket-helpers.ts","../src/utils/validation.ts","../src/utils/transcription-helpers.ts","../src/generated/gladia/api/gladiaControlAPI.ts","../src/generated/gladia/schema/index.ts","../src/generated/gladia/schema/audioChunkAckMessageType.ts","../src/generated/gladia/schema/audioChunkActionType.ts","../src/generated/gladia/schema/audioToTextControllerAudioTranscriptionBodyLanguage.ts","../src/generated/gladia/schema/audioToTextControllerAudioTranscriptionBodyLanguageBehaviour.ts","../src/generated/gladia/schema/audioToTextControllerAudioTranscriptionBodyOutputFormat.ts","../src/generated/gladia/schema/audioToTextControllerAudioTranscriptionBodyTargetTranslationLanguage.ts","../src/generated/gladia/schema/callbackLiveAudioChunkAckMessageEvent.ts","../src/generated/gladia/schema/callbackLiveEndRecordingMessageEvent.ts","../src/generated/gladia/schema/callbackLiveEndSessionMessageEvent.ts","../src/generated/gladia/schema/callbackLiveNamedEntityRecognitionMessageEvent.ts","../src/generated/gladia/schema/callbackLivePostChapterizationMessageEvent.ts","../src/generated/gladia/schema/callbackLivePostFinalTranscriptMessageEvent.ts","../src/generated/gladia/schema/callbackLivePostSummarizationMessageEvent.ts","../src/generated/gladia/schema/callbackLivePostTranscriptMessageEvent.ts","../src/generated/gladia/schema/callbackLiveSentimentAnalysisMessageEvent.ts","../src/generated/gladia/schema/callbackLiveSpeechEndMessageEvent.ts","../src/generated/gladia/schema/callbackLiveSpeechStartMessageEvent.ts","../src/generated/gladia/schema/callbackLiveStartRecordingMessageEvent.ts","../src/generated/gladia/schema/callbackLiveStartSessionMessageEvent.ts","../src/generated/gladia/schema/callbackLiveStopRecordingAckMessageEvent.ts","../src/generated/gladia/schema/callbackLiveTranscriptMessageEvent.ts","../src/generated/gladia/schema/callbackLiveTranslationMessageEvent.ts","../src/generated/gladia/schema/callbackMethodEnum.ts","../src/generated/gladia/schema/callbackTranscriptionErrorPayloadEvent.ts","../src/generated/gladia/schema/callbackTranscriptionSuccessPayloadEvent.ts","../src/generated/gladia/schema/endRecordingMessageType.ts","../src/generated/gladia/schema/endSessionMessageType.ts","../src/generated/gladia/schema/historyControllerGetListV1KindItem.ts","../src/generated/gladia/schema/historyControllerGetListV1StatusItem.ts","../src/generated/gladia/schema/namedEntityRecognitionMessageType.ts","../src/generated/gladia/schema/postChapterizationMessageType.ts","../src/generated/gladia/schema/postFinalTranscriptMessageType.ts","../src/generated/gladia/schema/postSummarizationMessageType.ts","../src/generated/gladia/schema/postTranscriptMessageType.ts","../src/generated/gladia/schema/preRecordedControllerGetPreRecordedJobsV2StatusItem.ts","../src/generated/gladia/schema/preRecordedResponseKind.ts","../src/g
enerated/gladia/schema/preRecordedResponseStatus.ts","../src/generated/gladia/schema/sentimentAnalysisMessageType.ts","../src/generated/gladia/schema/speechEndMessageType.ts","../src/generated/gladia/schema/speechStartMessageType.ts","../src/generated/gladia/schema/startRecordingMessageType.ts","../src/generated/gladia/schema/startSessionMessageType.ts","../src/generated/gladia/schema/stopRecordingAckMessageType.ts","../src/generated/gladia/schema/stopRecordingActionType.ts","../src/generated/gladia/schema/streamingControllerGetStreamingJobsV2StatusItem.ts","../src/generated/gladia/schema/streamingResponseKind.ts","../src/generated/gladia/schema/streamingResponseStatus.ts","../src/generated/gladia/schema/streamingSupportedModels.ts","../src/generated/gladia/schema/streamingSupportedRegions.ts","../src/generated/gladia/schema/subtitlesFormatEnum.ts","../src/generated/gladia/schema/subtitlesStyleEnum.ts","../src/generated/gladia/schema/summaryTypesEnum.ts","../src/generated/gladia/schema/transcriptionControllerListV2KindItem.ts","../src/generated/gladia/schema/transcriptionControllerListV2StatusItem.ts","../src/generated/gladia/schema/transcriptionLanguageCodeEnum.ts","../src/generated/gladia/schema/transcriptMessageType.ts","../src/generated/gladia/schema/translationLanguageCodeEnum.ts","../src/generated/gladia/schema/translationMessageType.ts","../src/generated/gladia/schema/translationModelEnum.ts","../src/generated/gladia/schema/videoToTextControllerVideoTranscriptionBodyLanguage.ts","../src/generated/gladia/schema/videoToTextControllerVideoTranscriptionBodyLanguageBehaviour.ts","../src/generated/gladia/schema/videoToTextControllerVideoTranscriptionBodyOutputFormat.ts","../src/generated/gladia/schema/videoToTextControllerVideoTranscriptionBodyTargetTranslationLanguage.ts","../src/generated/gladia/schema/webhookLiveEndRecordingPayloadEvent.ts","../src/generated/gladia/schema/webhookLiveEndSessionPayloadEvent.ts","../src/generated/gladia/schema/webhookLiveStartRecordingPayloadEvent.ts","../src/generated/gladia/schema/webhookLiveStartSessionPayloadEvent.ts","../src/generated/gladia/schema/webhookTranscriptionCreatedPayloadEvent.ts","../src/generated/gladia/schema/webhookTranscriptionErrorPayloadEvent.ts","../src/generated/gladia/schema/webhookTranscriptionSuccessPayloadEvent.ts","../src/adapters/assemblyai-adapter.ts","../src/generated/assemblyai/api/assemblyAIAPI.ts","../src/generated/assemblyai/schema/index.ts","../src/generated/assemblyai/schema/audioIntelligenceModelStatus.ts","../src/generated/assemblyai/schema/entityType.ts","../src/generated/assemblyai/schema/lemurModel.ts","../src/generated/assemblyai/schema/piiPolicy.ts","../src/generated/assemblyai/schema/redactedAudioStatus.ts","../src/generated/assemblyai/schema/redactPiiAudioQuality.ts","../src/generated/assemblyai/schema/sentiment.ts","../src/generated/assemblyai/schema/speechModel.ts","../src/generated/assemblyai/schema/substitutionPolicy.ts","../src/generated/assemblyai/schema/subtitleFormat.ts","../src/generated/assemblyai/schema/summaryModel.ts","../src/generated/assemblyai/schema/summaryType.ts","../src/generated/assemblyai/schema/transcriptBoostParam.ts","../src/generated/assemblyai/schema/transcriptLanguageCode.ts","../src/generated/assemblyai/schema/transcriptReadyStatus.ts","../src/generated/assemblyai/schema/transcriptStatus.ts","../src/adapters/deepgram-adapter.ts","../src/adapters/azure-stt-adapter.ts","../src/generated/azure/api/speechServicesAPIV31.ts","../src/adapters/openai-whisper-adapter.ts","../src/generated/
openai/api/openAIAPI.ts","../src/adapters/speechmatics-adapter.ts","../src/webhooks/base-webhook.ts","../src/webhooks/gladia-webhook.ts","../src/webhooks/assemblyai-webhook.ts","../src/webhooks/deepgram-webhook.ts","../src/webhooks/azure-webhook.ts","../src/webhooks/speechmatics-webhook.ts","../src/webhooks/webhook-router.ts"],"sourcesContent":["/**\n * Voice Router SDK - Multi-Provider Transcription API\n * Unified interface for Gladia, AssemblyAI, Deepgram, and more\n */\n\n// Main Voice Router exports\nexport * from \"./router\"\nexport * from \"./adapters\"\n\n// Webhook normalization exports\nexport * from \"./webhooks\"\n\n// Provider-specific generated types (for advanced usage)\nexport * as GladiaTypes from \"./generated/gladia/schema\"\nexport * as AssemblyAITypes from \"./generated/assemblyai/schema\"\n","/**\n * VoiceRouter - Unified transcription API bridge\n * Provides a provider-agnostic interface for multiple Speech-to-Text services\n */\n\nimport type { TranscriptionAdapter, ProviderConfig } from \"../adapters/base-adapter\"\nimport type {\n AudioInput,\n StreamEvent,\n StreamingCallbacks,\n StreamingOptions,\n StreamingSession,\n TranscribeOptions,\n TranscriptionProvider,\n UnifiedTranscriptResponse\n} from \"./types\"\nimport type {\n GladiaStreamingOptions,\n DeepgramStreamingOptions,\n AssemblyAIStreamingOptions\n} from \"./provider-streaming-types\"\n\n/**\n * Configuration for VoiceRouter\n */\nexport interface VoiceRouterConfig {\n /**\n * Provider configurations\n * Key: provider name, Value: provider config\n */\n providers: Partial<Record<TranscriptionProvider, ProviderConfig>>\n\n /**\n * Default provider to use when not specified\n */\n defaultProvider?: TranscriptionProvider\n\n /**\n * Strategy for provider selection when multiple providers are configured\n * - 'explicit': Always require provider to be specified (throws error if not)\n * - 'default': Use defaultProvider if not specified\n * - 'round-robin': Rotate between providers for load balancing\n * - 'fastest': Choose provider with lowest current queue (future feature)\n */\n selectionStrategy?: \"explicit\" | \"default\" | \"round-robin\"\n}\n\n/**\n * VoiceRouter - Main class for provider-agnostic transcription\n *\n * Provides a unified interface across multiple Speech-to-Text providers\n * (Gladia, AssemblyAI, Deepgram, etc.). 
Automatically handles provider\n * selection, adapter management, and response normalization.\n *\n * @example Basic usage with single provider\n * ```typescript\n * import { VoiceRouter, GladiaAdapter } from '@meeting-baas/sdk';\n *\n * const router = new VoiceRouter({\n * providers: {\n * gladia: { apiKey: process.env.GLADIA_API_KEY }\n * },\n * defaultProvider: 'gladia'\n * });\n *\n * router.registerAdapter(new GladiaAdapter());\n *\n * const result = await router.transcribe({\n * type: 'url',\n * url: 'https://example.com/audio.mp3'\n * });\n *\n * console.log(result.data.text);\n * ```\n *\n * @example Multi-provider with round-robin\n * ```typescript\n * const router = new VoiceRouter({\n * providers: {\n * gladia: { apiKey: process.env.GLADIA_API_KEY },\n * assemblyai: { apiKey: process.env.ASSEMBLYAI_API_KEY }\n * },\n * selectionStrategy: 'round-robin'\n * });\n *\n * router.registerAdapter(new GladiaAdapter());\n * router.registerAdapter(new AssemblyAIAdapter());\n *\n * // Automatically alternates between providers\n * await router.transcribe(audio1); // Uses Gladia\n * await router.transcribe(audio2); // Uses AssemblyAI\n * await router.transcribe(audio3); // Uses Gladia again\n * ```\n */\nexport class VoiceRouter {\n private adapters: Map<TranscriptionProvider, TranscriptionAdapter> = new Map()\n private config: VoiceRouterConfig\n private roundRobinIndex = 0\n\n constructor(config: VoiceRouterConfig) {\n this.config = {\n selectionStrategy: \"default\",\n ...config\n }\n\n // Validate configuration\n if (Object.keys(config.providers).length === 0) {\n throw new Error(\"VoiceRouter requires at least one provider configuration\")\n }\n\n // If using default strategy, ensure a default provider is set\n if (this.config.selectionStrategy === \"default\" && !this.config.defaultProvider) {\n // Auto-select first provider as default\n this.config.defaultProvider = Object.keys(config.providers)[0] as TranscriptionProvider\n }\n }\n\n /**\n * Register an adapter for a provider\n *\n * Call this method for each provider you want to use. The adapter will be\n * initialized with the configuration provided in the constructor.\n *\n * @param adapter - Provider adapter instance to register\n * @throws {Error} If no configuration found for the provider\n *\n * @example\n * ```typescript\n * const router = new VoiceRouter({\n * providers: {\n * gladia: { apiKey: 'YOUR_KEY' }\n * }\n * });\n *\n * router.registerAdapter(new GladiaAdapter());\n * ```\n */\n registerAdapter(adapter: TranscriptionAdapter): void {\n // Initialize adapter with config\n const providerConfig = this.config.providers[adapter.name]\n if (!providerConfig) {\n throw new Error(`No configuration found for provider: ${adapter.name}`)\n }\n\n adapter.initialize(providerConfig)\n this.adapters.set(adapter.name, adapter)\n }\n\n /**\n * Get an adapter by provider name\n */\n getAdapter(provider: TranscriptionProvider): TranscriptionAdapter {\n const adapter = this.adapters.get(provider)\n if (!adapter) {\n throw new Error(\n `Provider '${provider}' is not registered. Available providers: ${Array.from(this.adapters.keys()).join(\", \")}`\n )\n }\n return adapter\n }\n\n /**\n * Select provider based on configured strategy\n */\n private selectProvider(preferredProvider?: TranscriptionProvider): TranscriptionProvider {\n // If provider explicitly specified, use it\n if (preferredProvider) {\n if (!this.adapters.has(preferredProvider)) {\n throw new Error(\n `Provider '${preferredProvider}' is not registered. 
Available providers: ${Array.from(this.adapters.keys()).join(\", \")}`\n )\n }\n return preferredProvider\n }\n\n // Apply selection strategy\n switch (this.config.selectionStrategy) {\n case \"explicit\":\n throw new Error(\n \"Provider must be explicitly specified when using 'explicit' selection strategy\"\n )\n\n case \"round-robin\": {\n const providers = Array.from(this.adapters.keys())\n const provider = providers[this.roundRobinIndex % providers.length]\n this.roundRobinIndex++\n return provider\n }\n\n case \"default\":\n default:\n if (!this.config.defaultProvider) {\n throw new Error(\"No default provider configured\")\n }\n return this.config.defaultProvider\n }\n }\n\n /**\n * Transcribe audio using a specific provider or the default\n *\n * Submit audio for transcription. The provider will be selected based on\n * your configuration strategy (explicit, default, or round-robin).\n *\n * @param audio - Audio input (URL, file buffer, or stream)\n * @param options - Transcription options (language, diarization, etc.)\n * @param options.provider - Specific provider to use (overrides selection strategy)\n * @returns Unified transcription response with normalized format\n * @throws {Error} If provider not registered or selection fails\n *\n * @example URL audio\n * ```typescript\n * const result = await router.transcribe({\n * type: 'url',\n * url: 'https://example.com/audio.mp3'\n * }, {\n * language: 'en',\n * diarization: true,\n * summarization: true\n * });\n *\n * if (result.success) {\n * console.log('Transcript:', result.data.text);\n * console.log('Speakers:', result.data.speakers);\n * console.log('Summary:', result.data.summary);\n * }\n * ```\n *\n * @example Specific provider\n * ```typescript\n * const result = await router.transcribe(audio, {\n * provider: 'gladia', // Force use of Gladia\n * language: 'en'\n * });\n * ```\n */\n async transcribe(\n audio: AudioInput,\n options?: TranscribeOptions & { provider?: TranscriptionProvider }\n ): Promise<UnifiedTranscriptResponse> {\n const provider = this.selectProvider(options?.provider)\n const adapter = this.getAdapter(provider)\n\n // Remove provider from options before passing to adapter\n const { provider: _, ...adapterOptions } = options || {}\n\n return adapter.transcribe(audio, adapterOptions)\n }\n\n /**\n * Get transcription result by ID\n * Provider must be specified since IDs are provider-specific\n */\n async getTranscript(\n transcriptId: string,\n provider: TranscriptionProvider\n ): Promise<UnifiedTranscriptResponse> {\n const adapter = this.getAdapter(provider)\n return adapter.getTranscript(transcriptId)\n }\n\n /**\n * Stream audio for real-time transcription with Gladia\n *\n * @param options - Gladia-specific streaming options (type-safe from OpenAPI spec)\n * @param callbacks - Event callbacks for transcription results\n * @returns Promise that resolves with a StreamingSession\n *\n * @example Gladia streaming (type-safe!)\n * ```typescript\n * const session = await router.transcribeStream({\n * provider: 'gladia',\n * encoding: 'wav/pcm', // ✅ Only Gladia encodings allowed\n * sampleRate: 16000, // ✅ Only 8000, 16000, 32000, 44100, 48000\n * channels: 1\n * }, {\n * onTranscript: (event) => console.log(event.text),\n * onError: (error) => console.error(error)\n * });\n * ```\n */\n transcribeStream(\n options: GladiaStreamingOptions & { provider: \"gladia\" },\n callbacks?: StreamingCallbacks\n ): Promise<StreamingSession>\n\n /**\n * Stream audio for real-time transcription with Deepgram\n *\n * 
@param options - Deepgram-specific streaming options (type-safe from OpenAPI spec)\n * @param callbacks - Event callbacks for transcription results\n * @returns Promise that resolves with a StreamingSession\n *\n * @example Deepgram streaming (type-safe!)\n * ```typescript\n * const session = await router.transcribeStream({\n * provider: 'deepgram',\n * encoding: 'linear16', // ✅ Only Deepgram encodings allowed\n * sampleRate: 16000,\n * language: 'en',\n * diarization: true\n * }, {\n * onTranscript: (event) => console.log(event.text)\n * });\n * ```\n */\n transcribeStream(\n options: DeepgramStreamingOptions & { provider: \"deepgram\" },\n callbacks?: StreamingCallbacks\n ): Promise<StreamingSession>\n\n /**\n * Stream audio for real-time transcription with AssemblyAI\n *\n * @param options - AssemblyAI-specific streaming options (type-safe from OpenAPI spec)\n * @param callbacks - Event callbacks for transcription results\n * @returns Promise that resolves with a StreamingSession\n *\n * @example AssemblyAI streaming (type-safe!)\n * ```typescript\n * const session = await router.transcribeStream({\n * provider: 'assemblyai',\n * sampleRate: 16000 // ✅ Only supported sample rates\n * }, {\n * onTranscript: (event) => console.log(event.text)\n * });\n * ```\n */\n transcribeStream(\n options: AssemblyAIStreamingOptions & { provider: \"assemblyai\" },\n callbacks?: StreamingCallbacks\n ): Promise<StreamingSession>\n\n /**\n * Stream audio for real-time transcription (uses default provider)\n *\n * @param options - Generic streaming options\n * @param callbacks - Event callbacks for transcription results\n * @returns Promise that resolves with a StreamingSession\n */\n transcribeStream(\n options?: StreamingOptions,\n callbacks?: StreamingCallbacks\n ): Promise<StreamingSession>\n\n // Implementation\n async transcribeStream(\n options?:\n | (GladiaStreamingOptions & { provider: \"gladia\" })\n | (DeepgramStreamingOptions & { provider: \"deepgram\" })\n | (AssemblyAIStreamingOptions & { provider: \"assemblyai\" })\n | (StreamingOptions & { provider?: TranscriptionProvider }),\n callbacks?: StreamingCallbacks\n ): Promise<StreamingSession> {\n const provider = this.selectProvider(options?.provider)\n const adapter = this.getAdapter(provider)\n\n // Check if adapter supports streaming\n if (!adapter.capabilities.streaming || !adapter.transcribeStream) {\n throw new Error(`Provider '${provider}' does not support streaming transcription`)\n }\n\n // Remove provider from options before passing to adapter\n // Cast to StreamingOptions since adapter will handle provider-specific conversions\n const { provider: _, ...adapterOptions } = options || {}\n\n return adapter.transcribeStream(adapterOptions as StreamingOptions, callbacks)\n }\n\n /**\n * Delete a transcription\n * Not all providers support this operation\n */\n async deleteTranscript(\n transcriptId: string,\n provider: TranscriptionProvider\n ): Promise<{ success: boolean }> {\n const adapter = this.getAdapter(provider)\n\n if (!adapter.deleteTranscript) {\n throw new Error(`Provider '${provider}' does not support deleting transcripts`)\n }\n\n return adapter.deleteTranscript(transcriptId)\n }\n\n /**\n * List recent transcriptions\n * Not all providers support this operation\n */\n async listTranscripts(\n provider: TranscriptionProvider,\n options?: {\n limit?: number\n offset?: number\n status?: string\n }\n ): Promise<{\n transcripts: UnifiedTranscriptResponse[]\n total?: number\n hasMore?: boolean\n }> {\n const adapter = 
this.getAdapter(provider)\n\n if (!adapter.listTranscripts) {\n throw new Error(`Provider '${provider}' does not support listing transcripts`)\n }\n\n return adapter.listTranscripts(options)\n }\n\n /**\n * Get capabilities for a specific provider\n */\n getProviderCapabilities(provider: TranscriptionProvider) {\n const adapter = this.getAdapter(provider)\n return adapter.capabilities\n }\n\n /**\n * Get all registered providers\n */\n getRegisteredProviders(): TranscriptionProvider[] {\n return Array.from(this.adapters.keys())\n }\n\n /**\n * Get raw provider client for advanced usage\n */\n getRawProviderClient(provider: TranscriptionProvider): unknown {\n const adapter = this.getAdapter(provider)\n\n if (!adapter.getRawClient) {\n throw new Error(`Provider '${provider}' does not expose a raw client`)\n }\n\n return adapter.getRawClient()\n }\n}\n\n/**\n * Factory function to create a VoiceRouter with auto-registered adapters\n */\nexport function createVoiceRouter(\n config: VoiceRouterConfig,\n adapters?: TranscriptionAdapter[]\n): VoiceRouter {\n const router = new VoiceRouter(config)\n\n // Register provided adapters\n if (adapters && adapters.length > 0) {\n for (const adapter of adapters) {\n router.registerAdapter(adapter)\n }\n }\n\n return router\n}\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Deepgram API Specification\n * APIs for speech-to-text transcription, text-to-speech synthesis, language understanding, and account management.\n\n * OpenAPI spec version: 1.0.0\n */\n\n/**\n * ListenV1EncodingParameter type definition\n */\n\n/**\n * ListenV1EncodingParameter type definition\n */\n\n/**\n * ListenV1EncodingParameter type definition\n */\n\n/**\n * ListenV1EncodingParameter type definition\n */\n\n/**\n * ListenV1EncodingParameter type definition\n */\n\n/**\n * ListenV1EncodingParameter type definition\n */\nexport type ListenV1EncodingParameter = typeof ListenV1EncodingParameter[keyof typeof ListenV1EncodingParameter];\n\nexport const ListenV1EncodingParameter = {\n linear16: \"linear16\",\n flac: \"flac\",\n mulaw: \"mulaw\",\n opus: \"opus\",\n speex: \"speex\",\n g729: \"g729\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Deepgram API Specification\n * APIs for speech-to-text transcription, text-to-speech synthesis, language understanding, and account management.\n\n * OpenAPI spec version: 1.0.0\n */\n\n/**\n * SpeakV1EncodingParameter type definition\n */\n\n/**\n * SpeakV1EncodingParameter type definition\n */\n\n/**\n * SpeakV1EncodingParameter type definition\n */\n\n/**\n * SpeakV1EncodingParameter type definition\n */\n\n/**\n * SpeakV1EncodingParameter type definition\n */\nexport type SpeakV1EncodingParameter = typeof SpeakV1EncodingParameter[keyof typeof SpeakV1EncodingParameter];\n\nexport const SpeakV1EncodingParameter = {\n linear16: \"linear16\",\n aac: \"aac\",\n opus: \"opus\",\n mp3: \"mp3\",\n flac: \"flac\",\n mulaw: \"mulaw\",\n alaw: \"alaw\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Deepgram API Specification\n * APIs for speech-to-text transcription, text-to-speech synthesis, language understanding, and account management.\n\n * OpenAPI spec version: 1.0.0\n */\n\n/**\n * SpeakV1ContainerParameter type definition\n */\n\n/**\n * SpeakV1ContainerParameter type definition\n */\n\n/**\n * SpeakV1ContainerParameter type definition\n */\n\n/**\n * SpeakV1ContainerParameter type definition\n */\n\n/**\n * SpeakV1ContainerParameter type definition\n 
*/\nexport type SpeakV1ContainerParameter = typeof SpeakV1ContainerParameter[keyof typeof SpeakV1ContainerParameter];\n\nexport const SpeakV1ContainerParameter = {\n none: \"none\",\n wav: \"wav\",\n ogg: \"ogg\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Deepgram API Specification\n * APIs for speech-to-text transcription, text-to-speech synthesis, language understanding, and account management.\n\n * OpenAPI spec version: 1.0.0\n */\n\n/**\n * SpeakV1SampleRateParameter type definition\n */\n\n/**\n * SpeakV1SampleRateParameter type definition\n */\n\n/**\n * SpeakV1SampleRateParameter type definition\n */\n\n/**\n * SpeakV1SampleRateParameter type definition\n */\n\n/**\n * SpeakV1SampleRateParameter type definition\n */\n\n/**\n * SpeakV1SampleRateParameter type definition\n */\nexport type SpeakV1SampleRateParameter = typeof SpeakV1SampleRateParameter[keyof typeof SpeakV1SampleRateParameter];\n\nexport const SpeakV1SampleRateParameter = {\n NUMBER_16000: 16000,\n NUMBER_24000: 24000,\n NUMBER_32000: 32000,\n NUMBER_48000: 48000,\n null: null,\n NUMBER_8000: 8000,\n NUMBER_22050: 22050\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\n/**\n * The encoding format of the audio stream. Supported formats: \n- PCM: 8, 16, 24, and 32 bits \n- A-law: 8 bits \n- μ-law: 8 bits \n\nNote: No need to add WAV headers to raw audio as the API supports both formats.\n */\nexport type StreamingSupportedEncodingEnum =\n (typeof StreamingSupportedEncodingEnum)[keyof typeof StreamingSupportedEncodingEnum]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const StreamingSupportedEncodingEnum = {\n \"wav/pcm\": \"wav/pcm\",\n \"wav/alaw\": \"wav/alaw\",\n \"wav/ulaw\": \"wav/ulaw\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\n/**\n * The sample rate of the audio stream\n */\nexport type StreamingSupportedSampleRateEnum =\n (typeof StreamingSupportedSampleRateEnum)[keyof typeof StreamingSupportedSampleRateEnum]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const StreamingSupportedSampleRateEnum = {\n NUMBER_8000: 8000,\n NUMBER_16000: 16000,\n NUMBER_32000: 32000,\n NUMBER_44100: 44100,\n NUMBER_48000: 48000\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\n/**\n * The bit depth of the audio stream\n */\nexport type StreamingSupportedBitDepthEnum =\n (typeof StreamingSupportedBitDepthEnum)[keyof typeof StreamingSupportedBitDepthEnum]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const StreamingSupportedBitDepthEnum = {\n NUMBER_8: 8,\n NUMBER_16: 16,\n NUMBER_24: 24,\n NUMBER_32: 32\n} as const\n","/**\n * Default configuration constants for Voice Router SDK\n *\n * These constants provide sensible defaults for timeouts, polling intervals,\n * and other configuration values used across all adapters.\n */\n\n/**\n * Default timeout values for different operation types (in milliseconds)\n */\nexport const DEFAULT_TIMEOUTS = {\n /** Standard HTTP request timeout for API calls (60 seconds) */\n HTTP_REQUEST: 60000,\n\n /** Audio processing timeout for long audio files (120 seconds) */\n AUDIO_PROCESSING: 120000,\n\n /** WebSocket connection establishment timeout (10 seconds) */\n WS_CONNECTION: 10000,\n\n /** WebSocket graceful close 
timeout (5 seconds) */\n WS_CLOSE: 5000\n} as const\n\n/**\n * Default polling configuration for async transcription jobs\n */\nexport const DEFAULT_POLLING = {\n /** Maximum number of polling attempts before timing out */\n MAX_ATTEMPTS: 60,\n\n /** Standard interval between polling attempts (2 seconds) */\n INTERVAL_MS: 2000,\n\n /** Slower interval for long-running jobs (3 seconds) */\n SLOW_INTERVAL_MS: 3000\n} as const\n","/**\n * Standardized error handling utilities for Voice Router SDK\n *\n * Provides consistent error codes, messages, and formatting across all adapters.\n */\n\n/**\n * Standard error codes used across all providers\n *\n * These codes provide a consistent error taxonomy regardless of which\n * provider is being used.\n */\nexport const ERROR_CODES = {\n /** Failed to parse API response or WebSocket message */\n PARSE_ERROR: \"PARSE_ERROR\",\n\n /** WebSocket connection error */\n WEBSOCKET_ERROR: \"WEBSOCKET_ERROR\",\n\n /** Async transcription job did not complete within timeout */\n POLLING_TIMEOUT: \"POLLING_TIMEOUT\",\n\n /** Transcription processing failed on provider side */\n TRANSCRIPTION_ERROR: \"TRANSCRIPTION_ERROR\",\n\n /** Connection attempt timed out */\n CONNECTION_TIMEOUT: \"CONNECTION_TIMEOUT\",\n\n /** Invalid input provided to API */\n INVALID_INPUT: \"INVALID_INPUT\",\n\n /** Requested operation not supported by provider */\n NOT_SUPPORTED: \"NOT_SUPPORTED\",\n\n /** No transcription results available */\n NO_RESULTS: \"NO_RESULTS\",\n\n /** Unspecified or unknown error */\n UNKNOWN_ERROR: \"UNKNOWN_ERROR\"\n} as const\n\nexport type ErrorCode = (typeof ERROR_CODES)[keyof typeof ERROR_CODES]\n\n/**\n * Default error messages for each error code\n *\n * These can be overridden with custom messages when creating errors.\n */\nexport const ERROR_MESSAGES: Record<ErrorCode, string> = {\n PARSE_ERROR: \"Failed to parse response data\",\n WEBSOCKET_ERROR: \"WebSocket connection error\",\n POLLING_TIMEOUT: \"Transcription did not complete within timeout period\",\n TRANSCRIPTION_ERROR: \"Transcription processing failed\",\n CONNECTION_TIMEOUT: \"Connection attempt timed out\",\n INVALID_INPUT: \"Invalid input provided\",\n NOT_SUPPORTED: \"Operation not supported by this provider\",\n NO_RESULTS: \"No transcription results available\",\n UNKNOWN_ERROR: \"An unknown error occurred\"\n}\n\n/**\n * Standard error object structure\n */\nexport interface StandardError {\n /** Error code from ERROR_CODES */\n code: string\n /** Human-readable error message */\n message: string\n /** HTTP status code if applicable */\n statusCode?: number\n /** Additional error details */\n details?: unknown\n}\n\n/**\n * Create a standardized error object\n *\n * @param code - Error code from ERROR_CODES\n * @param customMessage - Optional custom message (defaults to standard message)\n * @param details - Optional additional error details\n * @returns Standardized error object\n *\n * @example\n * ```typescript\n * throw createError(ERROR_CODES.PARSE_ERROR, undefined, rawError)\n *\n * throw createError(\n * ERROR_CODES.TRANSCRIPTION_ERROR,\n * \"Audio file format not supported\",\n * { format: \"mp4\", supported: [\"wav\", \"mp3\"] }\n * )\n * ```\n */\nexport function createError(\n code: ErrorCode,\n customMessage?: string,\n details?: unknown\n): StandardError {\n return {\n code,\n message: customMessage || ERROR_MESSAGES[code],\n details\n }\n}\n\n/**\n * Create error from caught exception\n *\n * Safely extracts error information from unknown caught values.\n *\n * 
@param error - Caught error (any type)\n * @param defaultCode - Error code to use if not extractable\n * @param statusCode - HTTP status code if applicable\n * @returns Standardized error object\n *\n * @example\n * ```typescript\n * try {\n * await someOperation()\n * } catch (error) {\n * return { success: false, error: createErrorFromException(error) }\n * }\n * ```\n */\nexport function createErrorFromException(\n error: unknown,\n defaultCode: ErrorCode = ERROR_CODES.UNKNOWN_ERROR,\n statusCode?: number\n): StandardError {\n if (error instanceof Error) {\n const err = error as Error & { statusCode?: number; code?: string }\n return {\n code: err.code || defaultCode,\n message: err.message || ERROR_MESSAGES[defaultCode],\n statusCode: statusCode || err.statusCode,\n details: error\n }\n }\n\n return {\n code: defaultCode,\n message: String(error) || ERROR_MESSAGES[defaultCode],\n statusCode,\n details: error\n }\n}\n","/**\n * Base adapter interface for transcription providers\n * All provider adapters must implement this interface\n */\n\nimport type {\n AudioInput,\n ProviderCapabilities,\n StreamEvent,\n StreamingCallbacks,\n StreamingOptions,\n StreamingSession,\n TranscribeOptions,\n TranscriptionProvider,\n UnifiedTranscriptResponse\n} from \"../router/types\"\nimport { DEFAULT_TIMEOUTS, DEFAULT_POLLING } from \"../constants/defaults\"\nimport { ERROR_CODES, type ErrorCode } from \"../utils/errors\"\n\n/**\n * Provider configuration\n */\nexport interface ProviderConfig {\n /** API key for authentication */\n apiKey: string\n /** Base API URL (optional, uses provider default if not specified) */\n baseUrl?: string\n /** Request timeout in milliseconds */\n timeout?: number\n /** Custom headers to include in requests */\n headers?: Record<string, string>\n /** Additional provider-specific options */\n options?: Record<string, unknown>\n}\n\n/**\n * Base adapter interface that all provider adapters must implement\n */\nexport interface TranscriptionAdapter {\n /**\n * Provider name\n */\n readonly name: TranscriptionProvider\n\n /**\n * Provider capabilities\n */\n readonly capabilities: ProviderCapabilities\n\n /**\n * Initialize the adapter with configuration\n */\n initialize(config: ProviderConfig): void\n\n /**\n * Submit audio for transcription (async)\n * Returns immediately with a job ID that can be polled\n */\n transcribe(audio: AudioInput, options?: TranscribeOptions): Promise<UnifiedTranscriptResponse>\n\n /**\n * Get transcription result by ID\n * Used to poll for results after async submission\n */\n getTranscript(transcriptId: string): Promise<UnifiedTranscriptResponse>\n\n /**\n * Stream audio for real-time transcription (callback-based)\n * Only available if capabilities.streaming is true\n *\n * This method creates a streaming session that accepts audio chunks\n * and returns transcription results via callbacks.\n *\n * @param options - Streaming configuration options\n * @param callbacks - Event callbacks for transcription results\n * @returns Promise that resolves with a StreamingSession\n *\n * @example\n * ```typescript\n * const session = await adapter.transcribeStream({\n * encoding: 'linear16',\n * sampleRate: 16000,\n * language: 'en'\n * }, {\n * onTranscript: (event) => console.log(event.text),\n * onError: (error) => console.error(error)\n * });\n *\n * // Send audio chunks\n * await session.sendAudio({ data: audioBuffer });\n *\n * // Close when done\n * await session.close();\n * ```\n */\n transcribeStream?(\n options?: StreamingOptions,\n 
callbacks?: StreamingCallbacks\n ): Promise<StreamingSession>\n\n /**\n * Stream audio for real-time transcription (async iterator)\n * Alternative streaming API that returns an async iterable\n * Only available if capabilities.streaming is true\n *\n * @deprecated Prefer transcribeStream() with callbacks for better control\n */\n transcribeStreamIterator?(\n audioStream: ReadableStream,\n options?: StreamingOptions\n ): AsyncIterable<StreamEvent>\n\n /**\n * Delete a transcription\n * Not all providers support deletion\n */\n deleteTranscript?(transcriptId: string): Promise<{ success: boolean }>\n\n /**\n * List recent transcriptions\n * Not all providers support listing\n */\n listTranscripts?(options?: { limit?: number; offset?: number; status?: string }): Promise<{\n transcripts: UnifiedTranscriptResponse[]\n total?: number\n hasMore?: boolean\n }>\n\n /**\n * Get provider-specific raw client\n * For advanced users who need direct access to provider APIs\n */\n getRawClient?(): unknown\n}\n\n/**\n * Abstract base class for adapters (optional convenience)\n * Providers can extend this or implement TranscriptionAdapter directly\n */\nexport abstract class BaseAdapter implements TranscriptionAdapter {\n abstract readonly name: TranscriptionProvider\n abstract readonly capabilities: ProviderCapabilities\n\n /**\n * Base URL for provider API (must be defined by subclass)\n */\n protected abstract baseUrl: string\n\n protected config?: ProviderConfig\n\n initialize(config: ProviderConfig): void {\n this.config = config\n }\n\n abstract transcribe(\n audio: AudioInput,\n options?: TranscribeOptions\n ): Promise<UnifiedTranscriptResponse>\n\n abstract getTranscript(transcriptId: string): Promise<UnifiedTranscriptResponse>\n\n /**\n * Helper method to create error responses with stack traces\n *\n * @param error - Error object or unknown error\n * @param statusCode - Optional HTTP status code\n * @param code - Optional error code (defaults to extracted or UNKNOWN_ERROR)\n */\n protected createErrorResponse(\n error: Error | unknown,\n statusCode?: number,\n code?: ErrorCode\n ): UnifiedTranscriptResponse {\n const err = error as Error & {\n statusCode?: number\n code?: string\n response?: { data?: any; status?: number; statusText?: string }\n }\n\n // Extract HTTP error details if present (axios errors)\n const httpStatus = statusCode || err.statusCode || err.response?.status\n const httpStatusText = err.response?.statusText\n const responseData = err.response?.data\n\n return {\n success: false,\n provider: this.name,\n error: {\n code: code || err.code || ERROR_CODES.UNKNOWN_ERROR,\n message: err.message || \"An unknown error occurred\",\n statusCode: httpStatus,\n details: {\n // Include full error object\n error: error,\n // Include stack trace if available\n stack: err.stack,\n // Include HTTP response details\n httpStatus,\n httpStatusText,\n responseData,\n // Include provider name for debugging\n provider: this.name\n }\n }\n }\n }\n\n /**\n * Helper method to validate configuration\n */\n protected validateConfig(): void {\n if (!this.config) {\n throw new Error(`Adapter ${this.name} is not initialized. 
Call initialize() first.`)\n }\n if (!this.config.apiKey) {\n throw new Error(`API key is required for ${this.name} provider`)\n }\n }\n\n /**\n * Build axios config for generated API client functions\n *\n * @param authHeaderName - Header name for API key (e.g., \"Authorization\", \"x-gladia-key\")\n * @param authHeaderValue - Optional function to format auth header value (defaults to raw API key)\n * @returns Axios config object\n */\n protected getAxiosConfig(\n authHeaderName: string = \"Authorization\",\n authHeaderValue?: (apiKey: string) => string\n ): {\n baseURL: string\n timeout: number\n headers: Record<string, string>\n } {\n this.validateConfig()\n\n const authValue = authHeaderValue ? authHeaderValue(this.config!.apiKey) : this.config!.apiKey\n\n return {\n baseURL: this.config!.baseUrl || this.baseUrl,\n timeout: this.config!.timeout || DEFAULT_TIMEOUTS.HTTP_REQUEST,\n headers: {\n [authHeaderName]: authValue,\n \"Content-Type\": \"application/json\",\n ...this.config!.headers\n }\n }\n }\n\n /**\n * Generic polling helper for async transcription jobs\n *\n * Polls getTranscript() until job completes or times out.\n *\n * @param transcriptId - Job/transcript ID to poll\n * @param options - Polling configuration\n * @returns Final transcription result\n */\n protected async pollForCompletion(\n transcriptId: string,\n options?: {\n maxAttempts?: number\n intervalMs?: number\n }\n ): Promise<UnifiedTranscriptResponse> {\n const { maxAttempts = DEFAULT_POLLING.MAX_ATTEMPTS, intervalMs = DEFAULT_POLLING.INTERVAL_MS } =\n options || {}\n\n for (let attempt = 0; attempt < maxAttempts; attempt++) {\n const result = await this.getTranscript(transcriptId)\n\n if (!result.success) {\n return result\n }\n\n const status = result.data?.status\n if (status === \"completed\") {\n return result\n }\n\n if (status === \"error\") {\n return this.createErrorResponse(\n new Error(\"Transcription failed\"),\n undefined,\n ERROR_CODES.TRANSCRIPTION_ERROR\n )\n }\n\n await new Promise((resolve) => setTimeout(resolve, intervalMs))\n }\n\n return {\n success: false,\n provider: this.name,\n error: {\n code: ERROR_CODES.POLLING_TIMEOUT,\n message: `Transcription did not complete after ${maxAttempts} attempts`\n }\n }\n }\n}\n","/**\n * Gladia transcription provider adapter\n * Documentation: https://docs.gladia.io/\n */\n\nimport axios from \"axios\"\nimport WebSocket from \"ws\"\nimport type {\n AudioChunk,\n AudioInput,\n ProviderCapabilities,\n StreamingCallbacks,\n StreamingOptions,\n StreamingSession,\n TranscribeOptions,\n UnifiedTranscriptResponse\n} from \"../router/types\"\nimport { mapEncodingToProvider } from \"../router/audio-encoding-types\"\nimport { BaseAdapter, type ProviderConfig } from \"./base-adapter\"\n\n// Import utilities\nimport { ERROR_CODES } from \"../utils/errors\"\nimport {\n waitForWebSocketOpen,\n closeWebSocket,\n setupWebSocketHandlers,\n validateSessionForAudio\n} from \"../utils/websocket-helpers\"\nimport { validateEnumValue } from \"../utils/validation\"\nimport {\n extractSpeakersFromUtterances,\n extractWords as extractWordsUtil,\n normalizeStatus\n} from \"../utils/transcription-helpers\"\nimport type { SessionStatus } from \"../router/types\"\n\n// Import generated API client functions - FULL TYPE SAFETY!\nimport {\n preRecordedControllerInitPreRecordedJobV2,\n preRecordedControllerGetPreRecordedJobV2,\n streamingControllerInitStreamingSessionV2\n} from \"../generated/gladia/api/gladiaControlAPI\"\n\n// Import Gladia generated types\nimport type { 
InitTranscriptionRequest } from \"../generated/gladia/schema/initTranscriptionRequest\"\nimport type { PreRecordedResponse } from \"../generated/gladia/schema/preRecordedResponse\"\nimport type { StreamingRequest } from \"../generated/gladia/schema/streamingRequest\"\nimport type { TranscriptionDTO } from \"../generated/gladia/schema/transcriptionDTO\"\nimport type { UtteranceDTO } from \"../generated/gladia/schema/utteranceDTO\"\nimport type { WordDTO } from \"../generated/gladia/schema/wordDTO\"\n// WebSocket message types for type-safe parsing\nimport type { TranscriptMessage } from \"../generated/gladia/schema/transcriptMessage\"\n// Import Gladia's supported values from OpenAPI-generated schema (type safety!)\nimport { StreamingSupportedSampleRateEnum } from \"../generated/gladia/schema/streamingSupportedSampleRateEnum\"\nimport type { StreamingSupportedEncodingEnum } from \"../generated/gladia/schema/streamingSupportedEncodingEnum\"\nimport type { TranscriptionLanguageCodeEnum } from \"../generated/gladia/schema/transcriptionLanguageCodeEnum\"\n\n/**\n * Gladia transcription provider adapter\n *\n * Implements transcription for the Gladia API with support for:\n * - Synchronous and asynchronous transcription\n * - Speaker diarization (identifying different speakers)\n * - Multi-language detection and transcription\n * - Summarization and sentiment analysis\n * - Custom vocabulary boosting\n * - Word-level timestamps\n *\n * @see https://docs.gladia.io/ Gladia API Documentation\n *\n * @example Basic transcription\n * ```typescript\n * import { GladiaAdapter } from '@meeting-baas/sdk';\n *\n * const adapter = new GladiaAdapter();\n * adapter.initialize({\n * apiKey: process.env.GLADIA_API_KEY\n * });\n *\n * const result = await adapter.transcribe({\n * type: 'url',\n * url: 'https://example.com/audio.mp3'\n * }, {\n * language: 'en',\n * diarization: true\n * });\n *\n * console.log(result.data.text);\n * console.log(result.data.speakers);\n * ```\n *\n * @example With summarization\n * ```typescript\n * const result = await adapter.transcribe(audio, {\n * language: 'en',\n * summarization: true,\n * sentimentAnalysis: true\n * });\n *\n * console.log('Summary:', result.data.summary);\n * ```\n */\nexport class GladiaAdapter extends BaseAdapter {\n readonly name = \"gladia\" as const\n readonly capabilities: ProviderCapabilities = {\n streaming: true,\n diarization: true,\n wordTimestamps: true,\n languageDetection: true,\n customVocabulary: true,\n summarization: true,\n sentimentAnalysis: true,\n entityDetection: true,\n piiRedaction: false // Gladia doesn't have PII redaction in their API\n }\n\n protected baseUrl = \"https://api.gladia.io\"\n\n /**\n * Get axios config for generated API client functions\n * Configures headers and base URL using Gladia's x-gladia-key header\n */\n protected getAxiosConfig() {\n return super.getAxiosConfig(\"x-gladia-key\")\n }\n\n /**\n * Submit audio for transcription\n *\n * Sends audio to Gladia API for transcription. If a webhook URL is provided,\n * returns immediately with the job ID. 
Otherwise, polls until completion.\n *\n * @param audio - Audio input (currently only URL type supported)\n * @param options - Transcription options\n * @param options.language - Language code (e.g., 'en', 'es', 'fr')\n * @param options.languageDetection - Enable automatic language detection\n * @param options.diarization - Enable speaker identification\n * @param options.speakersExpected - Number of expected speakers (for diarization)\n * @param options.summarization - Generate text summary\n * @param options.sentimentAnalysis - Analyze sentiment of transcription\n * @param options.customVocabulary - Words to boost in recognition\n * @param options.webhookUrl - Callback URL for async results\n * @returns Normalized transcription response\n * @throws {Error} If audio type is not 'url' (file/stream not yet supported)\n *\n * @example Simple transcription\n * ```typescript\n * const result = await adapter.transcribe({\n * type: 'url',\n * url: 'https://example.com/meeting.mp3'\n * });\n * ```\n *\n * @example With advanced features\n * ```typescript\n * const result = await adapter.transcribe({\n * type: 'url',\n * url: 'https://example.com/meeting.mp3'\n * }, {\n * language: 'en',\n * diarization: true,\n * speakersExpected: 3,\n * summarization: true,\n * customVocabulary: ['API', 'TypeScript', 'JavaScript']\n * });\n * ```\n *\n * @example With webhook (returns job ID immediately for polling)\n * ```typescript\n * // Submit transcription with webhook\n * const result = await adapter.transcribe({\n * type: 'url',\n * url: 'https://example.com/meeting.mp3'\n * }, {\n * webhookUrl: 'https://myapp.com/webhook/transcription',\n * language: 'en'\n * });\n *\n * // Get job ID for polling\n * const jobId = result.data?.id;\n * console.log('Job ID:', jobId); // Use this to poll for status\n *\n * // Later: Poll for completion (if webhook fails or you want to check)\n * const status = await adapter.getTranscript(jobId);\n * if (status.data?.status === 'completed') {\n * console.log('Transcript:', status.data.text);\n * }\n * ```\n */\n async transcribe(\n audio: AudioInput,\n options?: TranscribeOptions\n ): Promise<UnifiedTranscriptResponse> {\n this.validateConfig()\n\n try {\n // Build typed request using generated types\n const request = this.buildTranscriptionRequest(audio, options)\n\n // Use generated API client function - FULLY TYPED!\n const response = await preRecordedControllerInitPreRecordedJobV2(\n request,\n this.getAxiosConfig()\n )\n\n const jobId = response.data.id\n\n // If webhook is provided, return immediately with job ID\n if (options?.webhookUrl) {\n return {\n success: true,\n provider: this.name,\n data: {\n id: jobId,\n text: \"\",\n status: \"queued\"\n },\n raw: response.data\n }\n }\n\n // Otherwise, poll for results\n return await this.pollForCompletion(jobId)\n } catch (error) {\n return this.createErrorResponse(error)\n }\n }\n\n /**\n * Get transcription result by ID\n */\n async getTranscript(transcriptId: string): Promise<UnifiedTranscriptResponse> {\n this.validateConfig()\n\n try {\n // Use generated API client function - FULLY TYPED!\n const response = await preRecordedControllerGetPreRecordedJobV2(\n transcriptId,\n this.getAxiosConfig()\n )\n\n return this.normalizeResponse(response.data)\n } catch (error) {\n return this.createErrorResponse(error)\n }\n }\n\n /**\n * Build Gladia transcription request from unified options\n */\n private buildTranscriptionRequest(\n audio: AudioInput,\n options?: TranscribeOptions\n ): InitTranscriptionRequest {\n // Get 
audio URL\n let audioUrl: string\n if (audio.type === \"url\") {\n audioUrl = audio.url\n } else {\n throw new Error(\n \"Gladia adapter currently only supports URL-based audio input. Use audio.type='url'\"\n )\n }\n\n const request: InitTranscriptionRequest = {\n audio_url: audioUrl\n }\n\n // Map options to Gladia format\n if (options) {\n // Language configuration\n if (options.language || options.languageDetection) {\n request.language_config = {\n languages: options.language\n ? [options.language as TranscriptionLanguageCodeEnum]\n : undefined,\n code_switching: options.languageDetection\n }\n }\n\n // Diarization (speaker recognition)\n if (options.diarization) {\n request.diarization = true\n if (options.speakersExpected) {\n request.diarization_config = {\n number_of_speakers: options.speakersExpected\n }\n }\n }\n\n // Custom vocabulary\n if (options.customVocabulary && options.customVocabulary.length > 0) {\n request.custom_vocabulary = true\n request.custom_vocabulary_config = {\n vocabulary: options.customVocabulary\n }\n }\n\n // Summarization\n if (options.summarization) {\n request.summarization = true\n }\n\n // Sentiment analysis\n if (options.sentimentAnalysis) {\n request.sentiment_analysis = true\n }\n\n // Named entity recognition (entity detection)\n if (options.entityDetection) {\n request.named_entity_recognition = true\n }\n\n // Webhook callback\n if (options.webhookUrl) {\n request.callback = true\n request.callback_config = {\n url: options.webhookUrl\n }\n }\n\n // Custom metadata\n if (options.metadata) {\n request.custom_metadata = options.metadata\n }\n }\n\n return request\n }\n\n /**\n * Normalize Gladia response to unified format\n */\n private normalizeResponse(response: PreRecordedResponse): UnifiedTranscriptResponse {\n // Use utility to normalize status\n const status = normalizeStatus(response.status, \"gladia\")\n\n // Handle error state\n if (response.status === \"error\") {\n return {\n success: false,\n provider: this.name,\n error: {\n code: response.error_code?.toString() || ERROR_CODES.TRANSCRIPTION_ERROR,\n message: \"Transcription failed\",\n statusCode: response.error_code || undefined\n },\n raw: response\n }\n }\n\n // Extract transcription result\n const result = response.result\n const transcription = result?.transcription\n\n return {\n success: true,\n provider: this.name,\n data: {\n id: response.id,\n text: transcription?.full_transcript || \"\",\n confidence: undefined, // Gladia doesn't provide overall confidence\n status,\n language: transcription?.languages?.[0], // Use first detected language\n duration: undefined, // Not directly available in Gladia response\n speakers: this.extractSpeakers(transcription),\n words: this.extractWords(transcription),\n utterances: this.extractUtterances(transcription),\n summary: result?.summarization?.results || undefined,\n metadata: {\n requestParams: response.request_params,\n customMetadata: response.custom_metadata\n },\n createdAt: response.created_at,\n completedAt: response.completed_at || undefined\n },\n raw: response\n }\n }\n\n /**\n * Extract speaker information from Gladia response\n */\n private extractSpeakers(transcription: TranscriptionDTO | undefined) {\n return extractSpeakersFromUtterances(\n transcription?.utterances,\n (utterance: UtteranceDTO) => utterance.speaker,\n (id) => `Speaker ${id}`\n )\n }\n\n /**\n * Extract word timestamps from Gladia response\n */\n private extractWords(transcription: TranscriptionDTO | undefined) {\n if (!transcription?.utterances) {\n 
return undefined\n }\n\n // Flatten all words from all utterances\n const allWords = transcription.utterances.flatMap((utterance: UtteranceDTO) =>\n utterance.words.map((word: WordDTO) => ({\n word,\n speaker: utterance.speaker\n }))\n )\n\n return extractWordsUtil(allWords, (item) => ({\n text: item.word.word,\n start: item.word.start,\n end: item.word.end,\n confidence: item.word.confidence,\n speaker: item.speaker?.toString()\n }))\n }\n\n /**\n * Extract utterances from Gladia response\n */\n private extractUtterances(transcription: TranscriptionDTO | undefined) {\n if (!transcription?.utterances) {\n return undefined\n }\n\n return transcription.utterances.map((utterance: UtteranceDTO) => ({\n text: utterance.text,\n start: utterance.start,\n end: utterance.end,\n speaker: utterance.speaker?.toString(),\n confidence: utterance.confidence,\n words: utterance.words.map((word: WordDTO) => ({\n text: word.word,\n start: word.start,\n end: word.end,\n confidence: word.confidence\n }))\n }))\n }\n\n /**\n * Poll for transcription completion\n */\n\n /**\n * Stream audio for real-time transcription\n *\n * Creates a WebSocket connection to Gladia for streaming transcription.\n * First initializes a session via REST API, then connects to WebSocket.\n *\n * @param options - Streaming configuration options\n * @param callbacks - Event callbacks for transcription results\n * @returns Promise that resolves with a StreamingSession\n *\n * @example Real-time streaming\n * ```typescript\n * const session = await adapter.transcribeStream({\n * encoding: 'wav/pcm',\n * sampleRate: 16000,\n * channels: 1,\n * language: 'en',\n * interimResults: true\n * }, {\n * onOpen: () => console.log('Connected'),\n * onTranscript: (event) => {\n * if (event.isFinal) {\n * console.log('Final:', event.text);\n * } else {\n * console.log('Interim:', event.text);\n * }\n * },\n * onError: (error) => console.error('Error:', error),\n * onClose: () => console.log('Disconnected')\n * });\n *\n * // Send audio chunks\n * const audioChunk = getAudioChunk(); // Your audio source\n * await session.sendAudio({ data: audioChunk });\n *\n * // Close when done\n * await session.close();\n * ```\n */\n async transcribeStream(\n options?: StreamingOptions,\n callbacks?: StreamingCallbacks\n ): Promise<StreamingSession> {\n this.validateConfig()\n\n // Validate sample rate against OpenAPI-generated enum\n let validatedSampleRate: StreamingSupportedSampleRateEnum | undefined\n if (options?.sampleRate) {\n validatedSampleRate = validateEnumValue(\n options.sampleRate,\n StreamingSupportedSampleRateEnum,\n \"sample rate\",\n \"Gladia\"\n )\n }\n\n // Build typed streaming request using OpenAPI-generated types\n const streamingRequest: Partial<StreamingRequest> = {\n encoding: options?.encoding\n ? 
(mapEncodingToProvider(options.encoding, \"gladia\") as StreamingSupportedEncodingEnum)\n : undefined,\n sample_rate: validatedSampleRate,\n channels: options?.channels,\n endpointing: options?.endpointing\n }\n\n if (options?.language) {\n streamingRequest.language_config = {\n languages: [options.language as TranscriptionLanguageCodeEnum]\n }\n }\n\n // Use generated API client function - FULLY TYPED!\n const initResponse = await streamingControllerInitStreamingSessionV2(\n streamingRequest as StreamingRequest,\n undefined, // no params\n this.getAxiosConfig()\n )\n\n const { id, url: wsUrl } = initResponse.data\n\n // Step 2: Connect to WebSocket\n const ws = new WebSocket(wsUrl)\n\n let sessionStatus: SessionStatus = \"connecting\"\n\n // Setup standard WebSocket event handlers\n setupWebSocketHandlers(ws, callbacks, (status) => {\n sessionStatus = status\n })\n\n ws.on(\"message\", (data: Buffer) => {\n try {\n const message = JSON.parse(data.toString())\n\n // Handle different message types from Gladia\n if (message.type === \"transcript\") {\n // Type-safe: cast to TranscriptMessage after checking type\n const transcriptMessage = message as TranscriptMessage\n const messageData = transcriptMessage.data\n const utterance = messageData.utterance\n\n callbacks?.onTranscript?.({\n type: \"transcript\",\n text: utterance.text,\n isFinal: messageData.is_final,\n confidence: utterance.confidence,\n words: utterance.words.map((word) => ({\n text: word.word,\n start: word.start,\n end: word.end,\n confidence: word.confidence\n })),\n data: message\n })\n } else if (message.type === \"utterance\") {\n // Utterance completed - extract from nested data.utterance structure\n const transcriptMessage = message as TranscriptMessage\n const messageData = transcriptMessage.data\n const utterance = messageData.utterance\n\n const utteranceData = {\n text: utterance.text,\n start: utterance.start,\n end: utterance.end,\n speaker: utterance.speaker?.toString(),\n confidence: utterance.confidence,\n words: utterance.words.map((word) => ({\n text: word.word,\n start: word.start,\n end: word.end,\n confidence: word.confidence\n }))\n }\n callbacks?.onUtterance?.(utteranceData)\n } else if (message.type === \"metadata\") {\n callbacks?.onMetadata?.(message)\n }\n } catch (error) {\n callbacks?.onError?.({\n code: ERROR_CODES.PARSE_ERROR,\n message: \"Failed to parse WebSocket message\",\n details: error\n })\n }\n })\n\n // Wait for WebSocket connection to open\n await waitForWebSocketOpen(ws)\n\n // Return StreamingSession interface\n return {\n id,\n provider: this.name,\n createdAt: new Date(),\n getStatus: () => sessionStatus,\n sendAudio: async (chunk: AudioChunk) => {\n // Validate session is ready\n validateSessionForAudio(sessionStatus, ws.readyState, WebSocket.OPEN)\n\n // Send raw audio data\n ws.send(chunk.data)\n\n // Send stop recording message if this is the last chunk\n if (chunk.isLast) {\n ws.send(\n JSON.stringify({\n type: \"stop_recording\"\n })\n )\n }\n },\n close: async () => {\n if (sessionStatus === \"closed\" || sessionStatus === \"closing\") {\n return\n }\n\n sessionStatus = \"closing\"\n\n // Send stop recording message before closing\n if (ws.readyState === WebSocket.OPEN) {\n ws.send(\n JSON.stringify({\n type: \"stop_recording\"\n })\n )\n }\n\n // Close WebSocket with utility\n await closeWebSocket(ws)\n sessionStatus = \"closed\"\n }\n }\n }\n}\n\n/**\n * Factory function to create a Gladia adapter\n */\nexport function createGladiaAdapter(config: ProviderConfig): 
GladiaAdapter {\n const adapter = new GladiaAdapter()\n adapter.initialize(config)\n return adapter\n}\n","/**\n * Unified audio encoding types for Voice Router SDK\n *\n * These types provide strict typing for audio formats across all providers,\n * preventing common bugs like passing unsupported encoding formats.\n */\n\n/**\n * Unified audio encoding formats supported across providers\n *\n * - `linear16`: PCM 16-bit linear (universal support)\n * - `mulaw`: μ-law 8-bit (Gladia, Deepgram)\n * - `alaw`: A-law 8-bit (Gladia only)\n * - `flac`: FLAC codec (Deepgram only)\n * - `opus`: Opus codec (Deepgram only)\n * - `speex`: Speex codec (Deepgram only)\n * - `amr-nb`: AMR narrowband (Deepgram only)\n * - `amr-wb`: AMR wideband (Deepgram only)\n * - `g729`: G.729 codec (Deepgram only)\n */\nexport type AudioEncoding =\n // Universal - supported by most providers\n | \"linear16\"\n // μ-law and A-law - telephony codecs\n | \"mulaw\"\n | \"alaw\"\n // Advanced codecs - Deepgram specific\n | \"flac\"\n | \"opus\"\n | \"speex\"\n | \"amr-nb\"\n | \"amr-wb\"\n | \"g729\"\n\n/**\n * Standard sample rates (Hz) for audio streaming\n */\nexport type AudioSampleRate = 8000 | 16000 | 32000 | 44100 | 48000\n\n/**\n * Standard bit depths for PCM audio\n */\nexport type AudioBitDepth = 8 | 16 | 24 | 32\n\n/**\n * Audio channel configurations\n */\nexport type AudioChannels = 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8\n\n/**\n * Provider-specific encoding format mappings\n *\n * Each provider may have different names for the same codec.\n * These mappings translate between unified format and provider format.\n */\nexport interface EncodingMapping {\n /** Unified encoding format */\n unified: AudioEncoding\n /** Provider-specific encoding string */\n provider: string\n}\n\n/**\n * Gladia encoding mappings\n * Gladia uses \"wav/xxx\" format for streaming\n */\nexport const GLADIA_ENCODING_MAP: Record<string, string> = {\n linear16: \"wav/pcm\",\n mulaw: \"wav/ulaw\",\n alaw: \"wav/alaw\"\n} as const\n\n/**\n * Deepgram encoding mappings\n * Deepgram uses lowercase format names\n */\nexport const DEEPGRAM_ENCODING_MAP: Record<string, string> = {\n linear16: \"linear16\",\n mulaw: \"mulaw\",\n flac: \"flac\",\n opus: \"opus\",\n speex: \"speex\",\n \"amr-nb\": \"amr-nb\",\n \"amr-wb\": \"amr-wb\",\n g729: \"g729\"\n} as const\n\n/**\n * AssemblyAI encoding mappings\n * AssemblyAI uses pcm_s16le for streaming\n */\nexport const ASSEMBLYAI_ENCODING_MAP: Record<string, string> = {\n linear16: \"pcm_s16le\"\n} as const\n\n/**\n * Get provider-specific encoding format from unified format\n *\n * @param unifiedEncoding - Unified encoding format\n * @param provider - Target provider\n * @returns Provider-specific encoding string\n * @throws Error if encoding is not supported by provider\n *\n * @example\n * ```typescript\n * const gladiaEncoding = mapEncodingToProvider('linear16', 'gladia')\n * // Returns: 'wav/pcm'\n *\n * const deepgramEncoding = mapEncodingToProvider('linear16', 'deepgram')\n * // Returns: 'linear16'\n * ```\n */\nexport function mapEncodingToProvider(\n unifiedEncoding: AudioEncoding,\n provider: \"gladia\" | \"deepgram\" | \"assemblyai\"\n): string {\n let mapping: Record<string, string>\n\n switch (provider) {\n case \"gladia\":\n mapping = GLADIA_ENCODING_MAP\n break\n case \"deepgram\":\n mapping = DEEPGRAM_ENCODING_MAP\n break\n case \"assemblyai\":\n mapping = ASSEMBLYAI_ENCODING_MAP\n break\n }\n\n const providerEncoding = mapping[unifiedEncoding]\n\n if (!providerEncoding) {\n throw new Error(\n 
`Encoding '${unifiedEncoding}' is not supported by ${provider}. ` +\n `Supported encodings: ${Object.keys(mapping).join(\", \")}`\n )\n }\n\n return providerEncoding\n}\n\n/**\n * Validate audio configuration for a specific provider\n *\n * @param config - Audio configuration to validate\n * @param provider - Target provider\n * @throws Error if configuration is invalid for the provider\n */\nexport function validateAudioConfig(\n config: {\n encoding?: AudioEncoding\n sampleRate?: AudioSampleRate\n channels?: AudioChannels\n bitDepth?: AudioBitDepth\n },\n provider: \"gladia\" | \"deepgram\" | \"assemblyai\"\n): void {\n if (config.encoding) {\n // This will throw if encoding is not supported\n mapEncodingToProvider(config.encoding, provider)\n }\n\n // Provider-specific validations\n if (provider === \"gladia\") {\n if (config.channels && (config.channels < 1 || config.channels > 8)) {\n throw new Error(\"Gladia supports 1-8 audio channels\")\n }\n }\n\n if (provider === \"assemblyai\" && config.encoding && config.encoding !== \"linear16\") {\n throw new Error(\"AssemblyAI streaming only supports linear16 encoding\")\n }\n}\n","/**\n * WebSocket utility functions for streaming transcription\n *\n * Provides reusable helpers for WebSocket connection management,\n * event handling, and session validation.\n */\n\nimport type WebSocket from \"ws\"\nimport type { StreamingCallbacks, SessionStatus } from \"../router/types\"\nimport { DEFAULT_TIMEOUTS } from \"../constants/defaults\"\nimport { ERROR_CODES, createError } from \"./errors\"\n\n/**\n * Wait for WebSocket connection to open with timeout\n *\n * @param ws - WebSocket instance\n * @param timeoutMs - Connection timeout in milliseconds\n * @returns Promise that resolves when connection opens\n * @throws Error if connection times out or fails\n *\n * @example\n * ```typescript\n * const ws = new WebSocket(url)\n * await waitForWebSocketOpen(ws)\n * // WebSocket is now open and ready\n * ```\n */\nexport function waitForWebSocketOpen(\n ws: WebSocket,\n timeoutMs: number = DEFAULT_TIMEOUTS.WS_CONNECTION\n): Promise<void> {\n return new Promise<void>((resolve, reject) => {\n const timeout = setTimeout(() => {\n reject(new Error(\"WebSocket connection timeout\"))\n }, timeoutMs)\n\n ws.once(\"open\", () => {\n clearTimeout(timeout)\n resolve()\n })\n\n ws.once(\"error\", (error) => {\n clearTimeout(timeout)\n reject(error)\n })\n })\n}\n\n/**\n * Close WebSocket gracefully with timeout\n *\n * Attempts graceful close, but will forcefully terminate if timeout is reached.\n *\n * @param ws - WebSocket instance\n * @param timeoutMs - Close timeout in milliseconds\n * @returns Promise that resolves when connection is closed\n *\n * @example\n * ```typescript\n * await closeWebSocket(ws)\n * // WebSocket is now closed\n * ```\n */\nexport function closeWebSocket(\n ws: WebSocket,\n timeoutMs: number = DEFAULT_TIMEOUTS.WS_CLOSE\n): Promise<void> {\n return new Promise<void>((resolve) => {\n const timeout = setTimeout(() => {\n ws.terminate()\n resolve()\n }, timeoutMs)\n\n ws.close()\n\n ws.once(\"close\", () => {\n clearTimeout(timeout)\n resolve()\n })\n })\n}\n\n/**\n * Setup standard WebSocket event handlers\n *\n * Configures consistent event handling for open, error, and close events\n * across all streaming adapters.\n *\n * @param ws - WebSocket instance\n * @param callbacks - Streaming callbacks from user\n * @param setSessionStatus - Function to update session status\n *\n * @example\n * ```typescript\n * let sessionStatus: 
SessionStatus = \"connecting\"\n *\n * setupWebSocketHandlers(\n * ws,\n * callbacks,\n * (status) => { sessionStatus = status }\n * )\n * ```\n */\nexport function setupWebSocketHandlers(\n ws: WebSocket,\n callbacks: StreamingCallbacks | undefined,\n setSessionStatus: (status: SessionStatus) => void\n): void {\n ws.on(\"open\", () => {\n setSessionStatus(\"open\")\n callbacks?.onOpen?.()\n })\n\n ws.on(\"error\", (error: Error) => {\n callbacks?.onError?.(createError(ERROR_CODES.WEBSOCKET_ERROR, error.message, error))\n })\n\n ws.on(\"close\", (code: number, reason: Buffer) => {\n setSessionStatus(\"closed\")\n callbacks?.onClose?.(code, reason.toString())\n })\n}\n\n/**\n * Validate that WebSocket session is ready to send audio\n *\n * Checks both session status and WebSocket ready state before allowing\n * audio data to be sent.\n *\n * @param sessionStatus - Current session status\n * @param wsReadyState - WebSocket readyState value\n * @param WebSocketOpen - WebSocket.OPEN constant value\n * @throws Error if session is not ready\n *\n * @example\n * ```typescript\n * validateSessionForAudio(sessionStatus, ws.readyState, WebSocket.OPEN)\n * ws.send(audioData) // Safe to send now\n * ```\n */\nexport function validateSessionForAudio(\n sessionStatus: SessionStatus,\n wsReadyState: number,\n WebSocketOpen: number\n): void {\n if (sessionStatus !== \"open\") {\n throw new Error(`Cannot send audio: session is ${sessionStatus}`)\n }\n\n if (wsReadyState !== WebSocketOpen) {\n throw new Error(\"WebSocket is not open\")\n }\n}\n","/**\n * Validation utilities for Voice Router SDK\n *\n * Provides generic validators for OpenAPI-generated enum types,\n * ensuring type-safe validation across all providers.\n */\n\n/**\n * Validate that a value is a member of an OpenAPI-generated enum\n *\n * This generic validator works with any enum type generated by Orval from\n * OpenAPI specifications, providing consistent validation and error messages.\n *\n * @param value - Value to validate\n * @param enumType - OpenAPI-generated enum object\n * @param fieldName - Human-readable field name for error messages\n * @param provider - Provider name for error messages\n * @returns The validated value, properly typed as the enum type\n * @throws Error if value is not in the enum\n *\n * @example Validate Gladia sample rate\n * ```typescript\n * import { StreamingSupportedSampleRateEnum } from \"../generated/gladia/schema/streamingSupportedSampleRateEnum\"\n *\n * const validatedRate = validateEnumValue(\n * 16000,\n * StreamingSupportedSampleRateEnum,\n * \"sample rate\",\n * \"Gladia\"\n * )\n * // Returns: 16000 (typed as StreamingSupportedSampleRateEnum)\n *\n * validateEnumValue(\n * 22050,\n * StreamingSupportedSampleRateEnum,\n * \"sample rate\",\n * \"Gladia\"\n * )\n * // Throws: \"Gladia does not support sample rate '22050'. 
Supported values (from OpenAPI spec): 8000, 16000, 32000, 44100, 48000\"\n * ```\n *\n * @example Validate Deepgram encoding\n * ```typescript\n * import { ListenV1EncodingParameter } from \"../generated/deepgram/schema/listenV1EncodingParameter\"\n *\n * const encoding = validateEnumValue(\n * \"linear16\",\n * ListenV1EncodingParameter,\n * \"encoding\",\n * \"Deepgram\"\n * )\n * ```\n */\nexport function validateEnumValue<T extends Record<string, string | number>>(\n value: unknown,\n enumType: T,\n fieldName: string,\n provider: string\n): T[keyof T] {\n const validValues = Object.values(enumType)\n const isValid = validValues.some((v) => v === value)\n\n if (!isValid) {\n throw new Error(\n `${provider} does not support ${fieldName} '${value}'. ` +\n `Supported values (from OpenAPI spec): ${validValues.join(\", \")}`\n )\n }\n\n return value as T[keyof T]\n}\n\n/**\n * Validate optional enum value\n *\n * Like validateEnumValue but returns undefined if value is nullish,\n * useful for optional configuration parameters.\n *\n * @param value - Value to validate (can be undefined/null)\n * @param enumType - OpenAPI-generated enum object\n * @param fieldName - Human-readable field name for error messages\n * @param provider - Provider name for error messages\n * @returns The validated value or undefined\n * @throws Error if value is not in the enum (but not if undefined/null)\n *\n * @example\n * ```typescript\n * const rate = validateOptionalEnumValue(\n * options?.sampleRate,\n * StreamingSupportedSampleRateEnum,\n * \"sample rate\",\n * \"Gladia\"\n * )\n * // Returns: validated rate or undefined\n * ```\n */\nexport function validateOptionalEnumValue<T extends Record<string, string | number>>(\n value: unknown,\n enumType: T,\n fieldName: string,\n provider: string\n): T[keyof T] | undefined {\n if (value === undefined || value === null) {\n return undefined\n }\n\n return validateEnumValue(value, enumType, fieldName, provider)\n}\n","/**\n * Transcription processing utilities\n *\n * Provides reusable helpers for extracting and normalizing transcription\n * data (speakers, words, utterances) across different provider formats.\n */\n\nimport type { Speaker, Word, TranscriptionStatus } from \"../router/types\"\n\n/**\n * Extract unique speakers from utterances\n *\n * Generic helper that works with any provider's utterance format via\n * a mapping function to extract speaker IDs.\n *\n * @param utterances - Provider-specific utterances array\n * @param getSpeakerId - Function to extract speaker ID from utterance\n * @param formatLabel - Optional function to format speaker label\n * @returns Array of unique speakers or undefined if none found\n *\n * @example Gladia\n * ```typescript\n * const speakers = extractSpeakersFromUtterances(\n * transcription?.utterances,\n * (utterance) => utterance.speaker,\n * (id) => `Speaker ${id}`\n * )\n * ```\n *\n * @example AssemblyAI (already has good labels)\n * ```typescript\n * const speakers = extractSpeakersFromUtterances(\n * transcript.utterances,\n * (utterance) => utterance.speaker,\n * (id) => id // Keep as-is: \"A\", \"B\", \"C\"\n * )\n * ```\n */\nexport function extractSpeakersFromUtterances<T>(\n utterances: T[] | undefined | null,\n getSpeakerId: (utterance: T) => string | number | undefined,\n formatLabel?: (speakerId: string) => string\n): Speaker[] | undefined {\n if (!utterances || utterances.length === 0) {\n return undefined\n }\n\n const speakerSet = new Set<string>()\n\n utterances.forEach((utterance) => {\n const speakerId 
= getSpeakerId(utterance)\n if (speakerId !== undefined) {\n speakerSet.add(String(speakerId))\n }\n })\n\n if (speakerSet.size === 0) {\n return undefined\n }\n\n return Array.from(speakerSet).map((speakerId) => ({\n id: speakerId,\n label: formatLabel ? formatLabel(speakerId) : `Speaker ${speakerId}`\n }))\n}\n\n/**\n * Extract and normalize words from provider-specific format\n *\n * Generic helper that maps provider word formats to unified Word type.\n *\n * @param words - Provider-specific words array\n * @param mapper - Function to convert provider word to unified Word\n * @returns Array of normalized words or undefined if none found\n *\n * @example Gladia\n * ```typescript\n * const words = extractWords(\n * allWords,\n * (word: WordDTO) => ({\n * text: word.word,\n * start: word.start,\n * end: word.end,\n * confidence: word.confidence\n * })\n * )\n * ```\n */\nexport function extractWords<T>(\n words: T[] | undefined | null,\n mapper: (word: T) => Word\n): Word[] | undefined {\n if (!words || words.length === 0) {\n return undefined\n }\n\n const normalizedWords = words.map(mapper)\n return normalizedWords.length > 0 ? normalizedWords : undefined\n}\n\n/**\n * Status mapping configurations for each provider\n *\n * Maps provider-specific status strings to unified TranscriptionStatus.\n * Keys are lowercase provider status values, values are unified statuses.\n */\nexport const STATUS_MAPPINGS = {\n gladia: {\n queued: \"queued\" as TranscriptionStatus,\n processing: \"processing\" as TranscriptionStatus,\n done: \"completed\" as TranscriptionStatus,\n error: \"error\" as TranscriptionStatus\n },\n assemblyai: {\n queued: \"queued\" as TranscriptionStatus,\n processing: \"processing\" as TranscriptionStatus,\n completed: \"completed\" as TranscriptionStatus,\n error: \"error\" as TranscriptionStatus\n },\n deepgram: {\n queued: \"queued\" as TranscriptionStatus,\n processing: \"processing\" as TranscriptionStatus,\n completed: \"completed\" as TranscriptionStatus,\n error: \"error\" as TranscriptionStatus\n },\n azure: {\n succeeded: \"completed\" as TranscriptionStatus,\n running: \"processing\" as TranscriptionStatus,\n notstarted: \"queued\" as TranscriptionStatus,\n failed: \"error\" as TranscriptionStatus\n },\n speechmatics: {\n running: \"processing\" as TranscriptionStatus,\n done: \"completed\" as TranscriptionStatus,\n rejected: \"error\" as TranscriptionStatus,\n expired: \"error\" as TranscriptionStatus\n }\n} as const\n\nexport type SupportedProvider = keyof typeof STATUS_MAPPINGS\n\n/**\n * Normalize provider status to unified status\n *\n * Handles both exact matches and substring matches (for Azure-style statuses).\n *\n * @param providerStatus - Status string from provider API\n * @param provider - Provider name\n * @param defaultStatus - Fallback status if no match found\n * @returns Unified transcription status\n *\n * @example Gladia\n * ```typescript\n * const status = normalizeStatus(response.status, \"gladia\")\n * // \"done\" -> \"completed\"\n * ```\n *\n * @example Azure (substring matching)\n * ```typescript\n * const status = normalizeStatus(\"Succeeded\", \"azure\")\n * // Case-insensitive substring match: \"Succeeded\" contains \"succeeded\" -> \"completed\"\n * ```\n */\nexport function normalizeStatus(\n providerStatus: string | undefined | null,\n provider: SupportedProvider,\n defaultStatus: TranscriptionStatus = \"queued\"\n): TranscriptionStatus {\n if (!providerStatus) return defaultStatus\n\n const mapping = STATUS_MAPPINGS[provider]\n const 
statusKey = providerStatus.toString().toLowerCase()\n\n // Try exact match first\n if (statusKey in mapping) {\n return mapping[statusKey as keyof typeof mapping]\n }\n\n // Try substring match (for Azure-style statuses like \"Succeeded\", \"NotStarted\")\n for (const [key, value] of Object.entries(mapping)) {\n if (statusKey.includes(key)) {\n return value\n }\n }\n\n return defaultStatus\n}\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nimport { faker } from \"@faker-js/faker\"\nimport type { AxiosRequestConfig, AxiosResponse } from \"axios\"\nimport axios from \"axios\"\nimport { delay, HttpResponse, http } from \"msw\"\nimport type {\n AudioToTextControllerAudioTranscriptionBody,\n AudioUploadResponse,\n CustomVocabularyEntryDTO,\n FileControllerUploadV2BodyOne,\n FileControllerUploadV2BodyTwo,\n HistoryControllerGetListV1Params,\n InitPreRecordedTranscriptionResponse,\n InitStreamingResponse,\n InitTranscriptionRequest,\n ListHistoryResponse,\n ListPreRecordedResponse,\n ListStreamingResponse,\n ListTranscriptionResponse,\n PatchRequestParamsDTO,\n PreRecordedControllerGetPreRecordedJobsV2Params,\n PreRecordedResponse,\n StreamingControllerGetStreamingJobsV2Params,\n StreamingControllerInitStreamingSessionV2Params,\n StreamingRequest,\n StreamingResponse,\n TranscriptionControllerGetTranscriptV2200,\n TranscriptionControllerListV2Params,\n VideoToTextControllerVideoTranscriptionBody\n} from \"../schema\"\nimport {\n CallbackMethodEnum,\n StreamingSupportedEncodingEnum,\n StreamingSupportedModels,\n SubtitlesFormatEnum,\n SubtitlesStyleEnum,\n SummaryTypesEnum,\n TranscriptionLanguageCodeEnum,\n TranslationLanguageCodeEnum,\n TranslationModelEnum\n} from \"../schema\"\n\n/**\n * @summary Upload an audio file or provide an audio URL for processing\n */\nexport const fileControllerUploadV2 = <TData = AxiosResponse<AudioUploadResponse>>(\n fileControllerUploadV2Body: FileControllerUploadV2BodyOne | FileControllerUploadV2BodyTwo,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(\"/v2/upload\", fileControllerUploadV2Body, options)\n}\n\n/**\n * @summary Initiate a new pre recorded job\n */\nexport const preRecordedControllerInitPreRecordedJobV2 = <\n TData = AxiosResponse<InitPreRecordedTranscriptionResponse>\n>(\n initTranscriptionRequest: InitTranscriptionRequest,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(\"/v2/pre-recorded\", initTranscriptionRequest, options)\n}\n\n/**\n * @summary Get pre recorded jobs based on query parameters\n */\nexport const preRecordedControllerGetPreRecordedJobsV2 = <\n TData = AxiosResponse<ListPreRecordedResponse>\n>(\n params?: PreRecordedControllerGetPreRecordedJobsV2Params,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/v2/pre-recorded\", {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Get the pre recorded job's metadata\n */\nexport const preRecordedControllerGetPreRecordedJobV2 = <\n TData = AxiosResponse<PreRecordedResponse>\n>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/v2/pre-recorded/${id}`, options)\n}\n\n/**\n * @summary Delete the pre recorded job\n */\nexport const preRecordedControllerDeletePreRecordedJobV2 = <TData = AxiosResponse<void>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.delete(`/v2/pre-recorded/${id}`, options)\n}\n\n/**\n * @summary Download 
the audio file used for this pre recorded job\n */\nexport const preRecordedControllerGetAudioV2 = <TData = AxiosResponse<Blob>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/v2/pre-recorded/${id}/file`, {\n responseType: \"blob\",\n ...options\n })\n}\n\n/**\n * @summary Initiate a new transcription job\n */\nexport const transcriptionControllerInitPreRecordedJobV2 = <\n TData = AxiosResponse<InitPreRecordedTranscriptionResponse>\n>(\n initTranscriptionRequest: InitTranscriptionRequest,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(\"/v2/transcription\", initTranscriptionRequest, options)\n}\n\n/**\n * @summary Get transcription jobs based on query parameters\n */\nexport const transcriptionControllerListV2 = <TData = AxiosResponse<ListTranscriptionResponse>>(\n params?: TranscriptionControllerListV2Params,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/v2/transcription\", {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Get the transcription job's metadata\n */\nexport const transcriptionControllerGetTranscriptV2 = <\n TData = AxiosResponse<TranscriptionControllerGetTranscriptV2200>\n>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/v2/transcription/${id}`, options)\n}\n\n/**\n * @summary Delete the transcription job\n */\nexport const transcriptionControllerDeleteTranscriptV2 = <TData = AxiosResponse<void>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.delete(`/v2/transcription/${id}`, options)\n}\n\n/**\n * @summary Download the audio file used for this transcription job\n */\nexport const transcriptionControllerGetAudioV2 = <TData = AxiosResponse<Blob>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/v2/transcription/${id}/file`, {\n responseType: \"blob\",\n ...options\n })\n}\n\nexport const audioToTextControllerAudioTranscription = <TData = AxiosResponse<void>>(\n audioToTextControllerAudioTranscriptionBody: AudioToTextControllerAudioTranscriptionBody,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n const formData = new FormData()\n if (audioToTextControllerAudioTranscriptionBody.audio !== undefined) {\n formData.append(\"audio\", audioToTextControllerAudioTranscriptionBody.audio)\n }\n if (audioToTextControllerAudioTranscriptionBody.audio_url !== undefined) {\n formData.append(\"audio_url\", audioToTextControllerAudioTranscriptionBody.audio_url)\n }\n if (audioToTextControllerAudioTranscriptionBody.language_behaviour !== undefined) {\n formData.append(\n \"language_behaviour\",\n audioToTextControllerAudioTranscriptionBody.language_behaviour\n )\n }\n if (audioToTextControllerAudioTranscriptionBody.language !== undefined) {\n formData.append(\"language\", audioToTextControllerAudioTranscriptionBody.language)\n }\n if (audioToTextControllerAudioTranscriptionBody.transcription_hint !== undefined) {\n formData.append(\n \"transcription_hint\",\n audioToTextControllerAudioTranscriptionBody.transcription_hint\n )\n }\n if (audioToTextControllerAudioTranscriptionBody.toggle_diarization !== undefined) {\n formData.append(\n \"toggle_diarization\",\n audioToTextControllerAudioTranscriptionBody.toggle_diarization.toString()\n )\n }\n if (audioToTextControllerAudioTranscriptionBody.diarization_num_speakers !== undefined) {\n formData.append(\n \"diarization_num_speakers\",\n 
audioToTextControllerAudioTranscriptionBody.diarization_num_speakers.toString()\n )\n }\n if (audioToTextControllerAudioTranscriptionBody.diarization_min_speakers !== undefined) {\n formData.append(\n \"diarization_min_speakers\",\n audioToTextControllerAudioTranscriptionBody.diarization_min_speakers.toString()\n )\n }\n if (audioToTextControllerAudioTranscriptionBody.diarization_max_speakers !== undefined) {\n formData.append(\n \"diarization_max_speakers\",\n audioToTextControllerAudioTranscriptionBody.diarization_max_speakers.toString()\n )\n }\n if (audioToTextControllerAudioTranscriptionBody.toggle_direct_translate !== undefined) {\n formData.append(\n \"toggle_direct_translate\",\n audioToTextControllerAudioTranscriptionBody.toggle_direct_translate.toString()\n )\n }\n if (audioToTextControllerAudioTranscriptionBody.target_translation_language !== undefined) {\n formData.append(\n \"target_translation_language\",\n audioToTextControllerAudioTranscriptionBody.target_translation_language\n )\n }\n if (audioToTextControllerAudioTranscriptionBody.output_format !== undefined) {\n formData.append(\"output_format\", audioToTextControllerAudioTranscriptionBody.output_format)\n }\n if (audioToTextControllerAudioTranscriptionBody.toggle_noise_reduction !== undefined) {\n formData.append(\n \"toggle_noise_reduction\",\n audioToTextControllerAudioTranscriptionBody.toggle_noise_reduction.toString()\n )\n }\n if (audioToTextControllerAudioTranscriptionBody.toggle_accurate_words_timestamps !== undefined) {\n formData.append(\n \"toggle_accurate_words_timestamps\",\n audioToTextControllerAudioTranscriptionBody.toggle_accurate_words_timestamps.toString()\n )\n }\n if (audioToTextControllerAudioTranscriptionBody.webhook_url !== undefined) {\n formData.append(\"webhook_url\", audioToTextControllerAudioTranscriptionBody.webhook_url)\n }\n\n return axios.post(\"/audio/text/audio-transcription\", formData, options)\n}\n\nexport const videoToTextControllerVideoTranscription = <TData = AxiosResponse<void>>(\n videoToTextControllerVideoTranscriptionBody: VideoToTextControllerVideoTranscriptionBody,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n const formData = new FormData()\n if (videoToTextControllerVideoTranscriptionBody.video !== undefined) {\n formData.append(\"video\", videoToTextControllerVideoTranscriptionBody.video)\n }\n if (videoToTextControllerVideoTranscriptionBody.video_url !== undefined) {\n formData.append(\"video_url\", videoToTextControllerVideoTranscriptionBody.video_url)\n }\n if (videoToTextControllerVideoTranscriptionBody.language_behaviour !== undefined) {\n formData.append(\n \"language_behaviour\",\n videoToTextControllerVideoTranscriptionBody.language_behaviour\n )\n }\n if (videoToTextControllerVideoTranscriptionBody.language !== undefined) {\n formData.append(\"language\", videoToTextControllerVideoTranscriptionBody.language)\n }\n if (videoToTextControllerVideoTranscriptionBody.transcription_hint !== undefined) {\n formData.append(\n \"transcription_hint\",\n videoToTextControllerVideoTranscriptionBody.transcription_hint\n )\n }\n if (videoToTextControllerVideoTranscriptionBody.toggle_diarization !== undefined) {\n formData.append(\n \"toggle_diarization\",\n videoToTextControllerVideoTranscriptionBody.toggle_diarization.toString()\n )\n }\n if (videoToTextControllerVideoTranscriptionBody.diarization_num_speakers !== undefined) {\n formData.append(\n \"diarization_num_speakers\",\n videoToTextControllerVideoTranscriptionBody.diarization_num_speakers.toString()\n )\n 
}\n if (videoToTextControllerVideoTranscriptionBody.diarization_min_speakers !== undefined) {\n formData.append(\n \"diarization_min_speakers\",\n videoToTextControllerVideoTranscriptionBody.diarization_min_speakers.toString()\n )\n }\n if (videoToTextControllerVideoTranscriptionBody.diarization_max_speakers !== undefined) {\n formData.append(\n \"diarization_max_speakers\",\n videoToTextControllerVideoTranscriptionBody.diarization_max_speakers.toString()\n )\n }\n if (videoToTextControllerVideoTranscriptionBody.toggle_direct_translate !== undefined) {\n formData.append(\n \"toggle_direct_translate\",\n videoToTextControllerVideoTranscriptionBody.toggle_direct_translate.toString()\n )\n }\n if (videoToTextControllerVideoTranscriptionBody.target_translation_language !== undefined) {\n formData.append(\n \"target_translation_language\",\n videoToTextControllerVideoTranscriptionBody.target_translation_language\n )\n }\n if (videoToTextControllerVideoTranscriptionBody.output_format !== undefined) {\n formData.append(\"output_format\", videoToTextControllerVideoTranscriptionBody.output_format)\n }\n if (videoToTextControllerVideoTranscriptionBody.toggle_noise_reduction !== undefined) {\n formData.append(\n \"toggle_noise_reduction\",\n videoToTextControllerVideoTranscriptionBody.toggle_noise_reduction.toString()\n )\n }\n if (videoToTextControllerVideoTranscriptionBody.toggle_accurate_words_timestamps !== undefined) {\n formData.append(\n \"toggle_accurate_words_timestamps\",\n videoToTextControllerVideoTranscriptionBody.toggle_accurate_words_timestamps.toString()\n )\n }\n if (videoToTextControllerVideoTranscriptionBody.webhook_url !== undefined) {\n formData.append(\"webhook_url\", videoToTextControllerVideoTranscriptionBody.webhook_url)\n }\n\n return axios.post(\"/video/text/video-transcription\", formData, options)\n}\n\n/**\n * @summary Get the history of all your jobs\n */\nexport const historyControllerGetListV1 = <TData = AxiosResponse<ListHistoryResponse>>(\n params?: HistoryControllerGetListV1Params,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/v1/history\", {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Initiate a new live job\n */\nexport const streamingControllerInitStreamingSessionV2 = <\n TData = AxiosResponse<InitStreamingResponse>\n>(\n streamingRequest: StreamingRequest,\n params?: StreamingControllerInitStreamingSessionV2Params,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(\"/v2/live\", streamingRequest, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Get live jobs based on query parameters\n */\nexport const streamingControllerGetStreamingJobsV2 = <TData = AxiosResponse<ListStreamingResponse>>(\n params?: StreamingControllerGetStreamingJobsV2Params,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/v2/live\", {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Get the live job's metadata\n */\nexport const streamingControllerGetStreamingJobV2 = <TData = AxiosResponse<StreamingResponse>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/v2/live/${id}`, options)\n}\n\n/**\n * @summary Delete the live job\n */\nexport const streamingControllerDeleteStreamingJobV2 = <TData = AxiosResponse<void>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.delete(`/v2/live/${id}`, options)\n}\n\n/**\n * @summary 
For debugging purposes, send post session metadata in the request params of the job\n */\nexport const streamingControllerPatchRequestParamsV2 = <TData = AxiosResponse<void>>(\n id: string,\n patchRequestParamsDTO: PatchRequestParamsDTO,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.patch(`/v2/live/${id}`, patchRequestParamsDTO, options)\n}\n\n/**\n * @summary Download the audio file used for this live job\n */\nexport const streamingControllerGetAudioV2 = <TData = AxiosResponse<Blob>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/v2/live/${id}/file`, {\n responseType: \"blob\",\n ...options\n })\n}\n\nexport type FileControllerUploadV2Result = AxiosResponse<AudioUploadResponse>\nexport type PreRecordedControllerInitPreRecordedJobV2Result =\n AxiosResponse<InitPreRecordedTranscriptionResponse>\nexport type PreRecordedControllerGetPreRecordedJobsV2Result = AxiosResponse<ListPreRecordedResponse>\nexport type PreRecordedControllerGetPreRecordedJobV2Result = AxiosResponse<PreRecordedResponse>\nexport type PreRecordedControllerDeletePreRecordedJobV2Result = AxiosResponse<void>\nexport type PreRecordedControllerGetAudioV2Result = AxiosResponse<Blob>\nexport type TranscriptionControllerInitPreRecordedJobV2Result =\n AxiosResponse<InitPreRecordedTranscriptionResponse>\nexport type TranscriptionControllerListV2Result = AxiosResponse<ListTranscriptionResponse>\nexport type TranscriptionControllerGetTranscriptV2Result =\n AxiosResponse<TranscriptionControllerGetTranscriptV2200>\nexport type TranscriptionControllerDeleteTranscriptV2Result = AxiosResponse<void>\nexport type TranscriptionControllerGetAudioV2Result = AxiosResponse<Blob>\nexport type AudioToTextControllerAudioTranscriptionResult = AxiosResponse<void>\nexport type VideoToTextControllerVideoTranscriptionResult = AxiosResponse<void>\nexport type HistoryControllerGetListV1Result = AxiosResponse<ListHistoryResponse>\nexport type StreamingControllerInitStreamingSessionV2Result = AxiosResponse<InitStreamingResponse>\nexport type StreamingControllerGetStreamingJobsV2Result = AxiosResponse<ListStreamingResponse>\nexport type StreamingControllerGetStreamingJobV2Result = AxiosResponse<StreamingResponse>\nexport type StreamingControllerDeleteStreamingJobV2Result = AxiosResponse<void>\nexport type StreamingControllerPatchRequestParamsV2Result = AxiosResponse<void>\nexport type StreamingControllerGetAudioV2Result = AxiosResponse<Blob>\n\nexport const getFileControllerUploadV2ResponseMock = (\n overrideResponse: Partial<AudioUploadResponse> = {}\n): AudioUploadResponse => ({\n audio_url: faker.internet.url(),\n audio_metadata: {\n ...{\n id: faker.string.uuid(),\n filename: faker.string.alpha(20),\n source: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n extension: faker.string.uuid(),\n size: faker.number.int({ min: undefined, max: undefined }),\n audio_duration: faker.number.int({ min: undefined, max: undefined }),\n number_of_channels: faker.number.int({ min: undefined, max: undefined })\n }\n },\n ...overrideResponse\n})\n\nexport const getPreRecordedControllerInitPreRecordedJobV2ResponseMock = (\n overrideResponse: Partial<InitPreRecordedTranscriptionResponse> = {}\n): InitPreRecordedTranscriptionResponse => ({\n id: faker.string.uuid(),\n result_url: faker.internet.url(),\n ...overrideResponse\n})\n\nexport const getPreRecordedControllerGetPreRecordedJobsV2ResponseCustomVocabularyEntryDTOMock = (\n overrideResponse: Partial<CustomVocabularyEntryDTO> = {}\n): 
CustomVocabularyEntryDTO => ({\n ...{\n value: faker.string.alpha(20),\n intensity: faker.helpers.arrayElement([faker.number.int({ min: 0, max: 1 }), undefined]),\n pronunciations: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ]),\n language: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ])\n },\n ...overrideResponse\n})\n\nexport const getPreRecordedControllerGetPreRecordedJobsV2ResponseMock = (\n overrideResponse: Partial<ListPreRecordedResponse> = {}\n): ListPreRecordedResponse => ({\n first: faker.internet.url(),\n current: faker.internet.url(),\n next: faker.helpers.arrayElement([faker.internet.url(), null]),\n items: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n id: faker.string.uuid(),\n request_id: faker.string.alpha(20),\n version: faker.number.int({ min: undefined, max: undefined }),\n status: faker.helpers.arrayElement([\"queued\", \"processing\", \"done\", \"error\"] as const),\n created_at: `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n completed_at: faker.helpers.arrayElement([\n faker.helpers.arrayElement([`${faker.date.past().toISOString().split(\".\")[0]}Z`, null]),\n undefined\n ]),\n custom_metadata: faker.helpers.arrayElement([{}, undefined]),\n error_code: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: 400, max: 599 }), null]),\n undefined\n ]),\n post_session_metadata: {},\n kind: faker.helpers.arrayElement([\"pre-recorded\"] as const),\n file: faker.helpers.arrayElement([\n {\n ...{\n id: faker.string.alpha(20),\n filename: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n source: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n audio_duration: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n null\n ]),\n number_of_channels: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n null\n ])\n }\n },\n undefined\n ]),\n request_params: faker.helpers.arrayElement([\n {\n ...{\n context_prompt: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n custom_vocabulary: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_vocabulary_config: faker.helpers.arrayElement([\n {\n ...{\n vocabulary: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() =>\n faker.helpers.arrayElement([\n {\n ...getPreRecordedControllerGetPreRecordedJobsV2ResponseCustomVocabularyEntryDTOMock()\n },\n faker.string.alpha(20)\n ])\n ),\n default_intensity: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: 1 }),\n undefined\n ])\n }\n },\n undefined\n ]),\n detect_language: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n enable_code_switching: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n code_switching_config: faker.helpers.arrayElement([\n {\n ...{\n languages: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n language: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ]),\n callback_url: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n callback: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n 
callback_config: faker.helpers.arrayElement([\n {\n ...{\n url: faker.internet.url(),\n method: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(CallbackMethodEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n subtitles_config: faker.helpers.arrayElement([\n {\n ...{\n formats: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(SubtitlesFormatEnum)),\n undefined\n ]),\n minimum_duration: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n maximum_duration: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: 30 }),\n undefined\n ]),\n maximum_characters_per_row: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ]),\n maximum_rows_per_caption: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: 5 }),\n undefined\n ]),\n style: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SubtitlesStyleEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n diarization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n diarization_config: faker.helpers.arrayElement([\n {\n ...{\n number_of_speakers: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ]),\n min_speakers: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n max_speakers: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ])\n }\n },\n undefined\n ]),\n translation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n translation_config: faker.helpers.arrayElement([\n {\n ...{\n target_languages: faker.helpers.arrayElements(\n Object.values(TranslationLanguageCodeEnum)\n ),\n model: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranslationModelEnum)),\n undefined\n ]),\n match_original_utterances: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n lipsync: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n context_adaptation: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n context: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n informal: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n summarization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n summarization_config: faker.helpers.arrayElement([\n {\n ...{\n type: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SummaryTypesEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n moderation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n named_entity_recognition: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n chapterization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n name_consistency: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_spelling: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_spelling_config: faker.helpers.arrayElement([\n {\n ...{\n spelling_dictionary: {\n [faker.string.alphanumeric(5)]: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n }\n },\n undefined\n ]),\n structured_data_extraction: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n 
undefined\n ]),\n structured_data_extraction_config: faker.helpers.arrayElement([\n {\n ...{\n classes: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => [])\n }\n },\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n audio_to_llm: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n audio_to_llm_config: faker.helpers.arrayElement([\n {\n ...{\n prompts: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => [])\n }\n },\n undefined\n ]),\n sentences: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n display_mode: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n punctuation_enhanced: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n language_config: faker.helpers.arrayElement([\n {\n ...{\n languages: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ]),\n code_switching: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n audio_url: faker.helpers.arrayElement([faker.internet.url(), null])\n }\n },\n undefined\n ]),\n result: faker.helpers.arrayElement([\n {\n ...{\n metadata: {\n ...{\n audio_duration: faker.number.int({ min: undefined, max: undefined }),\n number_of_distinct_channels: faker.number.int({ min: 1, max: undefined }),\n billing_time: faker.number.int({ min: undefined, max: undefined }),\n transcription_time: faker.number.int({ min: undefined, max: undefined })\n }\n },\n transcription: faker.helpers.arrayElement([\n {\n ...{\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(\n Object.values(TranscriptionLanguageCodeEnum)\n ),\n sentences: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })),\n undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: 
faker.string.alpha(20),\n language: faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum))\n }))\n }\n },\n undefined\n ]),\n translation: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(\n Object.values(TranslationLanguageCodeEnum)\n ),\n sentences: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })),\n undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(\n Object.values(TranscriptionLanguageCodeEnum)\n )\n }))\n }))\n }\n },\n undefined\n ]),\n summarization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n },\n undefined\n ]),\n moderation: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: 
faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n },\n undefined\n ]),\n named_entity_recognition: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n entity: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n name_consistency: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n speaker_reidentification: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n structured_data_extraction: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n audio_to_llm: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: {\n ...{\n prompt: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n response: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n }\n }))\n }\n },\n undefined\n ]),\n sentences: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n 
status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n },\n undefined\n ]),\n display_mode: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n },\n undefined\n ]),\n chapterization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: {}\n }\n },\n undefined\n ]),\n diarization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum))\n }))\n }\n },\n undefined\n ])\n }\n },\n undefined\n ])\n })),\n ...overrideResponse\n})\n\nexport const getPreRecordedControllerGetPreRecordedJobV2ResponseCustomVocabularyEntryDTOMock = (\n overrideResponse: Partial<CustomVocabularyEntryDTO> = {}\n): CustomVocabularyEntryDTO => ({\n ...{\n value: faker.string.alpha(20),\n intensity: faker.helpers.arrayElement([faker.number.int({ min: 0, max: 1 }), undefined]),\n pronunciations: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ]),\n language: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ])\n },\n ...overrideResponse\n})\n\nexport const getPreRecordedControllerGetPreRecordedJobV2ResponseMock = (\n overrideResponse: Partial<PreRecordedResponse> = {}\n): PreRecordedResponse => ({\n id: faker.string.uuid(),\n request_id: faker.string.alpha(20),\n version: faker.number.int({ min: 
undefined, max: undefined }),\n status: faker.helpers.arrayElement([\"queued\", \"processing\", \"done\", \"error\"] as const),\n created_at: `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n completed_at: faker.helpers.arrayElement([\n faker.helpers.arrayElement([`${faker.date.past().toISOString().split(\".\")[0]}Z`, null]),\n undefined\n ]),\n custom_metadata: faker.helpers.arrayElement([{}, undefined]),\n error_code: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: 400, max: 599 }), null]),\n undefined\n ]),\n post_session_metadata: {},\n kind: faker.helpers.arrayElement([\"pre-recorded\"] as const),\n file: faker.helpers.arrayElement([\n {\n ...{\n id: faker.string.alpha(20),\n filename: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n source: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n audio_duration: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n null\n ]),\n number_of_channels: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n null\n ])\n }\n },\n undefined\n ]),\n request_params: faker.helpers.arrayElement([\n {\n ...{\n context_prompt: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n custom_vocabulary: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_vocabulary_config: faker.helpers.arrayElement([\n {\n ...{\n vocabulary: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() =>\n faker.helpers.arrayElement([\n {\n ...getPreRecordedControllerGetPreRecordedJobV2ResponseCustomVocabularyEntryDTOMock()\n },\n faker.string.alpha(20)\n ])\n ),\n default_intensity: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: 1 }),\n undefined\n ])\n }\n },\n undefined\n ]),\n detect_language: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n enable_code_switching: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n code_switching_config: faker.helpers.arrayElement([\n {\n ...{\n languages: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n language: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ]),\n callback_url: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n callback: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n callback_config: faker.helpers.arrayElement([\n {\n ...{\n url: faker.internet.url(),\n method: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(CallbackMethodEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n subtitles_config: faker.helpers.arrayElement([\n {\n ...{\n formats: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(SubtitlesFormatEnum)),\n undefined\n ]),\n minimum_duration: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n maximum_duration: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: 30 }),\n undefined\n ]),\n maximum_characters_per_row: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ]),\n maximum_rows_per_caption: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: 5 }),\n undefined\n ]),\n style: faker.helpers.arrayElement([\n 
faker.helpers.arrayElement(Object.values(SubtitlesStyleEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n diarization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n diarization_config: faker.helpers.arrayElement([\n {\n ...{\n number_of_speakers: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ]),\n min_speakers: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n max_speakers: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ])\n }\n },\n undefined\n ]),\n translation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n translation_config: faker.helpers.arrayElement([\n {\n ...{\n target_languages: faker.helpers.arrayElements(\n Object.values(TranslationLanguageCodeEnum)\n ),\n model: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranslationModelEnum)),\n undefined\n ]),\n match_original_utterances: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n lipsync: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n context_adaptation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n context: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n informal: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n summarization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n summarization_config: faker.helpers.arrayElement([\n {\n ...{\n type: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SummaryTypesEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n moderation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n named_entity_recognition: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n chapterization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n name_consistency: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_spelling: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_spelling_config: faker.helpers.arrayElement([\n {\n ...{\n spelling_dictionary: {\n [faker.string.alphanumeric(5)]: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n }\n },\n undefined\n ]),\n structured_data_extraction: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n structured_data_extraction_config: faker.helpers.arrayElement([\n {\n ...{\n classes: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => [])\n }\n },\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n audio_to_llm: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n audio_to_llm_config: faker.helpers.arrayElement([\n {\n ...{\n prompts: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => [])\n }\n },\n undefined\n ]),\n sentences: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n display_mode: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n punctuation_enhanced: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n language_config: faker.helpers.arrayElement([\n {\n ...{\n languages: faker.helpers.arrayElement([\n 
faker.helpers.arrayElements(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ]),\n code_switching: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n audio_url: faker.helpers.arrayElement([faker.internet.url(), null])\n }\n },\n undefined\n ]),\n result: faker.helpers.arrayElement([\n {\n ...{\n metadata: {\n ...{\n audio_duration: faker.number.int({ min: undefined, max: undefined }),\n number_of_distinct_channels: faker.number.int({ min: 1, max: undefined }),\n billing_time: faker.number.int({ min: undefined, max: undefined }),\n transcription_time: faker.number.int({ min: undefined, max: undefined })\n }\n },\n transcription: faker.helpers.arrayElement([\n {\n ...{\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(Object.values(TranscriptionLanguageCodeEnum)),\n sentences: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })\n ),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })\n ),\n undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum))\n }))\n }\n },\n undefined\n ]),\n translation: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(Object.values(TranslationLanguageCodeEnum)),\n sentences: faker.helpers.arrayElement([\n Array.from(\n { length: 
faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })),\n undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum))\n }))\n }))\n }\n },\n undefined\n ]),\n summarization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n },\n undefined\n ]),\n moderation: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n },\n undefined\n ]),\n named_entity_recognition: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n entity: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n name_consistency: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n 
},\n undefined\n ]),\n speaker_reidentification: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n structured_data_extraction: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n audio_to_llm: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: {\n ...{\n prompt: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n response: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n }\n }))\n }\n },\n undefined\n ]),\n sentences: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n },\n undefined\n ]),\n display_mode: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n },\n undefined\n ]),\n chapterization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: 
undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: {}\n }\n },\n undefined\n ]),\n diarization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum))\n }))\n }\n },\n undefined\n ])\n }\n },\n undefined\n ]),\n ...overrideResponse\n})\n\nexport const getPreRecordedControllerGetAudioV2ResponseMock = (): Blob =>\n new Blob(faker.helpers.arrayElements(faker.word.words(10).split(\" \")))\n\nexport const getTranscriptionControllerInitPreRecordedJobV2ResponseMock = (\n overrideResponse: Partial<InitPreRecordedTranscriptionResponse> = {}\n): InitPreRecordedTranscriptionResponse => ({\n id: faker.string.uuid(),\n result_url: faker.internet.url(),\n ...overrideResponse\n})\n\nexport const getTranscriptionControllerListV2ResponseCustomVocabularyEntryDTOMock = (\n overrideResponse: Partial<CustomVocabularyEntryDTO> = {}\n): CustomVocabularyEntryDTO => ({\n ...{\n value: faker.string.alpha(20),\n intensity: faker.helpers.arrayElement([faker.number.int({ min: 0, max: 1 }), undefined]),\n pronunciations: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ]),\n language: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ])\n },\n ...overrideResponse\n})\n\nexport const getTranscriptionControllerListV2ResponsePreRecordedResponseMock = (\n overrideResponse: Partial<PreRecordedResponse> = {}\n): PreRecordedResponse => ({\n ...{\n id: faker.string.uuid(),\n request_id: faker.string.alpha(20),\n version: faker.number.int({ min: undefined, max: undefined }),\n status: faker.helpers.arrayElement([\"queued\", \"processing\", \"done\", \"error\"] as const),\n created_at: `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n completed_at: faker.helpers.arrayElement([\n faker.helpers.arrayElement([`${faker.date.past().toISOString().split(\".\")[0]}Z`, null]),\n undefined\n ]),\n custom_metadata: faker.helpers.arrayElement([{}, undefined]),\n error_code: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: 400, max: 599 }), null]),\n undefined\n ]),\n post_session_metadata: 
{},\n kind: faker.helpers.arrayElement([\"pre-recorded\"] as const),\n file: faker.helpers.arrayElement([\n {\n ...{\n id: faker.string.alpha(20),\n filename: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n source: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n audio_duration: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n null\n ]),\n number_of_channels: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n null\n ])\n }\n },\n undefined\n ]),\n request_params: faker.helpers.arrayElement([\n {\n ...{\n context_prompt: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n custom_vocabulary: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_vocabulary_config: faker.helpers.arrayElement([\n {\n ...{\n vocabulary: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() =>\n faker.helpers.arrayElement([\n { ...getTranscriptionControllerListV2ResponseCustomVocabularyEntryDTOMock() },\n faker.string.alpha(20)\n ])\n ),\n default_intensity: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: 1 }),\n undefined\n ])\n }\n },\n undefined\n ]),\n detect_language: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n enable_code_switching: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n code_switching_config: faker.helpers.arrayElement([\n {\n ...{\n languages: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n language: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ]),\n callback_url: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n callback: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n callback_config: faker.helpers.arrayElement([\n {\n ...{\n url: faker.internet.url(),\n method: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(CallbackMethodEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n subtitles_config: faker.helpers.arrayElement([\n {\n ...{\n formats: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(SubtitlesFormatEnum)),\n undefined\n ]),\n minimum_duration: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n maximum_duration: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: 30 }),\n undefined\n ]),\n maximum_characters_per_row: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ]),\n maximum_rows_per_caption: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: 5 }),\n undefined\n ]),\n style: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SubtitlesStyleEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n diarization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n diarization_config: faker.helpers.arrayElement([\n {\n ...{\n number_of_speakers: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ]),\n min_speakers: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n max_speakers: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ])\n }\n },\n undefined\n ]),\n 
translation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n translation_config: faker.helpers.arrayElement([\n {\n ...{\n target_languages: faker.helpers.arrayElements(\n Object.values(TranslationLanguageCodeEnum)\n ),\n model: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranslationModelEnum)),\n undefined\n ]),\n match_original_utterances: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n lipsync: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n context_adaptation: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n context: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n informal: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n summarization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n summarization_config: faker.helpers.arrayElement([\n {\n ...{\n type: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SummaryTypesEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n moderation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n named_entity_recognition: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n chapterization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n name_consistency: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_spelling: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_spelling_config: faker.helpers.arrayElement([\n {\n ...{\n spelling_dictionary: {\n [faker.string.alphanumeric(5)]: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n }\n },\n undefined\n ]),\n structured_data_extraction: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n structured_data_extraction_config: faker.helpers.arrayElement([\n {\n ...{\n classes: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => [])\n }\n },\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n audio_to_llm: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n audio_to_llm_config: faker.helpers.arrayElement([\n {\n ...{\n prompts: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => [])\n }\n },\n undefined\n ]),\n sentences: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n display_mode: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n punctuation_enhanced: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n language_config: faker.helpers.arrayElement([\n {\n ...{\n languages: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ]),\n code_switching: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n audio_url: faker.helpers.arrayElement([faker.internet.url(), null])\n }\n },\n undefined\n ]),\n result: faker.helpers.arrayElement([\n {\n ...{\n metadata: {\n ...{\n audio_duration: faker.number.int({ min: undefined, max: undefined }),\n number_of_distinct_channels: faker.number.int({ min: 1, max: undefined }),\n billing_time: faker.number.int({ min: undefined, max: undefined }),\n transcription_time: faker.number.int({ min: undefined, max: undefined })\n 
}\n },\n transcription: faker.helpers.arrayElement([\n {\n ...{\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(\n Object.values(TranscriptionLanguageCodeEnum)\n ),\n sentences: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })),\n undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum))\n }))\n }\n },\n undefined\n ]),\n translation: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(\n Object.values(TranslationLanguageCodeEnum)\n ),\n sentences: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) 
=> i + 1\n ).map(() => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })),\n undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(\n Object.values(TranscriptionLanguageCodeEnum)\n )\n }))\n }))\n }\n },\n undefined\n ]),\n summarization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n },\n undefined\n ]),\n moderation: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n },\n undefined\n ]),\n named_entity_recognition: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n entity: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n name_consistency: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n speaker_reidentification: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n structured_data_extraction: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, 
max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n audio_to_llm: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: {\n ...{\n prompt: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n response: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n }\n }))\n }\n },\n undefined\n ]),\n sentences: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n },\n undefined\n ]),\n display_mode: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n },\n undefined\n ]),\n chapterization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: {}\n }\n },\n undefined\n ]),\n diarization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 
}) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum))\n }))\n }\n },\n undefined\n ])\n }\n },\n undefined\n ])\n },\n ...overrideResponse\n})\n\nexport const getTranscriptionControllerListV2ResponseStreamingResponseMock = (\n overrideResponse: Partial<StreamingResponse> = {}\n): StreamingResponse => ({\n ...{\n id: faker.string.uuid(),\n request_id: faker.string.alpha(20),\n version: faker.number.int({ min: undefined, max: undefined }),\n status: faker.helpers.arrayElement([\"queued\", \"processing\", \"done\", \"error\"] as const),\n created_at: `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n completed_at: faker.helpers.arrayElement([\n faker.helpers.arrayElement([`${faker.date.past().toISOString().split(\".\")[0]}Z`, null]),\n undefined\n ]),\n custom_metadata: faker.helpers.arrayElement([{}, undefined]),\n error_code: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: 400, max: 599 }), null]),\n undefined\n ]),\n post_session_metadata: {},\n kind: faker.helpers.arrayElement([\"live\"] as const),\n file: faker.helpers.arrayElement([\n {\n ...{\n id: faker.string.alpha(20),\n filename: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n source: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n audio_duration: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n null\n ]),\n number_of_channels: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n null\n ])\n }\n },\n undefined\n ]),\n request_params: faker.helpers.arrayElement([\n {\n ...{\n encoding: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(StreamingSupportedEncodingEnum)),\n undefined\n ]),\n bit_depth: faker.helpers.arrayElement([\n faker.helpers.arrayElement([8, 16, 24, 32] as const),\n undefined\n ]),\n sample_rate: faker.helpers.arrayElement([\n faker.helpers.arrayElement([8000, 16000, 32000, 44100, 48000] as const),\n undefined\n ]),\n channels: faker.helpers.arrayElement([faker.number.int({ min: 1, max: 8 }), undefined]),\n model: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(StreamingSupportedModels)),\n undefined\n ]),\n endpointing: faker.helpers.arrayElement([\n faker.number.int({ min: 0.01, max: 10 }),\n undefined\n ]),\n maximum_duration_without_endpointing: faker.helpers.arrayElement([\n faker.number.int({ min: 5, max: 60 }),\n undefined\n ]),\n language_config: faker.helpers.arrayElement([\n {\n ...{\n languages: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ]),\n code_switching: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n pre_processing: 
faker.helpers.arrayElement([\n {\n ...{\n audio_enhancer: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n speech_threshold: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: 1 }),\n undefined\n ])\n }\n },\n undefined\n ]),\n realtime_processing: faker.helpers.arrayElement([\n {\n ...{\n custom_vocabulary: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n custom_vocabulary_config: faker.helpers.arrayElement([\n {\n ...{\n vocabulary: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() =>\n faker.helpers.arrayElement([\n {\n ...getTranscriptionControllerListV2ResponseCustomVocabularyEntryDTOMock()\n },\n faker.string.alpha(20)\n ])\n ),\n default_intensity: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: 1 }),\n undefined\n ])\n }\n },\n undefined\n ]),\n custom_spelling: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_spelling_config: faker.helpers.arrayElement([\n {\n ...{\n spelling_dictionary: {\n [faker.string.alphanumeric(5)]: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n }\n },\n undefined\n ]),\n translation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n translation_config: faker.helpers.arrayElement([\n {\n ...{\n target_languages: faker.helpers.arrayElements(\n Object.values(TranslationLanguageCodeEnum)\n ),\n model: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranslationModelEnum)),\n undefined\n ]),\n match_original_utterances: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n lipsync: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n context_adaptation: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n context: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n informal: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n named_entity_recognition: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ]),\n post_processing: faker.helpers.arrayElement([\n {\n ...{\n summarization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n summarization_config: faker.helpers.arrayElement([\n {\n ...{\n type: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SummaryTypesEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n chapterization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n messages_config: faker.helpers.arrayElement([\n {\n ...{\n receive_partial_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_final_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_speech_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_pre_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_realtime_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_post_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_acknowledgments: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n 
]),\n receive_errors: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n receive_lifecycle_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ]),\n callback: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n callback_config: faker.helpers.arrayElement([\n {\n ...{\n url: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n receive_partial_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_final_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_speech_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_pre_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_realtime_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_post_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_acknowledgments: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_errors: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n receive_lifecycle_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ])\n }\n },\n undefined\n ]),\n result: faker.helpers.arrayElement([\n {\n ...{\n metadata: {\n ...{\n audio_duration: faker.number.int({ min: undefined, max: undefined }),\n number_of_distinct_channels: faker.number.int({ min: 1, max: undefined }),\n billing_time: faker.number.int({ min: undefined, max: undefined }),\n transcription_time: faker.number.int({ min: undefined, max: undefined })\n }\n },\n transcription: faker.helpers.arrayElement([\n {\n ...{\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(\n Object.values(TranscriptionLanguageCodeEnum)\n ),\n sentences: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })),\n undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: 
faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum))\n }))\n }\n },\n undefined\n ]),\n translation: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(\n Object.values(TranslationLanguageCodeEnum)\n ),\n sentences: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })),\n undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(\n Object.values(TranscriptionLanguageCodeEnum)\n )\n }))\n }))\n }\n },\n undefined\n ]),\n summarization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n },\n undefined\n ]),\n named_entity_recognition: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ 
min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n entity: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n chapterization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: {}\n }\n },\n undefined\n ]),\n messages: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ])\n }\n },\n undefined\n ])\n },\n ...overrideResponse\n})\n\nexport const getTranscriptionControllerListV2ResponseMock = (\n overrideResponse: Partial<ListTranscriptionResponse> = {}\n): ListTranscriptionResponse => ({\n first: faker.internet.url(),\n current: faker.internet.url(),\n next: faker.helpers.arrayElement([faker.internet.url(), null]),\n items: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.helpers.arrayElement([\n { ...getTranscriptionControllerListV2ResponsePreRecordedResponseMock() },\n { ...getTranscriptionControllerListV2ResponseStreamingResponseMock() }\n ])\n ),\n ...overrideResponse\n})\n\nexport const getTranscriptionControllerGetTranscriptV2ResponseCustomVocabularyEntryDTOMock = (\n overrideResponse: Partial<CustomVocabularyEntryDTO> = {}\n): CustomVocabularyEntryDTO => ({\n ...{\n value: faker.string.alpha(20),\n intensity: faker.helpers.arrayElement([faker.number.int({ min: 0, max: 1 }), undefined]),\n pronunciations: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ]),\n language: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ])\n },\n ...overrideResponse\n})\n\nexport const getTranscriptionControllerGetTranscriptV2ResponsePreRecordedResponseMock = (\n overrideResponse: Partial<PreRecordedResponse> = {}\n): PreRecordedResponse => ({\n ...{\n id: faker.string.uuid(),\n request_id: faker.string.alpha(20),\n version: faker.number.int({ min: undefined, max: undefined }),\n status: faker.helpers.arrayElement([\"queued\", \"processing\", \"done\", \"error\"] as const),\n created_at: `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n completed_at: faker.helpers.arrayElement([\n faker.helpers.arrayElement([`${faker.date.past().toISOString().split(\".\")[0]}Z`, null]),\n undefined\n ]),\n custom_metadata: faker.helpers.arrayElement([{}, undefined]),\n error_code: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: 400, max: 599 }), null]),\n undefined\n ]),\n post_session_metadata: {},\n kind: faker.helpers.arrayElement([\"pre-recorded\"] as 
const),\n file: faker.helpers.arrayElement([\n {\n ...{\n id: faker.string.alpha(20),\n filename: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n source: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n audio_duration: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n null\n ]),\n number_of_channels: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n null\n ])\n }\n },\n undefined\n ]),\n request_params: faker.helpers.arrayElement([\n {\n ...{\n context_prompt: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n custom_vocabulary: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_vocabulary_config: faker.helpers.arrayElement([\n {\n ...{\n vocabulary: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() =>\n faker.helpers.arrayElement([\n {\n ...getTranscriptionControllerGetTranscriptV2ResponseCustomVocabularyEntryDTOMock()\n },\n faker.string.alpha(20)\n ])\n ),\n default_intensity: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: 1 }),\n undefined\n ])\n }\n },\n undefined\n ]),\n detect_language: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n enable_code_switching: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n code_switching_config: faker.helpers.arrayElement([\n {\n ...{\n languages: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n language: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ]),\n callback_url: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n callback: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n callback_config: faker.helpers.arrayElement([\n {\n ...{\n url: faker.internet.url(),\n method: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(CallbackMethodEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n subtitles_config: faker.helpers.arrayElement([\n {\n ...{\n formats: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(SubtitlesFormatEnum)),\n undefined\n ]),\n minimum_duration: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n maximum_duration: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: 30 }),\n undefined\n ]),\n maximum_characters_per_row: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ]),\n maximum_rows_per_caption: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: 5 }),\n undefined\n ]),\n style: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SubtitlesStyleEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n diarization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n diarization_config: faker.helpers.arrayElement([\n {\n ...{\n number_of_speakers: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ]),\n min_speakers: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n max_speakers: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ])\n }\n },\n undefined\n ]),\n translation: 
faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n translation_config: faker.helpers.arrayElement([\n {\n ...{\n target_languages: faker.helpers.arrayElements(\n Object.values(TranslationLanguageCodeEnum)\n ),\n model: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranslationModelEnum)),\n undefined\n ]),\n match_original_utterances: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n lipsync: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n context_adaptation: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n context: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n informal: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n summarization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n summarization_config: faker.helpers.arrayElement([\n {\n ...{\n type: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SummaryTypesEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n moderation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n named_entity_recognition: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n chapterization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n name_consistency: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_spelling: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_spelling_config: faker.helpers.arrayElement([\n {\n ...{\n spelling_dictionary: {\n [faker.string.alphanumeric(5)]: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n }\n },\n undefined\n ]),\n structured_data_extraction: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n structured_data_extraction_config: faker.helpers.arrayElement([\n {\n ...{\n classes: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => [])\n }\n },\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n audio_to_llm: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n audio_to_llm_config: faker.helpers.arrayElement([\n {\n ...{\n prompts: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => [])\n }\n },\n undefined\n ]),\n sentences: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n display_mode: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n punctuation_enhanced: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n language_config: faker.helpers.arrayElement([\n {\n ...{\n languages: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ]),\n code_switching: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n audio_url: faker.helpers.arrayElement([faker.internet.url(), null])\n }\n },\n undefined\n ]),\n result: faker.helpers.arrayElement([\n {\n ...{\n metadata: {\n ...{\n audio_duration: faker.number.int({ min: undefined, max: undefined }),\n number_of_distinct_channels: faker.number.int({ min: 1, max: undefined }),\n billing_time: faker.number.int({ min: undefined, max: undefined }),\n transcription_time: faker.number.int({ min: undefined, max: undefined })\n }\n },\n 
transcription: faker.helpers.arrayElement([\n {\n ...{\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(\n Object.values(TranscriptionLanguageCodeEnum)\n ),\n sentences: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })),\n undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum))\n }))\n }\n },\n undefined\n ]),\n translation: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(\n Object.values(TranslationLanguageCodeEnum)\n ),\n sentences: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n 
).map(() => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })),\n undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(\n Object.values(TranscriptionLanguageCodeEnum)\n )\n }))\n }))\n }\n },\n undefined\n ]),\n summarization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n },\n undefined\n ]),\n moderation: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n },\n undefined\n ]),\n named_entity_recognition: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n entity: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n name_consistency: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n speaker_reidentification: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n structured_data_extraction: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: 
undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n audio_to_llm: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: {\n ...{\n prompt: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n response: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n }\n }))\n }\n },\n undefined\n ]),\n sentences: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n },\n undefined\n ]),\n display_mode: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n },\n undefined\n ]),\n chapterization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: {}\n }\n },\n undefined\n ]),\n diarization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) 
},\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum))\n }))\n }\n },\n undefined\n ])\n }\n },\n undefined\n ])\n },\n ...overrideResponse\n})\n\nexport const getTranscriptionControllerGetTranscriptV2ResponseStreamingResponseMock = (\n overrideResponse: Partial<StreamingResponse> = {}\n): StreamingResponse => ({\n ...{\n id: faker.string.uuid(),\n request_id: faker.string.alpha(20),\n version: faker.number.int({ min: undefined, max: undefined }),\n status: faker.helpers.arrayElement([\"queued\", \"processing\", \"done\", \"error\"] as const),\n created_at: `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n completed_at: faker.helpers.arrayElement([\n faker.helpers.arrayElement([`${faker.date.past().toISOString().split(\".\")[0]}Z`, null]),\n undefined\n ]),\n custom_metadata: faker.helpers.arrayElement([{}, undefined]),\n error_code: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: 400, max: 599 }), null]),\n undefined\n ]),\n post_session_metadata: {},\n kind: faker.helpers.arrayElement([\"live\"] as const),\n file: faker.helpers.arrayElement([\n {\n ...{\n id: faker.string.alpha(20),\n filename: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n source: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n audio_duration: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n null\n ]),\n number_of_channels: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n null\n ])\n }\n },\n undefined\n ]),\n request_params: faker.helpers.arrayElement([\n {\n ...{\n encoding: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(StreamingSupportedEncodingEnum)),\n undefined\n ]),\n bit_depth: faker.helpers.arrayElement([\n faker.helpers.arrayElement([8, 16, 24, 32] as const),\n undefined\n ]),\n sample_rate: faker.helpers.arrayElement([\n faker.helpers.arrayElement([8000, 16000, 32000, 44100, 48000] as const),\n undefined\n ]),\n channels: faker.helpers.arrayElement([faker.number.int({ min: 1, max: 8 }), undefined]),\n model: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(StreamingSupportedModels)),\n undefined\n ]),\n endpointing: faker.helpers.arrayElement([\n faker.number.int({ min: 0.01, max: 10 }),\n undefined\n ]),\n maximum_duration_without_endpointing: faker.helpers.arrayElement([\n faker.number.int({ min: 5, max: 60 }),\n undefined\n ]),\n language_config: faker.helpers.arrayElement([\n {\n ...{\n languages: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ]),\n code_switching: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n 
pre_processing: faker.helpers.arrayElement([\n {\n ...{\n audio_enhancer: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n speech_threshold: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: 1 }),\n undefined\n ])\n }\n },\n undefined\n ]),\n realtime_processing: faker.helpers.arrayElement([\n {\n ...{\n custom_vocabulary: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n custom_vocabulary_config: faker.helpers.arrayElement([\n {\n ...{\n vocabulary: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() =>\n faker.helpers.arrayElement([\n {\n ...getTranscriptionControllerGetTranscriptV2ResponseCustomVocabularyEntryDTOMock()\n },\n faker.string.alpha(20)\n ])\n ),\n default_intensity: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: 1 }),\n undefined\n ])\n }\n },\n undefined\n ]),\n custom_spelling: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_spelling_config: faker.helpers.arrayElement([\n {\n ...{\n spelling_dictionary: {\n [faker.string.alphanumeric(5)]: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n }\n },\n undefined\n ]),\n translation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n translation_config: faker.helpers.arrayElement([\n {\n ...{\n target_languages: faker.helpers.arrayElements(\n Object.values(TranslationLanguageCodeEnum)\n ),\n model: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranslationModelEnum)),\n undefined\n ]),\n match_original_utterances: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n lipsync: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n context_adaptation: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n context: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n informal: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n named_entity_recognition: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ]),\n post_processing: faker.helpers.arrayElement([\n {\n ...{\n summarization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n summarization_config: faker.helpers.arrayElement([\n {\n ...{\n type: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SummaryTypesEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n chapterization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n messages_config: faker.helpers.arrayElement([\n {\n ...{\n receive_partial_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_final_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_speech_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_pre_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_realtime_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_post_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_acknowledgments: faker.helpers.arrayElement([\n 
faker.datatype.boolean(),\n undefined\n ]),\n receive_errors: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n receive_lifecycle_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ]),\n callback: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n callback_config: faker.helpers.arrayElement([\n {\n ...{\n url: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n receive_partial_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_final_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_speech_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_pre_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_realtime_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_post_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_acknowledgments: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_errors: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n receive_lifecycle_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ])\n }\n },\n undefined\n ]),\n result: faker.helpers.arrayElement([\n {\n ...{\n metadata: {\n ...{\n audio_duration: faker.number.int({ min: undefined, max: undefined }),\n number_of_distinct_channels: faker.number.int({ min: 1, max: undefined }),\n billing_time: faker.number.int({ min: undefined, max: undefined }),\n transcription_time: faker.number.int({ min: undefined, max: undefined })\n }\n },\n transcription: faker.helpers.arrayElement([\n {\n ...{\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(\n Object.values(TranscriptionLanguageCodeEnum)\n ),\n sentences: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })),\n undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: 
undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum))\n }))\n }\n },\n undefined\n ]),\n translation: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(\n Object.values(TranslationLanguageCodeEnum)\n ),\n sentences: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })),\n undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(\n Object.values(TranscriptionLanguageCodeEnum)\n )\n }))\n }))\n }\n },\n undefined\n ]),\n summarization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n },\n undefined\n ]),\n named_entity_recognition: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: 
faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n entity: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n chapterization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: {}\n }\n },\n undefined\n ]),\n messages: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ])\n }\n },\n undefined\n ])\n },\n ...overrideResponse\n})\n\nexport const getTranscriptionControllerGetTranscriptV2ResponseMock =\n (): TranscriptionControllerGetTranscriptV2200 =>\n faker.helpers.arrayElement([\n { ...getTranscriptionControllerGetTranscriptV2ResponsePreRecordedResponseMock() },\n { ...getTranscriptionControllerGetTranscriptV2ResponseStreamingResponseMock() }\n ])\n\nexport const getTranscriptionControllerGetAudioV2ResponseMock = (): Blob =>\n new Blob(faker.helpers.arrayElements(faker.word.words(10).split(\" \")))\n\nexport const getHistoryControllerGetListV1ResponseCustomVocabularyEntryDTOMock = (\n overrideResponse: Partial<CustomVocabularyEntryDTO> = {}\n): CustomVocabularyEntryDTO => ({\n ...{\n value: faker.string.alpha(20),\n intensity: faker.helpers.arrayElement([faker.number.int({ min: 0, max: 1 }), undefined]),\n pronunciations: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ]),\n language: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ])\n },\n ...overrideResponse\n})\n\nexport const getHistoryControllerGetListV1ResponsePreRecordedResponseMock = (\n overrideResponse: Partial<PreRecordedResponse> = {}\n): PreRecordedResponse => ({\n ...{\n id: faker.string.uuid(),\n request_id: faker.string.alpha(20),\n version: faker.number.int({ min: undefined, max: undefined }),\n status: faker.helpers.arrayElement([\"queued\", \"processing\", \"done\", \"error\"] as const),\n created_at: `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n completed_at: faker.helpers.arrayElement([\n faker.helpers.arrayElement([`${faker.date.past().toISOString().split(\".\")[0]}Z`, null]),\n undefined\n ]),\n custom_metadata: faker.helpers.arrayElement([{}, undefined]),\n error_code: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: 400, max: 599 }), null]),\n undefined\n ]),\n post_session_metadata: {},\n kind: faker.helpers.arrayElement([\"pre-recorded\"] as const),\n file: faker.helpers.arrayElement([\n {\n ...{\n id: faker.string.alpha(20),\n filename: 
faker.helpers.arrayElement([faker.string.alpha(20), null]),\n source: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n audio_duration: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n null\n ]),\n number_of_channels: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n null\n ])\n }\n },\n undefined\n ]),\n request_params: faker.helpers.arrayElement([\n {\n ...{\n context_prompt: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n custom_vocabulary: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_vocabulary_config: faker.helpers.arrayElement([\n {\n ...{\n vocabulary: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() =>\n faker.helpers.arrayElement([\n { ...getHistoryControllerGetListV1ResponseCustomVocabularyEntryDTOMock() },\n faker.string.alpha(20)\n ])\n ),\n default_intensity: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: 1 }),\n undefined\n ])\n }\n },\n undefined\n ]),\n detect_language: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n enable_code_switching: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n code_switching_config: faker.helpers.arrayElement([\n {\n ...{\n languages: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n language: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ]),\n callback_url: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n callback: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n callback_config: faker.helpers.arrayElement([\n {\n ...{\n url: faker.internet.url(),\n method: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(CallbackMethodEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n subtitles_config: faker.helpers.arrayElement([\n {\n ...{\n formats: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(SubtitlesFormatEnum)),\n undefined\n ]),\n minimum_duration: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n maximum_duration: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: 30 }),\n undefined\n ]),\n maximum_characters_per_row: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ]),\n maximum_rows_per_caption: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: 5 }),\n undefined\n ]),\n style: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SubtitlesStyleEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n diarization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n diarization_config: faker.helpers.arrayElement([\n {\n ...{\n number_of_speakers: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ]),\n min_speakers: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n max_speakers: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ])\n }\n },\n undefined\n ]),\n translation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n translation_config: faker.helpers.arrayElement([\n {\n ...{\n target_languages: 
faker.helpers.arrayElements(\n Object.values(TranslationLanguageCodeEnum)\n ),\n model: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranslationModelEnum)),\n undefined\n ]),\n match_original_utterances: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n lipsync: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n context_adaptation: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n context: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n informal: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n summarization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n summarization_config: faker.helpers.arrayElement([\n {\n ...{\n type: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SummaryTypesEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n moderation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n named_entity_recognition: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n chapterization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n name_consistency: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_spelling: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_spelling_config: faker.helpers.arrayElement([\n {\n ...{\n spelling_dictionary: {\n [faker.string.alphanumeric(5)]: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n }\n },\n undefined\n ]),\n structured_data_extraction: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n structured_data_extraction_config: faker.helpers.arrayElement([\n {\n ...{\n classes: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => [])\n }\n },\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n audio_to_llm: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n audio_to_llm_config: faker.helpers.arrayElement([\n {\n ...{\n prompts: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => [])\n }\n },\n undefined\n ]),\n sentences: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n display_mode: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n punctuation_enhanced: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n language_config: faker.helpers.arrayElement([\n {\n ...{\n languages: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ]),\n code_switching: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n audio_url: faker.helpers.arrayElement([faker.internet.url(), null])\n }\n },\n undefined\n ]),\n result: faker.helpers.arrayElement([\n {\n ...{\n metadata: {\n ...{\n audio_duration: faker.number.int({ min: undefined, max: undefined }),\n number_of_distinct_channels: faker.number.int({ min: 1, max: undefined }),\n billing_time: faker.number.int({ min: undefined, max: undefined }),\n transcription_time: faker.number.int({ min: undefined, max: undefined })\n }\n },\n transcription: faker.helpers.arrayElement([\n {\n ...{\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(\n 
Object.values(TranscriptionLanguageCodeEnum)\n ),\n sentences: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })),\n undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum))\n }))\n }\n },\n undefined\n ]),\n translation: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(\n Object.values(TranslationLanguageCodeEnum)\n ),\n sentences: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })),\n 
undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(\n Object.values(TranscriptionLanguageCodeEnum)\n )\n }))\n }))\n }\n },\n undefined\n ]),\n summarization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n },\n undefined\n ]),\n moderation: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n },\n undefined\n ]),\n named_entity_recognition: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n entity: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n name_consistency: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n speaker_reidentification: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n structured_data_extraction: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n 
message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n audio_to_llm: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: {\n ...{\n prompt: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n response: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n }\n }))\n }\n },\n undefined\n ]),\n sentences: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n },\n undefined\n ]),\n display_mode: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n },\n undefined\n ]),\n chapterization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: {}\n }\n },\n undefined\n ]),\n diarization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, 
max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum))\n }))\n }\n },\n undefined\n ])\n }\n },\n undefined\n ])\n },\n ...overrideResponse\n})\n\nexport const getHistoryControllerGetListV1ResponseStreamingResponseMock = (\n overrideResponse: Partial<StreamingResponse> = {}\n): StreamingResponse => ({\n ...{\n id: faker.string.uuid(),\n request_id: faker.string.alpha(20),\n version: faker.number.int({ min: undefined, max: undefined }),\n status: faker.helpers.arrayElement([\"queued\", \"processing\", \"done\", \"error\"] as const),\n created_at: `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n completed_at: faker.helpers.arrayElement([\n faker.helpers.arrayElement([`${faker.date.past().toISOString().split(\".\")[0]}Z`, null]),\n undefined\n ]),\n custom_metadata: faker.helpers.arrayElement([{}, undefined]),\n error_code: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: 400, max: 599 }), null]),\n undefined\n ]),\n post_session_metadata: {},\n kind: faker.helpers.arrayElement([\"live\"] as const),\n file: faker.helpers.arrayElement([\n {\n ...{\n id: faker.string.alpha(20),\n filename: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n source: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n audio_duration: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n null\n ]),\n number_of_channels: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n null\n ])\n }\n },\n undefined\n ]),\n request_params: faker.helpers.arrayElement([\n {\n ...{\n encoding: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(StreamingSupportedEncodingEnum)),\n undefined\n ]),\n bit_depth: faker.helpers.arrayElement([\n faker.helpers.arrayElement([8, 16, 24, 32] as const),\n undefined\n ]),\n sample_rate: faker.helpers.arrayElement([\n faker.helpers.arrayElement([8000, 16000, 32000, 44100, 48000] as const),\n undefined\n ]),\n channels: faker.helpers.arrayElement([faker.number.int({ min: 1, max: 8 }), undefined]),\n model: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(StreamingSupportedModels)),\n undefined\n ]),\n endpointing: faker.helpers.arrayElement([\n faker.number.int({ min: 0.01, max: 10 }),\n undefined\n ]),\n maximum_duration_without_endpointing: faker.helpers.arrayElement([\n faker.number.int({ min: 5, max: 60 }),\n undefined\n ]),\n language_config: faker.helpers.arrayElement([\n {\n ...{\n languages: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ]),\n code_switching: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n pre_processing: faker.helpers.arrayElement([\n {\n ...{\n audio_enhancer: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n speech_threshold: 
faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: 1 }),\n undefined\n ])\n }\n },\n undefined\n ]),\n realtime_processing: faker.helpers.arrayElement([\n {\n ...{\n custom_vocabulary: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n custom_vocabulary_config: faker.helpers.arrayElement([\n {\n ...{\n vocabulary: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() =>\n faker.helpers.arrayElement([\n {\n ...getHistoryControllerGetListV1ResponseCustomVocabularyEntryDTOMock()\n },\n faker.string.alpha(20)\n ])\n ),\n default_intensity: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: 1 }),\n undefined\n ])\n }\n },\n undefined\n ]),\n custom_spelling: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_spelling_config: faker.helpers.arrayElement([\n {\n ...{\n spelling_dictionary: {\n [faker.string.alphanumeric(5)]: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n }\n },\n undefined\n ]),\n translation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n translation_config: faker.helpers.arrayElement([\n {\n ...{\n target_languages: faker.helpers.arrayElements(\n Object.values(TranslationLanguageCodeEnum)\n ),\n model: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranslationModelEnum)),\n undefined\n ]),\n match_original_utterances: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n lipsync: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n context_adaptation: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n context: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n informal: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n named_entity_recognition: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ]),\n post_processing: faker.helpers.arrayElement([\n {\n ...{\n summarization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n summarization_config: faker.helpers.arrayElement([\n {\n ...{\n type: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SummaryTypesEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n chapterization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n messages_config: faker.helpers.arrayElement([\n {\n ...{\n receive_partial_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_final_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_speech_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_pre_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_realtime_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_post_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_acknowledgments: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_errors: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n receive_lifecycle_events: faker.helpers.arrayElement([\n 
faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ]),\n callback: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n callback_config: faker.helpers.arrayElement([\n {\n ...{\n url: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n receive_partial_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_final_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_speech_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_pre_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_realtime_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_post_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_acknowledgments: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_errors: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n receive_lifecycle_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ])\n }\n },\n undefined\n ]),\n result: faker.helpers.arrayElement([\n {\n ...{\n metadata: {\n ...{\n audio_duration: faker.number.int({ min: undefined, max: undefined }),\n number_of_distinct_channels: faker.number.int({ min: 1, max: undefined }),\n billing_time: faker.number.int({ min: undefined, max: undefined }),\n transcription_time: faker.number.int({ min: undefined, max: undefined })\n }\n },\n transcription: faker.helpers.arrayElement([\n {\n ...{\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(\n Object.values(TranscriptionLanguageCodeEnum)\n ),\n sentences: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })),\n undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: 
faker.string.alpha(20),\n language: faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum))\n }))\n }\n },\n undefined\n ]),\n translation: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(\n Object.values(TranslationLanguageCodeEnum)\n ),\n sentences: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })),\n undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(\n Object.values(TranscriptionLanguageCodeEnum)\n )\n }))\n }))\n }\n },\n undefined\n ]),\n summarization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n },\n undefined\n ]),\n named_entity_recognition: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: 
faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n entity: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n chapterization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: {}\n }\n },\n undefined\n ]),\n messages: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ])\n }\n },\n undefined\n ])\n },\n ...overrideResponse\n})\n\nexport const getHistoryControllerGetListV1ResponseMock = (\n overrideResponse: Partial<ListHistoryResponse> = {}\n): ListHistoryResponse => ({\n first: faker.internet.url(),\n current: faker.internet.url(),\n next: faker.helpers.arrayElement([faker.internet.url(), null]),\n items: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.helpers.arrayElement([\n { ...getHistoryControllerGetListV1ResponsePreRecordedResponseMock() },\n { ...getHistoryControllerGetListV1ResponseStreamingResponseMock() }\n ])\n ),\n ...overrideResponse\n})\n\nexport const getStreamingControllerInitStreamingSessionV2ResponseMock = (\n overrideResponse: Partial<InitStreamingResponse> = {}\n): InitStreamingResponse => ({\n id: faker.string.uuid(),\n created_at: `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n url: faker.internet.url(),\n ...overrideResponse\n})\n\nexport const getStreamingControllerGetStreamingJobsV2ResponseCustomVocabularyEntryDTOMock = (\n overrideResponse: Partial<CustomVocabularyEntryDTO> = {}\n): CustomVocabularyEntryDTO => ({\n ...{\n value: faker.string.alpha(20),\n intensity: faker.helpers.arrayElement([faker.number.int({ min: 0, max: 1 }), undefined]),\n pronunciations: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ]),\n language: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ])\n },\n ...overrideResponse\n})\n\nexport const getStreamingControllerGetStreamingJobsV2ResponseMock = (\n overrideResponse: Partial<ListStreamingResponse> = {}\n): ListStreamingResponse => ({\n first: faker.internet.url(),\n current: faker.internet.url(),\n next: faker.helpers.arrayElement([faker.internet.url(), null]),\n items: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n id: faker.string.uuid(),\n request_id: faker.string.alpha(20),\n version: faker.number.int({ min: undefined, max: undefined }),\n status: faker.helpers.arrayElement([\"queued\", \"processing\", \"done\", \"error\"] as const),\n created_at: `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n completed_at: faker.helpers.arrayElement([\n 
faker.helpers.arrayElement([`${faker.date.past().toISOString().split(\".\")[0]}Z`, null]),\n undefined\n ]),\n custom_metadata: faker.helpers.arrayElement([{}, undefined]),\n error_code: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: 400, max: 599 }), null]),\n undefined\n ]),\n post_session_metadata: {},\n kind: faker.helpers.arrayElement([\"live\"] as const),\n file: faker.helpers.arrayElement([\n {\n ...{\n id: faker.string.alpha(20),\n filename: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n source: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n audio_duration: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n null\n ]),\n number_of_channels: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n null\n ])\n }\n },\n undefined\n ]),\n request_params: faker.helpers.arrayElement([\n {\n ...{\n encoding: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(StreamingSupportedEncodingEnum)),\n undefined\n ]),\n bit_depth: faker.helpers.arrayElement([\n faker.helpers.arrayElement([8, 16, 24, 32] as const),\n undefined\n ]),\n sample_rate: faker.helpers.arrayElement([\n faker.helpers.arrayElement([8000, 16000, 32000, 44100, 48000] as const),\n undefined\n ]),\n channels: faker.helpers.arrayElement([faker.number.int({ min: 1, max: 8 }), undefined]),\n model: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(StreamingSupportedModels)),\n undefined\n ]),\n endpointing: faker.helpers.arrayElement([\n faker.number.int({ min: 0.01, max: 10 }),\n undefined\n ]),\n maximum_duration_without_endpointing: faker.helpers.arrayElement([\n faker.number.int({ min: 5, max: 60 }),\n undefined\n ]),\n language_config: faker.helpers.arrayElement([\n {\n ...{\n languages: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ]),\n code_switching: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n pre_processing: faker.helpers.arrayElement([\n {\n ...{\n audio_enhancer: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n speech_threshold: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: 1 }),\n undefined\n ])\n }\n },\n undefined\n ]),\n realtime_processing: faker.helpers.arrayElement([\n {\n ...{\n custom_vocabulary: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n custom_vocabulary_config: faker.helpers.arrayElement([\n {\n ...{\n vocabulary: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() =>\n faker.helpers.arrayElement([\n {\n ...getStreamingControllerGetStreamingJobsV2ResponseCustomVocabularyEntryDTOMock()\n },\n faker.string.alpha(20)\n ])\n ),\n default_intensity: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: 1 }),\n undefined\n ])\n }\n },\n undefined\n ]),\n custom_spelling: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_spelling_config: faker.helpers.arrayElement([\n {\n ...{\n spelling_dictionary: {\n [faker.string.alphanumeric(5)]: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n }\n },\n undefined\n ]),\n translation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n translation_config: faker.helpers.arrayElement([\n {\n ...{\n target_languages: faker.helpers.arrayElements(\n 
Object.values(TranslationLanguageCodeEnum)\n ),\n model: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranslationModelEnum)),\n undefined\n ]),\n match_original_utterances: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n lipsync: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n context_adaptation: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n context: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n informal: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n named_entity_recognition: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ]),\n post_processing: faker.helpers.arrayElement([\n {\n ...{\n summarization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n summarization_config: faker.helpers.arrayElement([\n {\n ...{\n type: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SummaryTypesEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n chapterization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n messages_config: faker.helpers.arrayElement([\n {\n ...{\n receive_partial_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_final_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_speech_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_pre_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_realtime_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_post_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_acknowledgments: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_errors: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n receive_lifecycle_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ]),\n callback: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n callback_config: faker.helpers.arrayElement([\n {\n ...{\n url: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n receive_partial_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_final_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_speech_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_pre_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_realtime_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_post_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_acknowledgments: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_errors: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n receive_lifecycle_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ])\n }\n },\n undefined\n ]),\n result: 
faker.helpers.arrayElement([\n {\n ...{\n metadata: {\n ...{\n audio_duration: faker.number.int({ min: undefined, max: undefined }),\n number_of_distinct_channels: faker.number.int({ min: 1, max: undefined }),\n billing_time: faker.number.int({ min: undefined, max: undefined }),\n transcription_time: faker.number.int({ min: undefined, max: undefined })\n }\n },\n transcription: faker.helpers.arrayElement([\n {\n ...{\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(\n Object.values(TranscriptionLanguageCodeEnum)\n ),\n sentences: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })),\n undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum))\n }))\n }\n },\n undefined\n ]),\n translation: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(\n Object.values(TranslationLanguageCodeEnum)\n ),\n sentences: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined 
}),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })),\n undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(\n Object.values(TranscriptionLanguageCodeEnum)\n )\n }))\n }))\n }\n },\n undefined\n ]),\n summarization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n },\n undefined\n ]),\n named_entity_recognition: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n entity: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n chapterization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: {}\n }\n },\n undefined\n ]),\n messages: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ])\n }\n },\n undefined\n ])\n })),\n ...overrideResponse\n})\n\nexport const 
getStreamingControllerGetStreamingJobV2ResponseCustomVocabularyEntryDTOMock = (\n overrideResponse: Partial<CustomVocabularyEntryDTO> = {}\n): CustomVocabularyEntryDTO => ({\n ...{\n value: faker.string.alpha(20),\n intensity: faker.helpers.arrayElement([faker.number.int({ min: 0, max: 1 }), undefined]),\n pronunciations: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ]),\n language: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ])\n },\n ...overrideResponse\n})\n\nexport const getStreamingControllerGetStreamingJobV2ResponseMock = (\n overrideResponse: Partial<StreamingResponse> = {}\n): StreamingResponse => ({\n id: faker.string.uuid(),\n request_id: faker.string.alpha(20),\n version: faker.number.int({ min: undefined, max: undefined }),\n status: faker.helpers.arrayElement([\"queued\", \"processing\", \"done\", \"error\"] as const),\n created_at: `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n completed_at: faker.helpers.arrayElement([\n faker.helpers.arrayElement([`${faker.date.past().toISOString().split(\".\")[0]}Z`, null]),\n undefined\n ]),\n custom_metadata: faker.helpers.arrayElement([{}, undefined]),\n error_code: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: 400, max: 599 }), null]),\n undefined\n ]),\n post_session_metadata: {},\n kind: faker.helpers.arrayElement([\"live\"] as const),\n file: faker.helpers.arrayElement([\n {\n ...{\n id: faker.string.alpha(20),\n filename: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n source: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n audio_duration: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n null\n ]),\n number_of_channels: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n null\n ])\n }\n },\n undefined\n ]),\n request_params: faker.helpers.arrayElement([\n {\n ...{\n encoding: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(StreamingSupportedEncodingEnum)),\n undefined\n ]),\n bit_depth: faker.helpers.arrayElement([\n faker.helpers.arrayElement([8, 16, 24, 32] as const),\n undefined\n ]),\n sample_rate: faker.helpers.arrayElement([\n faker.helpers.arrayElement([8000, 16000, 32000, 44100, 48000] as const),\n undefined\n ]),\n channels: faker.helpers.arrayElement([faker.number.int({ min: 1, max: 8 }), undefined]),\n model: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(StreamingSupportedModels)),\n undefined\n ]),\n endpointing: faker.helpers.arrayElement([\n faker.number.int({ min: 0.01, max: 10 }),\n undefined\n ]),\n maximum_duration_without_endpointing: faker.helpers.arrayElement([\n faker.number.int({ min: 5, max: 60 }),\n undefined\n ]),\n language_config: faker.helpers.arrayElement([\n {\n ...{\n languages: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ]),\n code_switching: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n pre_processing: faker.helpers.arrayElement([\n {\n ...{\n audio_enhancer: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n speech_threshold: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: 1 }),\n undefined\n ])\n }\n },\n undefined\n ]),\n realtime_processing: 
faker.helpers.arrayElement([\n {\n ...{\n custom_vocabulary: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_vocabulary_config: faker.helpers.arrayElement([\n {\n ...{\n vocabulary: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() =>\n faker.helpers.arrayElement([\n {\n ...getStreamingControllerGetStreamingJobV2ResponseCustomVocabularyEntryDTOMock()\n },\n faker.string.alpha(20)\n ])\n ),\n default_intensity: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: 1 }),\n undefined\n ])\n }\n },\n undefined\n ]),\n custom_spelling: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_spelling_config: faker.helpers.arrayElement([\n {\n ...{\n spelling_dictionary: {\n [faker.string.alphanumeric(5)]: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n }\n },\n undefined\n ]),\n translation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n translation_config: faker.helpers.arrayElement([\n {\n ...{\n target_languages: faker.helpers.arrayElements(\n Object.values(TranslationLanguageCodeEnum)\n ),\n model: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranslationModelEnum)),\n undefined\n ]),\n match_original_utterances: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n lipsync: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n context_adaptation: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n context: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n informal: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n named_entity_recognition: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n post_processing: faker.helpers.arrayElement([\n {\n ...{\n summarization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n summarization_config: faker.helpers.arrayElement([\n {\n ...{\n type: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SummaryTypesEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n chapterization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n messages_config: faker.helpers.arrayElement([\n {\n ...{\n receive_partial_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_final_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_speech_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_pre_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_realtime_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_post_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_acknowledgments: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_errors: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n receive_lifecycle_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ]),\n callback: faker.helpers.arrayElement([faker.datatype.boolean(), 
undefined]),\n callback_config: faker.helpers.arrayElement([\n {\n ...{\n url: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n receive_partial_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_final_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_speech_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_pre_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_realtime_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_post_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_acknowledgments: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_errors: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n receive_lifecycle_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ])\n }\n },\n undefined\n ]),\n result: faker.helpers.arrayElement([\n {\n ...{\n metadata: {\n ...{\n audio_duration: faker.number.int({ min: undefined, max: undefined }),\n number_of_distinct_channels: faker.number.int({ min: 1, max: undefined }),\n billing_time: faker.number.int({ min: undefined, max: undefined }),\n transcription_time: faker.number.int({ min: undefined, max: undefined })\n }\n },\n transcription: faker.helpers.arrayElement([\n {\n ...{\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(Object.values(TranscriptionLanguageCodeEnum)),\n sentences: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })\n ),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })\n ),\n undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum))\n }))\n }\n },\n undefined\n ]),\n translation: 
faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(Object.values(TranslationLanguageCodeEnum)),\n sentences: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })),\n undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum))\n }))\n }))\n }\n },\n undefined\n ]),\n summarization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n },\n undefined\n ]),\n named_entity_recognition: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n entity: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n sentiment_analysis: 
faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n chapterization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: {}\n }\n },\n undefined\n ]),\n messages: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ])\n }\n },\n undefined\n ]),\n ...overrideResponse\n})\n\nexport const getStreamingControllerGetAudioV2ResponseMock = (): Blob =>\n new Blob(faker.helpers.arrayElements(faker.word.words(10).split(\" \")))\n\nexport const getFileControllerUploadV2MockHandler = (\n overrideResponse?:\n | AudioUploadResponse\n | ((\n info: Parameters<Parameters<typeof http.post>[1]>[0]\n ) => Promise<AudioUploadResponse> | AudioUploadResponse)\n) => {\n return http.post(\"https://api.gladia.io/v2/upload\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getFileControllerUploadV2ResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getPreRecordedControllerInitPreRecordedJobV2MockHandler = (\n overrideResponse?:\n | InitPreRecordedTranscriptionResponse\n | ((\n info: Parameters<Parameters<typeof http.post>[1]>[0]\n ) => Promise<InitPreRecordedTranscriptionResponse> | InitPreRecordedTranscriptionResponse)\n) => {\n return http.post(\"https://api.gladia.io/v2/pre-recorded\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getPreRecordedControllerInitPreRecordedJobV2ResponseMock()\n ),\n { status: 201, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getPreRecordedControllerGetPreRecordedJobsV2MockHandler = (\n overrideResponse?:\n | ListPreRecordedResponse\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<ListPreRecordedResponse> | ListPreRecordedResponse)\n) => {\n return http.get(\"https://api.gladia.io/v2/pre-recorded\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getPreRecordedControllerGetPreRecordedJobsV2ResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getPreRecordedControllerGetPreRecordedJobV2MockHandler = (\n overrideResponse?:\n | PreRecordedResponse\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PreRecordedResponse> | PreRecordedResponse)\n) => {\n return http.get(\"https://api.gladia.io/v2/pre-recorded/:id\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getPreRecordedControllerGetPreRecordedJobV2ResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getPreRecordedControllerDeletePreRecordedJobV2MockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.delete>[1]>[0]) => Promise<void> | void)\n) => {\n return http.delete(\"https://api.gladia.io/v2/pre-recorded/:id\", async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 202 })\n })\n}\n\nexport const getPreRecordedControllerGetAudioV2MockHandler = (\n overrideResponse?:\n | Blob\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<Blob> | Blob)\n) => {\n return http.get(\"https://api.gladia.io/v2/pre-recorded/:id/file\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getPreRecordedControllerGetAudioV2ResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getTranscriptionControllerInitPreRecordedJobV2MockHandler = (\n overrideResponse?:\n | InitPreRecordedTranscriptionResponse\n | ((\n info: Parameters<Parameters<typeof http.post>[1]>[0]\n ) => Promise<InitPreRecordedTranscriptionResponse> | InitPreRecordedTranscriptionResponse)\n) => {\n return http.post(\"https://api.gladia.io/v2/transcription\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getTranscriptionControllerInitPreRecordedJobV2ResponseMock()\n ),\n { status: 201, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getTranscriptionControllerListV2MockHandler = (\n overrideResponse?:\n | ListTranscriptionResponse\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<ListTranscriptionResponse> | ListTranscriptionResponse)\n) => {\n return http.get(\"https://api.gladia.io/v2/transcription\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getTranscriptionControllerListV2ResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getTranscriptionControllerGetTranscriptV2MockHandler = (\n overrideResponse?:\n | TranscriptionControllerGetTranscriptV2200\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) =>\n | Promise<TranscriptionControllerGetTranscriptV2200>\n | TranscriptionControllerGetTranscriptV2200)\n) => {\n return http.get(\"https://api.gladia.io/v2/transcription/:id\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getTranscriptionControllerGetTranscriptV2ResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getTranscriptionControllerDeleteTranscriptV2MockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.delete>[1]>[0]) => Promise<void> | void)\n) => {\n return http.delete(\"https://api.gladia.io/v2/transcription/:id\", async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 202 })\n })\n}\n\nexport const getTranscriptionControllerGetAudioV2MockHandler = (\n overrideResponse?:\n | Blob\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<Blob> | Blob)\n) => {\n return http.get(\"https://api.gladia.io/v2/transcription/:id/file\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getTranscriptionControllerGetAudioV2ResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getAudioToTextControllerAudioTranscriptionMockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.post>[1]>[0]) => Promise<void> | void)\n) => {\n return http.post(\"https://api.gladia.io/audio/text/audio-transcription\", async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 200 })\n })\n}\n\nexport const getVideoToTextControllerVideoTranscriptionMockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.post>[1]>[0]) => Promise<void> | void)\n) => {\n return http.post(\"https://api.gladia.io/video/text/video-transcription\", async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 200 })\n })\n}\n\nexport const getHistoryControllerGetListV1MockHandler = (\n overrideResponse?:\n | ListHistoryResponse\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<ListHistoryResponse> | ListHistoryResponse)\n) => {\n return http.get(\"https://api.gladia.io/v1/history\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getHistoryControllerGetListV1ResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getStreamingControllerInitStreamingSessionV2MockHandler = (\n overrideResponse?:\n | InitStreamingResponse\n | ((\n info: Parameters<Parameters<typeof http.post>[1]>[0]\n ) => Promise<InitStreamingResponse> | InitStreamingResponse)\n) => {\n return http.post(\"https://api.gladia.io/v2/live\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getStreamingControllerInitStreamingSessionV2ResponseMock()\n ),\n { status: 201, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getStreamingControllerGetStreamingJobsV2MockHandler = (\n overrideResponse?:\n | ListStreamingResponse\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<ListStreamingResponse> | ListStreamingResponse)\n) => {\n return http.get(\"https://api.gladia.io/v2/live\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getStreamingControllerGetStreamingJobsV2ResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getStreamingControllerGetStreamingJobV2MockHandler = (\n overrideResponse?:\n | StreamingResponse\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<StreamingResponse> | StreamingResponse)\n) => {\n return http.get(\"https://api.gladia.io/v2/live/:id\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getStreamingControllerGetStreamingJobV2ResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getStreamingControllerDeleteStreamingJobV2MockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.delete>[1]>[0]) => Promise<void> | void)\n) => {\n return http.delete(\"https://api.gladia.io/v2/live/:id\", async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 202 })\n })\n}\n\nexport const getStreamingControllerPatchRequestParamsV2MockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.patch>[1]>[0]) => Promise<void> | void)\n) => {\n return http.patch(\"https://api.gladia.io/v2/live/:id\", async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 204 })\n })\n}\n\nexport const getStreamingControllerGetAudioV2MockHandler = (\n overrideResponse?:\n | Blob\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<Blob> | Blob)\n) => {\n return http.get(\"https://api.gladia.io/v2/live/:id/file\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getStreamingControllerGetAudioV2ResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\nexport const getGladiaControlAPIMock = () => [\n getFileControllerUploadV2MockHandler(),\n getPreRecordedControllerInitPreRecordedJobV2MockHandler(),\n getPreRecordedControllerGetPreRecordedJobsV2MockHandler(),\n getPreRecordedControllerGetPreRecordedJobV2MockHandler(),\n getPreRecordedControllerDeletePreRecordedJobV2MockHandler(),\n getPreRecordedControllerGetAudioV2MockHandler(),\n getTranscriptionControllerInitPreRecordedJobV2MockHandler(),\n getTranscriptionControllerListV2MockHandler(),\n getTranscriptionControllerGetTranscriptV2MockHandler(),\n getTranscriptionControllerDeleteTranscriptV2MockHandler(),\n getTranscriptionControllerGetAudioV2MockHandler(),\n getAudioToTextControllerAudioTranscriptionMockHandler(),\n getVideoToTextControllerVideoTranscriptionMockHandler(),\n getHistoryControllerGetListV1MockHandler(),\n getStreamingControllerInitStreamingSessionV2MockHandler(),\n getStreamingControllerGetStreamingJobsV2MockHandler(),\n getStreamingControllerGetStreamingJobV2MockHandler(),\n getStreamingControllerDeleteStreamingJobV2MockHandler(),\n getStreamingControllerPatchRequestParamsV2MockHandler(),\n getStreamingControllerGetAudioV2MockHandler()\n]\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport * from \"./addonErrorDTO\"\nexport * from \"./audioChunkAckData\"\nexport * from \"./audioChunkAckMessage\"\nexport * from \"./audioChunkAckMessageData\"\nexport * from \"./audioChunkAckMessageError\"\nexport * from \"./audioChunkAckMessageType\"\nexport * from \"./audioChunkAction\"\nexport * from \"./audioChunkActionData\"\nexport * from \"./audioChunkActionType\"\nexport * from \"./audioToLlmDTO\"\nexport * from \"./audioToLlmDTOError\"\nexport * from \"./audioToLlmDTOResults\"\nexport * from \"./audioToLlmListConfigDTO\"\nexport * from \"./audioToLlmListDTO\"\nexport * from \"./audioToLlmListDTOError\"\nexport * from \"./audioToLlmResultDTO\"\nexport * from \"./audioToTextControllerAudioTranscriptionBody\"\nexport * from \"./audioToTextControllerAudioTranscriptionBodyLanguage\"\nexport * from \"./audioToTextControllerAudioTranscriptionBodyLanguageBehaviour\"\nexport * from \"./audioToTextControllerAudioTranscriptionBodyOutputFormat\"\nexport * from \"./audioToTextControllerAudioTranscriptionBodyTargetTranslationLanguage\"\nexport * from \"./audioUploadMetadataDTO\"\nexport * from \"./audioUploadResponse\"\nexport * from \"./badRequestErrorResponse\"\nexport * from \"./callbackConfig\"\nexport * from \"./callbackConfigDto\"\nexport * from \"./callbackLiveAudioChunkAckMessage\"\nexport * from \"./callbackLiveAudioChunkAckMessageEvent\"\nexport * from \"./callbackLiveEndRecordingMessage\"\nexport * from \"./callbackLiveEndRecordingMessageEvent\"\nexport * from \"./callbackLiveEndSessionMessage\"\nexport * from \"./callbackLiveEndSessionMessageEvent\"\nexport * from \"./callbackLiveNamedEntityRecognitionMessage\"\nexport * from \"./callbackLiveNamedEntityRecognitionMessageEvent\"\nexport * from \"./callbackLivePostChapterizationMessage\"\nexport * from \"./callbackLivePostChapterizationMessageEvent\"\nexport * from \"./callbackLivePostFinalTranscriptMessage\"\nexport * from \"./callbackLivePostFinalTranscriptMessageEvent\"\nexport * from \"./callbackLivePostSummarizationMessage\"\nexport * from 
\"./callbackLivePostSummarizationMessageEvent\"\nexport * from \"./callbackLivePostTranscriptMessage\"\nexport * from \"./callbackLivePostTranscriptMessageEvent\"\nexport * from \"./callbackLiveSentimentAnalysisMessage\"\nexport * from \"./callbackLiveSentimentAnalysisMessageEvent\"\nexport * from \"./callbackLiveSpeechEndMessage\"\nexport * from \"./callbackLiveSpeechEndMessageEvent\"\nexport * from \"./callbackLiveSpeechStartMessage\"\nexport * from \"./callbackLiveSpeechStartMessageEvent\"\nexport * from \"./callbackLiveStartRecordingMessage\"\nexport * from \"./callbackLiveStartRecordingMessageEvent\"\nexport * from \"./callbackLiveStartSessionMessage\"\nexport * from \"./callbackLiveStartSessionMessageEvent\"\nexport * from \"./callbackLiveStopRecordingAckMessage\"\nexport * from \"./callbackLiveStopRecordingAckMessageEvent\"\nexport * from \"./callbackLiveTranscriptMessage\"\nexport * from \"./callbackLiveTranscriptMessageEvent\"\nexport * from \"./callbackLiveTranslationMessage\"\nexport * from \"./callbackLiveTranslationMessageEvent\"\nexport * from \"./callbackMethodEnum\"\nexport * from \"./callbackTranscriptionErrorPayload\"\nexport * from \"./callbackTranscriptionErrorPayloadCustomMetadata\"\nexport * from \"./callbackTranscriptionErrorPayloadEvent\"\nexport * from \"./callbackTranscriptionSuccessPayload\"\nexport * from \"./callbackTranscriptionSuccessPayloadCustomMetadata\"\nexport * from \"./callbackTranscriptionSuccessPayloadEvent\"\nexport * from \"./chapterizationDTO\"\nexport * from \"./chapterizationDTOError\"\nexport * from \"./chapterizationDTOResults\"\nexport * from \"./chapterizationSentence\"\nexport * from \"./codeSwitchingConfigDTO\"\nexport * from \"./customSpellingConfigDTO\"\nexport * from \"./customSpellingConfigDTOSpellingDictionary\"\nexport * from \"./customVocabularyConfigDTO\"\nexport * from \"./customVocabularyConfigDTOVocabularyItem\"\nexport * from \"./customVocabularyEntryDTO\"\nexport * from \"./diarizationConfigDTO\"\nexport * from \"./diarizationDTO\"\nexport * from \"./diarizationDTOError\"\nexport * from \"./displayModeDTO\"\nexport * from \"./displayModeDTOError\"\nexport * from \"./endRecordingMessage\"\nexport * from \"./endRecordingMessageData\"\nexport * from \"./endRecordingMessageType\"\nexport * from \"./endSessionMessage\"\nexport * from \"./endSessionMessageType\"\nexport * from \"./error\"\nexport * from \"./errorDTO\"\nexport * from \"./fileControllerUploadV2BodyOne\"\nexport * from \"./fileControllerUploadV2BodyTwo\"\nexport * from \"./fileResponse\"\nexport * from \"./forbiddenErrorResponse\"\nexport * from \"./historyControllerGetListV1KindItem\"\nexport * from \"./historyControllerGetListV1Params\"\nexport * from \"./historyControllerGetListV1StatusItem\"\nexport * from \"./initPreRecordedTranscriptionResponse\"\nexport * from \"./initStreamingResponse\"\nexport * from \"./initTranscriptionRequest\"\nexport * from \"./initTranscriptionRequestCustomMetadata\"\nexport * from \"./languageConfig\"\nexport * from \"./listHistoryResponse\"\nexport * from \"./listHistoryResponseItemsItem\"\nexport * from \"./listPreRecordedResponse\"\nexport * from \"./listStreamingResponse\"\nexport * from \"./listTranscriptionResponse\"\nexport * from \"./listTranscriptionResponseItemsItem\"\nexport * from \"./liveEventPayload\"\nexport * from \"./messagesConfig\"\nexport * from \"./moderationDTO\"\nexport * from \"./moderationDTOError\"\nexport * from \"./namedEntityRecognitionData\"\nexport * from \"./namedEntityRecognitionDTO\"\nexport * from 
\"./namedEntityRecognitionDTOError\"\nexport * from \"./namedEntityRecognitionMessage\"\nexport * from \"./namedEntityRecognitionMessageData\"\nexport * from \"./namedEntityRecognitionMessageError\"\nexport * from \"./namedEntityRecognitionMessageType\"\nexport * from \"./namedEntityRecognitionResult\"\nexport * from \"./namesConsistencyDTO\"\nexport * from \"./namesConsistencyDTOError\"\nexport * from \"./notFoundErrorResponse\"\nexport * from \"./patchRequestParamsDTO\"\nexport * from \"./payloadTooLargeErrorResponse\"\nexport * from \"./postChapterizationMessage\"\nexport * from \"./postChapterizationMessageData\"\nexport * from \"./postChapterizationMessageDataProperty\"\nexport * from \"./postChapterizationMessageError\"\nexport * from \"./postChapterizationMessageType\"\nexport * from \"./postChapterizationResult\"\nexport * from \"./postFinalTranscriptMessage\"\nexport * from \"./postFinalTranscriptMessageType\"\nexport * from \"./postProcessingConfig\"\nexport * from \"./postSummarizationMessage\"\nexport * from \"./postSummarizationMessageData\"\nexport * from \"./postSummarizationMessageDataProperty\"\nexport * from \"./postSummarizationMessageError\"\nexport * from \"./postSummarizationMessageType\"\nexport * from \"./postTranscriptMessage\"\nexport * from \"./postTranscriptMessageType\"\nexport * from \"./preProcessingConfig\"\nexport * from \"./preRecordedControllerGetPreRecordedJobsV2Params\"\nexport * from \"./preRecordedControllerGetPreRecordedJobsV2StatusItem\"\nexport * from \"./preRecordedEventPayload\"\nexport * from \"./preRecordedRequestParamsResponse\"\nexport * from \"./preRecordedResponse\"\nexport * from \"./preRecordedResponseCustomMetadata\"\nexport * from \"./preRecordedResponseFile\"\nexport * from \"./preRecordedResponseKind\"\nexport * from \"./preRecordedResponsePostSessionMetadata\"\nexport * from \"./preRecordedResponseRequestParams\"\nexport * from \"./preRecordedResponseResult\"\nexport * from \"./preRecordedResponseStatus\"\nexport * from \"./realtimeProcessingConfig\"\nexport * from \"./sentencesDTO\"\nexport * from \"./sentencesDTOError\"\nexport * from \"./sentimentAnalysisData\"\nexport * from \"./sentimentAnalysisDTO\"\nexport * from \"./sentimentAnalysisDTOError\"\nexport * from \"./sentimentAnalysisMessage\"\nexport * from \"./sentimentAnalysisMessageData\"\nexport * from \"./sentimentAnalysisMessageError\"\nexport * from \"./sentimentAnalysisMessageType\"\nexport * from \"./sentimentAnalysisResult\"\nexport * from \"./speakerReidentificationDTO\"\nexport * from \"./speakerReidentificationDTOError\"\nexport * from \"./speechEndMessage\"\nexport * from \"./speechEndMessageType\"\nexport * from \"./speechMessageData\"\nexport * from \"./speechStartMessage\"\nexport * from \"./speechStartMessageType\"\nexport * from \"./startRecordingMessage\"\nexport * from \"./startRecordingMessageType\"\nexport * from \"./startSessionMessage\"\nexport * from \"./startSessionMessageType\"\nexport * from \"./stopRecordingAckData\"\nexport * from \"./stopRecordingAckMessage\"\nexport * from \"./stopRecordingAckMessageData\"\nexport * from \"./stopRecordingAckMessageError\"\nexport * from \"./stopRecordingAckMessageType\"\nexport * from \"./stopRecordingAction\"\nexport * from \"./stopRecordingActionType\"\nexport * from \"./streamingControllerGetStreamingJobsV2Params\"\nexport * from \"./streamingControllerGetStreamingJobsV2StatusItem\"\nexport * from \"./streamingControllerInitStreamingSessionV2Params\"\nexport * from \"./streamingRequest\"\nexport * from 
\"./streamingRequestCustomMetadata\"\nexport * from \"./streamingRequestParamsResponse\"\nexport * from \"./streamingResponse\"\nexport * from \"./streamingResponseCustomMetadata\"\nexport * from \"./streamingResponseFile\"\nexport * from \"./streamingResponseKind\"\nexport * from \"./streamingResponsePostSessionMetadata\"\nexport * from \"./streamingResponseRequestParams\"\nexport * from \"./streamingResponseResult\"\nexport * from \"./streamingResponseStatus\"\nexport * from \"./streamingSupportedBitDepthEnum\"\nexport * from \"./streamingSupportedEncodingEnum\"\nexport * from \"./streamingSupportedModels\"\nexport * from \"./streamingSupportedRegions\"\nexport * from \"./streamingSupportedSampleRateEnum\"\nexport * from \"./streamingTranscriptionResultDTO\"\nexport * from \"./streamingTranscriptionResultWithMessagesDTO\"\nexport * from \"./structuredDataExtractionConfigDTO\"\nexport * from \"./structuredDataExtractionDTO\"\nexport * from \"./structuredDataExtractionDTOError\"\nexport * from \"./subtitleDTO\"\nexport * from \"./subtitlesConfigDTO\"\nexport * from \"./subtitlesFormatEnum\"\nexport * from \"./subtitlesStyleEnum\"\nexport * from \"./summarizationConfigDTO\"\nexport * from \"./summarizationDTO\"\nexport * from \"./summarizationDTOError\"\nexport * from \"./summaryTypesEnum\"\nexport * from \"./transcriptionControllerGetTranscriptV2200\"\nexport * from \"./transcriptionControllerListV2KindItem\"\nexport * from \"./transcriptionControllerListV2Params\"\nexport * from \"./transcriptionControllerListV2StatusItem\"\nexport * from \"./transcriptionDTO\"\nexport * from \"./transcriptionLanguageCodeEnum\"\nexport * from \"./transcriptionMetadataDTO\"\nexport * from \"./transcriptionResultDTO\"\nexport * from \"./transcriptMessage\"\nexport * from \"./transcriptMessageData\"\nexport * from \"./transcriptMessageType\"\nexport * from \"./translationConfigDTO\"\nexport * from \"./translationData\"\nexport * from \"./translationDTO\"\nexport * from \"./translationDTOError\"\nexport * from \"./translationLanguageCodeEnum\"\nexport * from \"./translationMessage\"\nexport * from \"./translationMessageData\"\nexport * from \"./translationMessageError\"\nexport * from \"./translationMessageType\"\nexport * from \"./translationModelEnum\"\nexport * from \"./translationResultDTO\"\nexport * from \"./translationResultDTOError\"\nexport * from \"./unauthorizedErrorResponse\"\nexport * from \"./unprocessableEntityErrorResponse\"\nexport * from \"./uploadBody\"\nexport * from \"./utteranceDTO\"\nexport * from \"./videoToTextControllerVideoTranscriptionBody\"\nexport * from \"./videoToTextControllerVideoTranscriptionBodyLanguage\"\nexport * from \"./videoToTextControllerVideoTranscriptionBodyLanguageBehaviour\"\nexport * from \"./videoToTextControllerVideoTranscriptionBodyOutputFormat\"\nexport * from \"./videoToTextControllerVideoTranscriptionBodyTargetTranslationLanguage\"\nexport * from \"./webhookLiveEndRecordingPayload\"\nexport * from \"./webhookLiveEndRecordingPayloadEvent\"\nexport * from \"./webhookLiveEndSessionPayload\"\nexport * from \"./webhookLiveEndSessionPayloadEvent\"\nexport * from \"./webhookLiveStartRecordingPayload\"\nexport * from \"./webhookLiveStartRecordingPayloadEvent\"\nexport * from \"./webhookLiveStartSessionPayload\"\nexport * from \"./webhookLiveStartSessionPayloadEvent\"\nexport * from \"./webhookTranscriptionCreatedPayload\"\nexport * from \"./webhookTranscriptionCreatedPayloadEvent\"\nexport * from \"./webhookTranscriptionErrorPayload\"\nexport * from 
\"./webhookTranscriptionErrorPayloadEvent\"\nexport * from \"./webhookTranscriptionSuccessPayload\"\nexport * from \"./webhookTranscriptionSuccessPayloadEvent\"\nexport * from \"./wordDTO\"\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type AudioChunkAckMessageType =\n (typeof AudioChunkAckMessageType)[keyof typeof AudioChunkAckMessageType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const AudioChunkAckMessageType = {\n audio_chunk: \"audio_chunk\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type AudioChunkActionType = (typeof AudioChunkActionType)[keyof typeof AudioChunkActionType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const AudioChunkActionType = {\n audio_chunk: \"audio_chunk\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type AudioToTextControllerAudioTranscriptionBodyLanguage =\n (typeof AudioToTextControllerAudioTranscriptionBodyLanguage)[keyof typeof AudioToTextControllerAudioTranscriptionBodyLanguage]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const AudioToTextControllerAudioTranscriptionBodyLanguage = {\n afrikaans: \"afrikaans\",\n albanian: \"albanian\",\n amharic: \"amharic\",\n arabic: \"arabic\",\n armenian: \"armenian\",\n assamese: \"assamese\",\n azerbaijani: \"azerbaijani\",\n bashkir: \"bashkir\",\n basque: \"basque\",\n belarusian: \"belarusian\",\n bengali: \"bengali\",\n bosnian: \"bosnian\",\n breton: \"breton\",\n bulgarian: \"bulgarian\",\n catalan: \"catalan\",\n chinese: \"chinese\",\n croatian: \"croatian\",\n czech: \"czech\",\n danish: \"danish\",\n dutch: \"dutch\",\n english: \"english\",\n estonian: \"estonian\",\n faroese: \"faroese\",\n finnish: \"finnish\",\n french: \"french\",\n galician: \"galician\",\n georgian: \"georgian\",\n german: \"german\",\n greek: \"greek\",\n gujarati: \"gujarati\",\n haitian_creole: \"haitian creole\",\n hausa: \"hausa\",\n hawaiian: \"hawaiian\",\n hebrew: \"hebrew\",\n hindi: \"hindi\",\n hungarian: \"hungarian\",\n icelandic: \"icelandic\",\n indonesian: \"indonesian\",\n italian: \"italian\",\n japanese: \"japanese\",\n javanese: \"javanese\",\n kannada: \"kannada\",\n kazakh: \"kazakh\",\n khmer: \"khmer\",\n korean: \"korean\",\n lao: \"lao\",\n latin: \"latin\",\n latvian: \"latvian\",\n lingala: \"lingala\",\n lithuanian: \"lithuanian\",\n luxembourgish: \"luxembourgish\",\n macedonian: \"macedonian\",\n malagasy: \"malagasy\",\n malay: \"malay\",\n malayalam: \"malayalam\",\n maltese: \"maltese\",\n maori: \"maori\",\n marathi: \"marathi\",\n mongolian: \"mongolian\",\n myanmar: \"myanmar\",\n nepali: \"nepali\",\n norwegian: \"norwegian\",\n nynorsk: \"nynorsk\",\n occitan: \"occitan\",\n pashto: \"pashto\",\n persian: \"persian\",\n polish: \"polish\",\n portuguese: \"portuguese\",\n punjabi: \"punjabi\",\n romanian: \"romanian\",\n russian: \"russian\",\n sanskrit: \"sanskrit\",\n serbian: \"serbian\",\n shona: \"shona\",\n sindhi: \"sindhi\",\n sinhala: \"sinhala\",\n slovak: \"slovak\",\n slovenian: \"slovenian\",\n somali: \"somali\",\n spanish: \"spanish\",\n sundanese: \"sundanese\",\n swahili: \"swahili\",\n swedish: \"swedish\",\n tagalog: \"tagalog\",\n tajik: \"tajik\",\n tamil: \"tamil\",\n tatar: \"tatar\",\n telugu: 
\"telugu\",\n thai: \"thai\",\n tibetan: \"tibetan\",\n turkish: \"turkish\",\n turkmen: \"turkmen\",\n ukrainian: \"ukrainian\",\n urdu: \"urdu\",\n uzbek: \"uzbek\",\n vietnamese: \"vietnamese\",\n welsh: \"welsh\",\n yiddish: \"yiddish\",\n yoruba: \"yoruba\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type AudioToTextControllerAudioTranscriptionBodyLanguageBehaviour =\n (typeof AudioToTextControllerAudioTranscriptionBodyLanguageBehaviour)[keyof typeof AudioToTextControllerAudioTranscriptionBodyLanguageBehaviour]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const AudioToTextControllerAudioTranscriptionBodyLanguageBehaviour = {\n automatic_single_language: \"automatic single language\",\n automatic_multiple_languages: \"automatic multiple languages\",\n manual: \"manual\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type AudioToTextControllerAudioTranscriptionBodyOutputFormat =\n (typeof AudioToTextControllerAudioTranscriptionBodyOutputFormat)[keyof typeof AudioToTextControllerAudioTranscriptionBodyOutputFormat]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const AudioToTextControllerAudioTranscriptionBodyOutputFormat = {\n json: \"json\",\n srt: \"srt\",\n vtt: \"vtt\",\n plain: \"plain\",\n txt: \"txt\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type AudioToTextControllerAudioTranscriptionBodyTargetTranslationLanguage =\n (typeof AudioToTextControllerAudioTranscriptionBodyTargetTranslationLanguage)[keyof typeof AudioToTextControllerAudioTranscriptionBodyTargetTranslationLanguage]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const AudioToTextControllerAudioTranscriptionBodyTargetTranslationLanguage = {\n afrikaans: \"afrikaans\",\n albanian: \"albanian\",\n amharic: \"amharic\",\n arabic: \"arabic\",\n armenian: \"armenian\",\n assamese: \"assamese\",\n azerbaijani: \"azerbaijani\",\n bashkir: \"bashkir\",\n basque: \"basque\",\n belarusian: \"belarusian\",\n bengali: \"bengali\",\n bosnian: \"bosnian\",\n breton: \"breton\",\n bulgarian: \"bulgarian\",\n catalan: \"catalan\",\n chinese: \"chinese\",\n croatian: \"croatian\",\n czech: \"czech\",\n danish: \"danish\",\n dutch: \"dutch\",\n english: \"english\",\n estonian: \"estonian\",\n faroese: \"faroese\",\n finnish: \"finnish\",\n french: \"french\",\n galician: \"galician\",\n georgian: \"georgian\",\n german: \"german\",\n greek: \"greek\",\n gujarati: \"gujarati\",\n haitian_creole: \"haitian creole\",\n hausa: \"hausa\",\n hawaiian: \"hawaiian\",\n hebrew: \"hebrew\",\n hindi: \"hindi\",\n hungarian: \"hungarian\",\n icelandic: \"icelandic\",\n indonesian: \"indonesian\",\n italian: \"italian\",\n japanese: \"japanese\",\n javanese: \"javanese\",\n kannada: \"kannada\",\n kazakh: \"kazakh\",\n khmer: \"khmer\",\n korean: \"korean\",\n lao: \"lao\",\n latin: \"latin\",\n latvian: \"latvian\",\n lingala: \"lingala\",\n lithuanian: \"lithuanian\",\n luxembourgish: \"luxembourgish\",\n macedonian: \"macedonian\",\n malagasy: \"malagasy\",\n malay: \"malay\",\n malayalam: \"malayalam\",\n maltese: \"maltese\",\n maori: \"maori\",\n marathi: \"marathi\",\n mongolian: \"mongolian\",\n myanmar: \"myanmar\",\n nepali: \"nepali\",\n norwegian: \"norwegian\",\n 
nynorsk: \"nynorsk\",\n occitan: \"occitan\",\n pashto: \"pashto\",\n persian: \"persian\",\n polish: \"polish\",\n portuguese: \"portuguese\",\n punjabi: \"punjabi\",\n romanian: \"romanian\",\n russian: \"russian\",\n sanskrit: \"sanskrit\",\n serbian: \"serbian\",\n shona: \"shona\",\n sindhi: \"sindhi\",\n sinhala: \"sinhala\",\n slovak: \"slovak\",\n slovenian: \"slovenian\",\n somali: \"somali\",\n spanish: \"spanish\",\n sundanese: \"sundanese\",\n swahili: \"swahili\",\n swedish: \"swedish\",\n tagalog: \"tagalog\",\n tajik: \"tajik\",\n tamil: \"tamil\",\n tatar: \"tatar\",\n telugu: \"telugu\",\n thai: \"thai\",\n tibetan: \"tibetan\",\n turkish: \"turkish\",\n turkmen: \"turkmen\",\n ukrainian: \"ukrainian\",\n urdu: \"urdu\",\n uzbek: \"uzbek\",\n vietnamese: \"vietnamese\",\n welsh: \"welsh\",\n wolof: \"wolof\",\n yiddish: \"yiddish\",\n yoruba: \"yoruba\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type CallbackLiveAudioChunkAckMessageEvent =\n (typeof CallbackLiveAudioChunkAckMessageEvent)[keyof typeof CallbackLiveAudioChunkAckMessageEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const CallbackLiveAudioChunkAckMessageEvent = {\n liveaudio_chunk: \"live.audio_chunk\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type CallbackLiveEndRecordingMessageEvent =\n (typeof CallbackLiveEndRecordingMessageEvent)[keyof typeof CallbackLiveEndRecordingMessageEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const CallbackLiveEndRecordingMessageEvent = {\n liveend_recording: \"live.end_recording\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type CallbackLiveEndSessionMessageEvent =\n (typeof CallbackLiveEndSessionMessageEvent)[keyof typeof CallbackLiveEndSessionMessageEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const CallbackLiveEndSessionMessageEvent = {\n liveend_session: \"live.end_session\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type CallbackLiveNamedEntityRecognitionMessageEvent =\n (typeof CallbackLiveNamedEntityRecognitionMessageEvent)[keyof typeof CallbackLiveNamedEntityRecognitionMessageEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const CallbackLiveNamedEntityRecognitionMessageEvent = {\n livenamed_entity_recognition: \"live.named_entity_recognition\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type CallbackLivePostChapterizationMessageEvent =\n (typeof CallbackLivePostChapterizationMessageEvent)[keyof typeof CallbackLivePostChapterizationMessageEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const CallbackLivePostChapterizationMessageEvent = {\n livepost_chapterization: \"live.post_chapterization\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type CallbackLivePostFinalTranscriptMessageEvent =\n (typeof CallbackLivePostFinalTranscriptMessageEvent)[keyof typeof CallbackLivePostFinalTranscriptMessageEvent]\n\n// 
eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const CallbackLivePostFinalTranscriptMessageEvent = {\n livepost_final_transcript: \"live.post_final_transcript\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type CallbackLivePostSummarizationMessageEvent =\n (typeof CallbackLivePostSummarizationMessageEvent)[keyof typeof CallbackLivePostSummarizationMessageEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const CallbackLivePostSummarizationMessageEvent = {\n livepost_summarization: \"live.post_summarization\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type CallbackLivePostTranscriptMessageEvent =\n (typeof CallbackLivePostTranscriptMessageEvent)[keyof typeof CallbackLivePostTranscriptMessageEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const CallbackLivePostTranscriptMessageEvent = {\n livepost_transcript: \"live.post_transcript\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type CallbackLiveSentimentAnalysisMessageEvent =\n (typeof CallbackLiveSentimentAnalysisMessageEvent)[keyof typeof CallbackLiveSentimentAnalysisMessageEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const CallbackLiveSentimentAnalysisMessageEvent = {\n livesentiment_analysis: \"live.sentiment_analysis\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type CallbackLiveSpeechEndMessageEvent =\n (typeof CallbackLiveSpeechEndMessageEvent)[keyof typeof CallbackLiveSpeechEndMessageEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const CallbackLiveSpeechEndMessageEvent = {\n livespeech_end: \"live.speech_end\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type CallbackLiveSpeechStartMessageEvent =\n (typeof CallbackLiveSpeechStartMessageEvent)[keyof typeof CallbackLiveSpeechStartMessageEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const CallbackLiveSpeechStartMessageEvent = {\n livespeech_start: \"live.speech_start\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type CallbackLiveStartRecordingMessageEvent =\n (typeof CallbackLiveStartRecordingMessageEvent)[keyof typeof CallbackLiveStartRecordingMessageEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const CallbackLiveStartRecordingMessageEvent = {\n livestart_recording: \"live.start_recording\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type CallbackLiveStartSessionMessageEvent =\n (typeof CallbackLiveStartSessionMessageEvent)[keyof typeof CallbackLiveStartSessionMessageEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const CallbackLiveStartSessionMessageEvent = {\n livestart_session: \"live.start_session\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type 
CallbackLiveStopRecordingAckMessageEvent =\n (typeof CallbackLiveStopRecordingAckMessageEvent)[keyof typeof CallbackLiveStopRecordingAckMessageEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const CallbackLiveStopRecordingAckMessageEvent = {\n livestop_recording: \"live.stop_recording\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type CallbackLiveTranscriptMessageEvent =\n (typeof CallbackLiveTranscriptMessageEvent)[keyof typeof CallbackLiveTranscriptMessageEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const CallbackLiveTranscriptMessageEvent = {\n livetranscript: \"live.transcript\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type CallbackLiveTranslationMessageEvent =\n (typeof CallbackLiveTranslationMessageEvent)[keyof typeof CallbackLiveTranslationMessageEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const CallbackLiveTranslationMessageEvent = {\n livetranslation: \"live.translation\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\n/**\n * The HTTP method to be used. Allowed values are `POST` or `PUT` (default: `POST`)\n */\nexport type CallbackMethodEnum = (typeof CallbackMethodEnum)[keyof typeof CallbackMethodEnum]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const CallbackMethodEnum = {\n POST: \"POST\",\n PUT: \"PUT\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\n/**\n * Type of event\n */\nexport type CallbackTranscriptionErrorPayloadEvent =\n (typeof CallbackTranscriptionErrorPayloadEvent)[keyof typeof CallbackTranscriptionErrorPayloadEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const CallbackTranscriptionErrorPayloadEvent = {\n transcriptionerror: \"transcription.error\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\n/**\n * Type of event\n */\nexport type CallbackTranscriptionSuccessPayloadEvent =\n (typeof CallbackTranscriptionSuccessPayloadEvent)[keyof typeof CallbackTranscriptionSuccessPayloadEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const CallbackTranscriptionSuccessPayloadEvent = {\n transcriptionsuccess: \"transcription.success\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type EndRecordingMessageType =\n (typeof EndRecordingMessageType)[keyof typeof EndRecordingMessageType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const EndRecordingMessageType = {\n end_recording: \"end_recording\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type EndSessionMessageType =\n (typeof EndSessionMessageType)[keyof typeof EndSessionMessageType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const EndSessionMessageType = {\n end_session: \"end_session\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n 
*/\n\nexport type HistoryControllerGetListV1KindItem =\n (typeof HistoryControllerGetListV1KindItem)[keyof typeof HistoryControllerGetListV1KindItem]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const HistoryControllerGetListV1KindItem = {\n \"pre-recorded\": \"pre-recorded\",\n live: \"live\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type HistoryControllerGetListV1StatusItem =\n (typeof HistoryControllerGetListV1StatusItem)[keyof typeof HistoryControllerGetListV1StatusItem]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const HistoryControllerGetListV1StatusItem = {\n queued: \"queued\",\n processing: \"processing\",\n done: \"done\",\n error: \"error\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type NamedEntityRecognitionMessageType =\n (typeof NamedEntityRecognitionMessageType)[keyof typeof NamedEntityRecognitionMessageType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const NamedEntityRecognitionMessageType = {\n named_entity_recognition: \"named_entity_recognition\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type PostChapterizationMessageType =\n (typeof PostChapterizationMessageType)[keyof typeof PostChapterizationMessageType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const PostChapterizationMessageType = {\n post_chapterization: \"post_chapterization\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type PostFinalTranscriptMessageType =\n (typeof PostFinalTranscriptMessageType)[keyof typeof PostFinalTranscriptMessageType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const PostFinalTranscriptMessageType = {\n post_final_transcript: \"post_final_transcript\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type PostSummarizationMessageType =\n (typeof PostSummarizationMessageType)[keyof typeof PostSummarizationMessageType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const PostSummarizationMessageType = {\n post_summarization: \"post_summarization\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type PostTranscriptMessageType =\n (typeof PostTranscriptMessageType)[keyof typeof PostTranscriptMessageType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const PostTranscriptMessageType = {\n post_transcript: \"post_transcript\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type PreRecordedControllerGetPreRecordedJobsV2StatusItem =\n (typeof PreRecordedControllerGetPreRecordedJobsV2StatusItem)[keyof typeof PreRecordedControllerGetPreRecordedJobsV2StatusItem]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const PreRecordedControllerGetPreRecordedJobsV2StatusItem = {\n queued: \"queued\",\n processing: \"processing\",\n done: \"done\",\n error: \"error\"\n} as const\n","/**\n * Generated by orval 
v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type PreRecordedResponseKind =\n (typeof PreRecordedResponseKind)[keyof typeof PreRecordedResponseKind]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const PreRecordedResponseKind = {\n \"pre-recorded\": \"pre-recorded\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\n/**\n * \"queued\": the job has been queued. \"processing\": the job is being processed. \"done\": the job has been processed and the result is available. \"error\": an error occurred during the job's processing.\n */\nexport type PreRecordedResponseStatus =\n (typeof PreRecordedResponseStatus)[keyof typeof PreRecordedResponseStatus]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const PreRecordedResponseStatus = {\n queued: \"queued\",\n processing: \"processing\",\n done: \"done\",\n error: \"error\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type SentimentAnalysisMessageType =\n (typeof SentimentAnalysisMessageType)[keyof typeof SentimentAnalysisMessageType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const SentimentAnalysisMessageType = {\n sentiment_analysis: \"sentiment_analysis\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type SpeechEndMessageType = (typeof SpeechEndMessageType)[keyof typeof SpeechEndMessageType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const SpeechEndMessageType = {\n speech_end: \"speech_end\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type SpeechStartMessageType =\n (typeof SpeechStartMessageType)[keyof typeof SpeechStartMessageType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const SpeechStartMessageType = {\n speech_start: \"speech_start\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type StartRecordingMessageType =\n (typeof StartRecordingMessageType)[keyof typeof StartRecordingMessageType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const StartRecordingMessageType = {\n start_recording: \"start_recording\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type StartSessionMessageType =\n (typeof StartSessionMessageType)[keyof typeof StartSessionMessageType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const StartSessionMessageType = {\n start_session: \"start_session\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type StopRecordingAckMessageType =\n (typeof StopRecordingAckMessageType)[keyof typeof StopRecordingAckMessageType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const StopRecordingAckMessageType = {\n stop_recording: \"stop_recording\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type 
StopRecordingActionType =\n (typeof StopRecordingActionType)[keyof typeof StopRecordingActionType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const StopRecordingActionType = {\n stop_recording: \"stop_recording\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type StreamingControllerGetStreamingJobsV2StatusItem =\n (typeof StreamingControllerGetStreamingJobsV2StatusItem)[keyof typeof StreamingControllerGetStreamingJobsV2StatusItem]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const StreamingControllerGetStreamingJobsV2StatusItem = {\n queued: \"queued\",\n processing: \"processing\",\n done: \"done\",\n error: \"error\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type StreamingResponseKind =\n (typeof StreamingResponseKind)[keyof typeof StreamingResponseKind]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const StreamingResponseKind = {\n live: \"live\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\n/**\n * \"queued\": the job has been queued. \"processing\": the job is being processed. \"done\": the job has been processed and the result is available. \"error\": an error occurred during the job's processing.\n */\nexport type StreamingResponseStatus =\n (typeof StreamingResponseStatus)[keyof typeof StreamingResponseStatus]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const StreamingResponseStatus = {\n queued: \"queued\",\n processing: \"processing\",\n done: \"done\",\n error: \"error\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\n/**\n * The model used to process the audio. \"solaria-1\" is used by default.\n */\nexport type StreamingSupportedModels =\n (typeof StreamingSupportedModels)[keyof typeof StreamingSupportedModels]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const StreamingSupportedModels = {\n \"solaria-1\": \"solaria-1\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type StreamingSupportedRegions =\n (typeof StreamingSupportedRegions)[keyof typeof StreamingSupportedRegions]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const StreamingSupportedRegions = {\n \"us-west\": \"us-west\",\n \"eu-west\": \"eu-west\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\n/**\n * Subtitles formats you want your transcription to be formatted to\n */\nexport type SubtitlesFormatEnum = (typeof SubtitlesFormatEnum)[keyof typeof SubtitlesFormatEnum]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const SubtitlesFormatEnum = {\n srt: \"srt\",\n vtt: \"vtt\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\n/**\n * Style of the subtitles. 
Compliance mode refers to : https://loc.gov/preservation/digital/formats//fdd/fdd000569.shtml#:~:text=SRT%20files%20are%20basic%20text,alongside%2C%20example%3A%20%22MyVideo123\n */\nexport type SubtitlesStyleEnum = (typeof SubtitlesStyleEnum)[keyof typeof SubtitlesStyleEnum]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const SubtitlesStyleEnum = {\n default: \"default\",\n compliance: \"compliance\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\n/**\n * The type of summarization to apply\n */\nexport type SummaryTypesEnum = (typeof SummaryTypesEnum)[keyof typeof SummaryTypesEnum]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const SummaryTypesEnum = {\n general: \"general\",\n bullet_points: \"bullet_points\",\n concise: \"concise\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type TranscriptionControllerListV2KindItem =\n (typeof TranscriptionControllerListV2KindItem)[keyof typeof TranscriptionControllerListV2KindItem]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TranscriptionControllerListV2KindItem = {\n \"pre-recorded\": \"pre-recorded\",\n live: \"live\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type TranscriptionControllerListV2StatusItem =\n (typeof TranscriptionControllerListV2StatusItem)[keyof typeof TranscriptionControllerListV2StatusItem]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TranscriptionControllerListV2StatusItem = {\n queued: \"queued\",\n processing: \"processing\",\n done: \"done\",\n error: \"error\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\n/**\n * Specify the language in which it will be pronounced when sound comparison occurs. 
Default to transcription language.\n */\nexport type TranscriptionLanguageCodeEnum =\n (typeof TranscriptionLanguageCodeEnum)[keyof typeof TranscriptionLanguageCodeEnum]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TranscriptionLanguageCodeEnum = {\n af: \"af\",\n am: \"am\",\n ar: \"ar\",\n as: \"as\",\n az: \"az\",\n ba: \"ba\",\n be: \"be\",\n bg: \"bg\",\n bn: \"bn\",\n bo: \"bo\",\n br: \"br\",\n bs: \"bs\",\n ca: \"ca\",\n cs: \"cs\",\n cy: \"cy\",\n da: \"da\",\n de: \"de\",\n el: \"el\",\n en: \"en\",\n es: \"es\",\n et: \"et\",\n eu: \"eu\",\n fa: \"fa\",\n fi: \"fi\",\n fo: \"fo\",\n fr: \"fr\",\n gl: \"gl\",\n gu: \"gu\",\n ha: \"ha\",\n haw: \"haw\",\n he: \"he\",\n hi: \"hi\",\n hr: \"hr\",\n ht: \"ht\",\n hu: \"hu\",\n hy: \"hy\",\n id: \"id\",\n is: \"is\",\n it: \"it\",\n ja: \"ja\",\n jw: \"jw\",\n ka: \"ka\",\n kk: \"kk\",\n km: \"km\",\n kn: \"kn\",\n ko: \"ko\",\n la: \"la\",\n lb: \"lb\",\n ln: \"ln\",\n lo: \"lo\",\n lt: \"lt\",\n lv: \"lv\",\n mg: \"mg\",\n mi: \"mi\",\n mk: \"mk\",\n ml: \"ml\",\n mn: \"mn\",\n mr: \"mr\",\n ms: \"ms\",\n mt: \"mt\",\n my: \"my\",\n ne: \"ne\",\n nl: \"nl\",\n nn: \"nn\",\n no: \"no\",\n oc: \"oc\",\n pa: \"pa\",\n pl: \"pl\",\n ps: \"ps\",\n pt: \"pt\",\n ro: \"ro\",\n ru: \"ru\",\n sa: \"sa\",\n sd: \"sd\",\n si: \"si\",\n sk: \"sk\",\n sl: \"sl\",\n sn: \"sn\",\n so: \"so\",\n sq: \"sq\",\n sr: \"sr\",\n su: \"su\",\n sv: \"sv\",\n sw: \"sw\",\n ta: \"ta\",\n te: \"te\",\n tg: \"tg\",\n th: \"th\",\n tk: \"tk\",\n tl: \"tl\",\n tr: \"tr\",\n tt: \"tt\",\n uk: \"uk\",\n ur: \"ur\",\n uz: \"uz\",\n vi: \"vi\",\n yi: \"yi\",\n yo: \"yo\",\n zh: \"zh\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type TranscriptMessageType =\n (typeof TranscriptMessageType)[keyof typeof TranscriptMessageType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TranscriptMessageType = {\n transcript: \"transcript\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\n/**\n * Target language in `iso639-1` format you want the transcription translated to\n */\nexport type TranslationLanguageCodeEnum =\n (typeof TranslationLanguageCodeEnum)[keyof typeof TranslationLanguageCodeEnum]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TranslationLanguageCodeEnum = {\n af: \"af\",\n am: \"am\",\n ar: \"ar\",\n as: \"as\",\n az: \"az\",\n ba: \"ba\",\n be: \"be\",\n bg: \"bg\",\n bn: \"bn\",\n bo: \"bo\",\n br: \"br\",\n bs: \"bs\",\n ca: \"ca\",\n cs: \"cs\",\n cy: \"cy\",\n da: \"da\",\n de: \"de\",\n el: \"el\",\n en: \"en\",\n es: \"es\",\n et: \"et\",\n eu: \"eu\",\n fa: \"fa\",\n fi: \"fi\",\n fo: \"fo\",\n fr: \"fr\",\n gl: \"gl\",\n gu: \"gu\",\n ha: \"ha\",\n haw: \"haw\",\n he: \"he\",\n hi: \"hi\",\n hr: \"hr\",\n ht: \"ht\",\n hu: \"hu\",\n hy: \"hy\",\n id: \"id\",\n is: \"is\",\n it: \"it\",\n ja: \"ja\",\n jw: \"jw\",\n ka: \"ka\",\n kk: \"kk\",\n km: \"km\",\n kn: \"kn\",\n ko: \"ko\",\n la: \"la\",\n lb: \"lb\",\n ln: \"ln\",\n lo: \"lo\",\n lt: \"lt\",\n lv: \"lv\",\n mg: \"mg\",\n mi: \"mi\",\n mk: \"mk\",\n ml: \"ml\",\n mn: \"mn\",\n mr: \"mr\",\n ms: \"ms\",\n mt: \"mt\",\n my: \"my\",\n ne: \"ne\",\n nl: \"nl\",\n nn: \"nn\",\n no: \"no\",\n oc: \"oc\",\n pa: \"pa\",\n pl: \"pl\",\n ps: \"ps\",\n pt: \"pt\",\n ro: \"ro\",\n ru: \"ru\",\n sa: \"sa\",\n sd: \"sd\",\n si: 
\"si\",\n sk: \"sk\",\n sl: \"sl\",\n sn: \"sn\",\n so: \"so\",\n sq: \"sq\",\n sr: \"sr\",\n su: \"su\",\n sv: \"sv\",\n sw: \"sw\",\n ta: \"ta\",\n te: \"te\",\n tg: \"tg\",\n th: \"th\",\n tk: \"tk\",\n tl: \"tl\",\n tr: \"tr\",\n tt: \"tt\",\n uk: \"uk\",\n ur: \"ur\",\n uz: \"uz\",\n vi: \"vi\",\n wo: \"wo\",\n yi: \"yi\",\n yo: \"yo\",\n zh: \"zh\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type TranslationMessageType =\n (typeof TranslationMessageType)[keyof typeof TranslationMessageType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TranslationMessageType = {\n translation: \"translation\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\n/**\n * Model you want the translation model to use to translate\n */\nexport type TranslationModelEnum = (typeof TranslationModelEnum)[keyof typeof TranslationModelEnum]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TranslationModelEnum = {\n base: \"base\",\n enhanced: \"enhanced\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type VideoToTextControllerVideoTranscriptionBodyLanguage =\n (typeof VideoToTextControllerVideoTranscriptionBodyLanguage)[keyof typeof VideoToTextControllerVideoTranscriptionBodyLanguage]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const VideoToTextControllerVideoTranscriptionBodyLanguage = {\n afrikaans: \"afrikaans\",\n albanian: \"albanian\",\n amharic: \"amharic\",\n arabic: \"arabic\",\n armenian: \"armenian\",\n assamese: \"assamese\",\n azerbaijani: \"azerbaijani\",\n bashkir: \"bashkir\",\n basque: \"basque\",\n belarusian: \"belarusian\",\n bengali: \"bengali\",\n bosnian: \"bosnian\",\n breton: \"breton\",\n bulgarian: \"bulgarian\",\n catalan: \"catalan\",\n chinese: \"chinese\",\n croatian: \"croatian\",\n czech: \"czech\",\n danish: \"danish\",\n dutch: \"dutch\",\n english: \"english\",\n estonian: \"estonian\",\n faroese: \"faroese\",\n finnish: \"finnish\",\n french: \"french\",\n galician: \"galician\",\n georgian: \"georgian\",\n german: \"german\",\n greek: \"greek\",\n gujarati: \"gujarati\",\n haitian_creole: \"haitian creole\",\n hausa: \"hausa\",\n hawaiian: \"hawaiian\",\n hebrew: \"hebrew\",\n hindi: \"hindi\",\n hungarian: \"hungarian\",\n icelandic: \"icelandic\",\n indonesian: \"indonesian\",\n italian: \"italian\",\n japanese: \"japanese\",\n javanese: \"javanese\",\n kannada: \"kannada\",\n kazakh: \"kazakh\",\n khmer: \"khmer\",\n korean: \"korean\",\n lao: \"lao\",\n latin: \"latin\",\n latvian: \"latvian\",\n lingala: \"lingala\",\n lithuanian: \"lithuanian\",\n luxembourgish: \"luxembourgish\",\n macedonian: \"macedonian\",\n malagasy: \"malagasy\",\n malay: \"malay\",\n malayalam: \"malayalam\",\n maltese: \"maltese\",\n maori: \"maori\",\n marathi: \"marathi\",\n mongolian: \"mongolian\",\n myanmar: \"myanmar\",\n nepali: \"nepali\",\n norwegian: \"norwegian\",\n nynorsk: \"nynorsk\",\n occitan: \"occitan\",\n pashto: \"pashto\",\n persian: \"persian\",\n polish: \"polish\",\n portuguese: \"portuguese\",\n punjabi: \"punjabi\",\n romanian: \"romanian\",\n russian: \"russian\",\n sanskrit: \"sanskrit\",\n serbian: \"serbian\",\n shona: \"shona\",\n sindhi: \"sindhi\",\n sinhala: \"sinhala\",\n slovak: \"slovak\",\n 
slovenian: \"slovenian\",\n somali: \"somali\",\n spanish: \"spanish\",\n sundanese: \"sundanese\",\n swahili: \"swahili\",\n swedish: \"swedish\",\n tagalog: \"tagalog\",\n tajik: \"tajik\",\n tamil: \"tamil\",\n tatar: \"tatar\",\n telugu: \"telugu\",\n thai: \"thai\",\n tibetan: \"tibetan\",\n turkish: \"turkish\",\n turkmen: \"turkmen\",\n ukrainian: \"ukrainian\",\n urdu: \"urdu\",\n uzbek: \"uzbek\",\n vietnamese: \"vietnamese\",\n welsh: \"welsh\",\n yiddish: \"yiddish\",\n yoruba: \"yoruba\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type VideoToTextControllerVideoTranscriptionBodyLanguageBehaviour =\n (typeof VideoToTextControllerVideoTranscriptionBodyLanguageBehaviour)[keyof typeof VideoToTextControllerVideoTranscriptionBodyLanguageBehaviour]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const VideoToTextControllerVideoTranscriptionBodyLanguageBehaviour = {\n automatic_single_language: \"automatic single language\",\n automatic_multiple_languages: \"automatic multiple languages\",\n manual: \"manual\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type VideoToTextControllerVideoTranscriptionBodyOutputFormat =\n (typeof VideoToTextControllerVideoTranscriptionBodyOutputFormat)[keyof typeof VideoToTextControllerVideoTranscriptionBodyOutputFormat]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const VideoToTextControllerVideoTranscriptionBodyOutputFormat = {\n json: \"json\",\n srt: \"srt\",\n vtt: \"vtt\",\n plain: \"plain\",\n txt: \"txt\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type VideoToTextControllerVideoTranscriptionBodyTargetTranslationLanguage =\n (typeof VideoToTextControllerVideoTranscriptionBodyTargetTranslationLanguage)[keyof typeof VideoToTextControllerVideoTranscriptionBodyTargetTranslationLanguage]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const VideoToTextControllerVideoTranscriptionBodyTargetTranslationLanguage = {\n afrikaans: \"afrikaans\",\n albanian: \"albanian\",\n amharic: \"amharic\",\n arabic: \"arabic\",\n armenian: \"armenian\",\n assamese: \"assamese\",\n azerbaijani: \"azerbaijani\",\n bashkir: \"bashkir\",\n basque: \"basque\",\n belarusian: \"belarusian\",\n bengali: \"bengali\",\n bosnian: \"bosnian\",\n breton: \"breton\",\n bulgarian: \"bulgarian\",\n catalan: \"catalan\",\n chinese: \"chinese\",\n croatian: \"croatian\",\n czech: \"czech\",\n danish: \"danish\",\n dutch: \"dutch\",\n english: \"english\",\n estonian: \"estonian\",\n faroese: \"faroese\",\n finnish: \"finnish\",\n french: \"french\",\n galician: \"galician\",\n georgian: \"georgian\",\n german: \"german\",\n greek: \"greek\",\n gujarati: \"gujarati\",\n haitian_creole: \"haitian creole\",\n hausa: \"hausa\",\n hawaiian: \"hawaiian\",\n hebrew: \"hebrew\",\n hindi: \"hindi\",\n hungarian: \"hungarian\",\n icelandic: \"icelandic\",\n indonesian: \"indonesian\",\n italian: \"italian\",\n japanese: \"japanese\",\n javanese: \"javanese\",\n kannada: \"kannada\",\n kazakh: \"kazakh\",\n khmer: \"khmer\",\n korean: \"korean\",\n lao: \"lao\",\n latin: \"latin\",\n latvian: \"latvian\",\n lingala: \"lingala\",\n lithuanian: \"lithuanian\",\n luxembourgish: \"luxembourgish\",\n macedonian: \"macedonian\",\n 
malagasy: \"malagasy\",\n malay: \"malay\",\n malayalam: \"malayalam\",\n maltese: \"maltese\",\n maori: \"maori\",\n marathi: \"marathi\",\n mongolian: \"mongolian\",\n myanmar: \"myanmar\",\n nepali: \"nepali\",\n norwegian: \"norwegian\",\n nynorsk: \"nynorsk\",\n occitan: \"occitan\",\n pashto: \"pashto\",\n persian: \"persian\",\n polish: \"polish\",\n portuguese: \"portuguese\",\n punjabi: \"punjabi\",\n romanian: \"romanian\",\n russian: \"russian\",\n sanskrit: \"sanskrit\",\n serbian: \"serbian\",\n shona: \"shona\",\n sindhi: \"sindhi\",\n sinhala: \"sinhala\",\n slovak: \"slovak\",\n slovenian: \"slovenian\",\n somali: \"somali\",\n spanish: \"spanish\",\n sundanese: \"sundanese\",\n swahili: \"swahili\",\n swedish: \"swedish\",\n tagalog: \"tagalog\",\n tajik: \"tajik\",\n tamil: \"tamil\",\n tatar: \"tatar\",\n telugu: \"telugu\",\n thai: \"thai\",\n tibetan: \"tibetan\",\n turkish: \"turkish\",\n turkmen: \"turkmen\",\n ukrainian: \"ukrainian\",\n urdu: \"urdu\",\n uzbek: \"uzbek\",\n vietnamese: \"vietnamese\",\n welsh: \"welsh\",\n wolof: \"wolof\",\n yiddish: \"yiddish\",\n yoruba: \"yoruba\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type WebhookLiveEndRecordingPayloadEvent =\n (typeof WebhookLiveEndRecordingPayloadEvent)[keyof typeof WebhookLiveEndRecordingPayloadEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const WebhookLiveEndRecordingPayloadEvent = {\n liveend_recording: \"live.end_recording\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type WebhookLiveEndSessionPayloadEvent =\n (typeof WebhookLiveEndSessionPayloadEvent)[keyof typeof WebhookLiveEndSessionPayloadEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const WebhookLiveEndSessionPayloadEvent = {\n liveend_session: \"live.end_session\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type WebhookLiveStartRecordingPayloadEvent =\n (typeof WebhookLiveStartRecordingPayloadEvent)[keyof typeof WebhookLiveStartRecordingPayloadEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const WebhookLiveStartRecordingPayloadEvent = {\n livestart_recording: \"live.start_recording\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type WebhookLiveStartSessionPayloadEvent =\n (typeof WebhookLiveStartSessionPayloadEvent)[keyof typeof WebhookLiveStartSessionPayloadEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const WebhookLiveStartSessionPayloadEvent = {\n livestart_session: \"live.start_session\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type WebhookTranscriptionCreatedPayloadEvent =\n (typeof WebhookTranscriptionCreatedPayloadEvent)[keyof typeof WebhookTranscriptionCreatedPayloadEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const WebhookTranscriptionCreatedPayloadEvent = {\n transcriptioncreated: \"transcription.created\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type 
WebhookTranscriptionErrorPayloadEvent =\n (typeof WebhookTranscriptionErrorPayloadEvent)[keyof typeof WebhookTranscriptionErrorPayloadEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const WebhookTranscriptionErrorPayloadEvent = {\n transcriptionerror: \"transcription.error\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type WebhookTranscriptionSuccessPayloadEvent =\n (typeof WebhookTranscriptionSuccessPayloadEvent)[keyof typeof WebhookTranscriptionSuccessPayloadEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const WebhookTranscriptionSuccessPayloadEvent = {\n transcriptionsuccess: \"transcription.success\"\n} as const\n","/**\n * AssemblyAI transcription provider adapter\n * Documentation: https://www.assemblyai.com/docs\n */\n\nimport axios from \"axios\"\nimport WebSocket from \"ws\"\nimport type {\n AudioChunk,\n AudioInput,\n ProviderCapabilities,\n StreamingCallbacks,\n StreamingOptions,\n StreamingSession,\n TranscribeOptions,\n UnifiedTranscriptResponse\n} from \"../router/types\"\nimport { BaseAdapter, type ProviderConfig } from \"./base-adapter\"\n\n// Import generated API client functions - FULL TYPE SAFETY!\nimport {\n createTranscript,\n getTranscript as getTranscriptAPI,\n createTemporaryToken\n} from \"../generated/assemblyai/api/assemblyAIAPI\"\n\n// Import AssemblyAI generated types\nimport type { Transcript } from \"../generated/assemblyai/schema/transcript\"\nimport type { TranscriptParams } from \"../generated/assemblyai/schema/transcriptParams\"\nimport type { TranscriptStatus } from \"../generated/assemblyai/schema/transcriptStatus\"\nimport type { TranscriptWord } from \"../generated/assemblyai/schema/transcriptWord\"\nimport type { TranscriptUtterance } from \"../generated/assemblyai/schema/transcriptUtterance\"\n\n// Import AssemblyAI v3 Streaming types (auto-synced from SDK)\nimport type {\n BeginEvent,\n TurnEvent,\n TerminationEvent,\n ErrorEvent,\n StreamingEventMessage,\n StreamingWord\n} from \"../generated/assemblyai/streaming-types\"\n\n/**\n * AssemblyAI transcription provider adapter\n *\n * Implements transcription for the AssemblyAI API with support for:\n * - Synchronous and asynchronous transcription\n * - Speaker diarization (speaker labels)\n * - Multi-language detection and transcription\n * - Summarization and sentiment analysis\n * - Entity detection and content moderation\n * - Custom vocabulary and spelling\n * - Word-level timestamps\n * - PII redaction\n *\n * @see https://www.assemblyai.com/docs AssemblyAI API Documentation\n *\n * @example Basic transcription\n * ```typescript\n * import { AssemblyAIAdapter } from '@meeting-baas/sdk';\n *\n * const adapter = new AssemblyAIAdapter();\n * adapter.initialize({\n * apiKey: process.env.ASSEMBLYAI_API_KEY\n * });\n *\n * const result = await adapter.transcribe({\n * type: 'url',\n * url: 'https://example.com/audio.mp3'\n * }, {\n * language: 'en',\n * diarization: true\n * });\n *\n * console.log(result.data.text);\n * console.log(result.data.speakers);\n * ```\n *\n * @example With advanced features\n * ```typescript\n * const result = await adapter.transcribe(audio, {\n * language: 'en_us',\n * diarization: true,\n * summarization: true,\n * sentimentAnalysis: true,\n * entityDetection: true,\n * piiRedaction: true\n * });\n *\n * console.log('Summary:', result.data.summary);\n * console.log('Entities:', 
result.data.metadata?.entities);\n * ```\n */\nexport class AssemblyAIAdapter extends BaseAdapter {\n readonly name = \"assemblyai\" as const\n readonly capabilities: ProviderCapabilities = {\n streaming: true,\n diarization: true,\n wordTimestamps: true,\n languageDetection: true,\n customVocabulary: true,\n summarization: true,\n sentimentAnalysis: true,\n entityDetection: true,\n piiRedaction: true\n }\n\n protected baseUrl = \"https://api.assemblyai.com\" // Generated functions already include /v2 path\n private wsBaseUrl = \"wss://streaming.assemblyai.com/v3/ws\" // v3 Universal Streaming endpoint\n\n /**\n * Get axios config for generated API client functions\n * Configures headers and base URL using authorization header\n */\n protected getAxiosConfig() {\n return super.getAxiosConfig(\"authorization\")\n }\n\n /**\n * Submit audio for transcription\n *\n * Sends audio to AssemblyAI API for transcription. If a webhook URL is provided,\n * returns immediately with the job ID. Otherwise, polls until completion.\n *\n * @param audio - Audio input (currently only URL type supported)\n * @param options - Transcription options\n * @param options.language - Language code (e.g., 'en', 'en_us', 'es', 'fr')\n * @param options.languageDetection - Enable automatic language detection\n * @param options.diarization - Enable speaker identification (speaker_labels)\n * @param options.speakersExpected - Number of expected speakers\n * @param options.summarization - Generate text summary\n * @param options.sentimentAnalysis - Analyze sentiment of transcription\n * @param options.entityDetection - Detect named entities (people, places, etc.)\n * @param options.piiRedaction - Redact personally identifiable information\n * @param options.customVocabulary - Words to boost in recognition\n * @param options.webhookUrl - Callback URL for async results\n * @returns Normalized transcription response\n * @throws {Error} If audio type is not 'url' (file/stream not yet supported)\n *\n * @example Simple transcription\n * ```typescript\n * const result = await adapter.transcribe({\n * type: 'url',\n * url: 'https://example.com/meeting.mp3'\n * });\n * ```\n *\n * @example With advanced features\n * ```typescript\n * const result = await adapter.transcribe({\n * type: 'url',\n * url: 'https://example.com/meeting.mp3'\n * }, {\n * language: 'en_us',\n * diarization: true,\n * speakersExpected: 3,\n * summarization: true,\n * sentimentAnalysis: true,\n * entityDetection: true,\n * customVocabulary: ['API', 'TypeScript', 'JavaScript']\n * });\n * ```\n *\n * @example With webhook (returns transcript ID immediately for polling)\n * ```typescript\n * // Submit transcription with webhook\n * const result = await adapter.transcribe({\n * type: 'url',\n * url: 'https://example.com/meeting.mp3'\n * }, {\n * webhookUrl: 'https://myapp.com/webhook/transcription',\n * language: 'en_us'\n * });\n *\n * // Get transcript ID for polling\n * const transcriptId = result.data?.id;\n * console.log('Transcript ID:', transcriptId); // Use this to poll for status\n *\n * // Later: Poll for completion (if webhook fails or you want to check)\n * const status = await adapter.getTranscript(transcriptId);\n * if (status.data?.status === 'completed') {\n * console.log('Transcript:', status.data.text);\n * }\n * ```\n */\n async transcribe(\n audio: AudioInput,\n options?: TranscribeOptions\n ): Promise<UnifiedTranscriptResponse> {\n this.validateConfig()\n\n try {\n // Build typed request using generated types\n const request = 
this.buildTranscriptionRequest(audio, options)\n\n // Use generated API client function - FULLY TYPED!\n const response = await createTranscript(request, this.getAxiosConfig())\n\n const transcriptId = response.data.id\n\n // If webhook is provided, return immediately with job ID\n if (options?.webhookUrl) {\n return {\n success: true,\n provider: this.name,\n data: {\n id: transcriptId,\n text: \"\",\n status: \"queued\"\n },\n raw: response.data\n }\n }\n\n // Otherwise, poll for results\n return await this.pollForCompletion(transcriptId)\n } catch (error) {\n return this.createErrorResponse(error)\n }\n }\n\n /**\n * Get transcription result by ID\n */\n async getTranscript(transcriptId: string): Promise<UnifiedTranscriptResponse> {\n this.validateConfig()\n\n try {\n // Use generated API client function - FULLY TYPED!\n const response = await getTranscriptAPI(transcriptId, this.getAxiosConfig())\n\n return this.normalizeResponse(response.data)\n } catch (error) {\n return this.createErrorResponse(error)\n }\n }\n\n /**\n * Build AssemblyAI transcription request from unified options\n */\n private buildTranscriptionRequest(\n audio: AudioInput,\n options?: TranscribeOptions\n ): TranscriptParams {\n // Get audio URL\n let audioUrl: string\n if (audio.type === \"url\") {\n audioUrl = audio.url\n } else {\n throw new Error(\n \"AssemblyAI adapter currently only supports URL-based audio input. Use audio.type='url'\"\n )\n }\n\n const request: TranscriptParams = {\n audio_url: audioUrl\n }\n\n // Map options to AssemblyAI format\n if (options) {\n // Language configuration\n if (options.language) {\n // Convert ISO codes to AssemblyAI format (e.g., 'en' -> 'en_us')\n const languageCode = options.language.includes(\"_\")\n ? options.language\n : `${options.language}_us`\n request.language_code = languageCode\n }\n\n if (options.languageDetection) {\n request.language_detection = true\n }\n\n // Speaker diarization\n if (options.diarization) {\n request.speaker_labels = true\n if (options.speakersExpected) {\n request.speakers_expected = options.speakersExpected\n }\n }\n\n // Custom vocabulary (word boost)\n if (options.customVocabulary && options.customVocabulary.length > 0) {\n request.word_boost = options.customVocabulary\n request.boost_param = \"high\" // default to high boost\n }\n\n // Summarization\n if (options.summarization) {\n request.summarization = true\n request.summary_model = \"informative\"\n request.summary_type = \"bullets\"\n }\n\n // Sentiment analysis\n if (options.sentimentAnalysis) {\n request.sentiment_analysis = true\n }\n\n // Entity detection\n if (options.entityDetection) {\n request.entity_detection = true\n }\n\n // PII redaction\n if (options.piiRedaction) {\n request.redact_pii = true\n }\n\n // Webhook callback\n if (options.webhookUrl) {\n request.webhook_url = options.webhookUrl\n }\n\n // Enable word timestamps by default (AssemblyAI includes them automatically)\n // Enable punctuation and formatting for better results\n request.punctuate = true\n request.format_text = true\n }\n\n return request\n }\n\n /**\n * Normalize AssemblyAI response to unified format\n */\n private normalizeResponse(response: Transcript): UnifiedTranscriptResponse {\n // Map AssemblyAI status to unified status\n let status: \"queued\" | \"processing\" | \"completed\" | \"error\"\n switch (response.status) {\n case \"queued\":\n status = \"queued\"\n break\n case \"processing\":\n status = \"processing\"\n break\n case \"completed\":\n status = \"completed\"\n break\n case 
\"error\":\n status = \"error\"\n break\n default:\n status = \"queued\"\n }\n\n // Handle error state\n if (response.status === \"error\") {\n return {\n success: false,\n provider: this.name,\n error: {\n code: \"TRANSCRIPTION_ERROR\",\n message: response.error || \"Transcription failed\"\n },\n raw: response\n }\n }\n\n return {\n success: true,\n provider: this.name,\n data: {\n id: response.id,\n text: response.text || \"\",\n confidence: response.confidence !== null ? response.confidence : undefined,\n status,\n language: response.language_code,\n duration: response.audio_duration ? response.audio_duration / 1000 : undefined, // Convert ms to seconds\n speakers: this.extractSpeakers(response),\n words: this.extractWords(response),\n utterances: this.extractUtterances(response),\n summary: response.summary || undefined,\n metadata: {\n audioUrl: response.audio_url,\n entities: response.entities,\n sentimentAnalysis: response.sentiment_analysis_results,\n contentModeration: response.content_safety_labels\n }\n },\n raw: response\n }\n }\n\n /**\n * Extract speaker information from AssemblyAI response\n */\n private extractSpeakers(transcript: Transcript) {\n if (!transcript.utterances || transcript.utterances.length === 0) {\n return undefined\n }\n\n // Extract unique speakers from utterances\n const speakerSet = new Set<string>()\n transcript.utterances.forEach((utterance: TranscriptUtterance) => {\n if (utterance.speaker) {\n speakerSet.add(utterance.speaker)\n }\n })\n\n if (speakerSet.size === 0) {\n return undefined\n }\n\n return Array.from(speakerSet).map((speakerId) => ({\n id: speakerId,\n label: speakerId // AssemblyAI uses format like \"A\", \"B\", \"C\"\n }))\n }\n\n /**\n * Extract word timestamps from AssemblyAI response\n */\n private extractWords(transcript: Transcript) {\n if (!transcript.words || transcript.words.length === 0) {\n return undefined\n }\n\n return transcript.words.map((word: TranscriptWord) => ({\n text: word.text,\n start: word.start / 1000, // Convert ms to seconds\n end: word.end / 1000, // Convert ms to seconds\n confidence: word.confidence,\n speaker: word.speaker || undefined\n }))\n }\n\n /**\n * Extract utterances from AssemblyAI response\n */\n private extractUtterances(transcript: Transcript) {\n if (!transcript.utterances || transcript.utterances.length === 0) {\n return undefined\n }\n\n return transcript.utterances.map((utterance: TranscriptUtterance) => ({\n text: utterance.text,\n start: utterance.start / 1000, // Convert ms to seconds\n end: utterance.end / 1000, // Convert ms to seconds\n speaker: utterance.speaker || undefined,\n confidence: utterance.confidence,\n words: utterance.words.map((word: TranscriptWord) => ({\n text: word.text,\n start: word.start / 1000,\n end: word.end / 1000,\n confidence: word.confidence\n }))\n }))\n }\n\n /**\n * Stream audio for real-time transcription\n *\n * Creates a WebSocket connection to AssemblyAI for streaming transcription.\n * First obtains a temporary token, then connects and streams audio chunks.\n *\n * @param options - Streaming configuration options\n * @param callbacks - Event callbacks for transcription results\n * @returns Promise that resolves with a StreamingSession\n *\n * @example Real-time streaming\n * ```typescript\n * const session = await adapter.transcribeStream({\n * encoding: 'pcm_s16le',\n * sampleRate: 16000,\n * language: 'en',\n * interimResults: true\n * }, {\n * onOpen: () => console.log('Connected'),\n * onTranscript: (event) => {\n * if (event.isFinal) {\n * 
console.log('Final:', event.text);\n * } else {\n * console.log('Interim:', event.text);\n * }\n * },\n * onError: (error) => console.error('Error:', error),\n * onClose: () => console.log('Disconnected')\n * });\n *\n * // Send audio chunks\n * const audioChunk = getAudioChunk(); // Your audio source\n * await session.sendAudio({ data: audioChunk });\n *\n * // Close when done\n * await session.close();\n * ```\n */\n async transcribeStream(\n options?: StreamingOptions,\n callbacks?: StreamingCallbacks\n ): Promise<StreamingSession> {\n this.validateConfig()\n\n if (!this.config?.apiKey) {\n throw new Error(\"API key is required for streaming\")\n }\n\n // Step 1: Build WebSocket URL with parameters\n // v3 supports authentication via API key header (no token needed)\n const sampleRate = options?.sampleRate || 16000\n const encoding = options?.encoding || \"pcm_s16le\"\n const wsUrl = `${this.wsBaseUrl}?sample_rate=${sampleRate}&encoding=${encoding}`\n\n // Step 2: Create WebSocket connection with API key in headers\n const ws = new WebSocket(wsUrl, {\n headers: {\n Authorization: this.config.apiKey\n }\n })\n\n let sessionStatus: \"connecting\" | \"open\" | \"closing\" | \"closed\" = \"connecting\"\n const sessionId = `assemblyai-${Date.now()}-${Math.random().toString(36).substring(7)}`\n\n // Handle WebSocket events\n ws.on(\"open\", () => {\n sessionStatus = \"open\"\n callbacks?.onOpen?.()\n })\n\n ws.on(\"message\", (data: Buffer) => {\n try {\n const message = JSON.parse(data.toString()) as StreamingEventMessage\n\n // Handle different message types from AssemblyAI v3 - TYPE SAFE!\n // Check for error first (it doesn't have a 'type' field)\n if (\"error\" in message) {\n // Type narrowed to ErrorEvent\n callbacks?.onError?.({\n code: \"API_ERROR\",\n message: (message as ErrorEvent).error\n })\n return\n }\n\n // Now we know it has a 'type' field\n if ((message as BeginEvent | TurnEvent | TerminationEvent).type === \"Begin\") {\n // Type narrowed to BeginEvent\n const beginMsg = message as BeginEvent\n callbacks?.onMetadata?.({\n sessionId: beginMsg.id,\n expiresAt: new Date(beginMsg.expires_at).toISOString()\n })\n } else if ((message as BeginEvent | TurnEvent | TerminationEvent).type === \"Turn\") {\n // Type narrowed to TurnEvent\n const turnMsg = message as TurnEvent\n // v3 uses a single \"Turn\" event with end_of_turn flag instead of PartialTranscript/FinalTranscript\n callbacks?.onTranscript?.({\n type: \"transcript\",\n text: turnMsg.transcript,\n isFinal: turnMsg.end_of_turn,\n confidence: turnMsg.end_of_turn_confidence,\n words: turnMsg.words.map((word: StreamingWord) => ({\n text: word.text,\n start: word.start / 1000, // Convert ms to seconds\n end: word.end / 1000,\n confidence: word.confidence\n })),\n data: turnMsg\n })\n } else if ((message as BeginEvent | TurnEvent | TerminationEvent).type === \"Termination\") {\n // Type narrowed to TerminationEvent\n const termMsg = message as TerminationEvent\n callbacks?.onMetadata?.({\n terminated: true,\n audioDurationSeconds: termMsg.audio_duration_seconds,\n sessionDurationSeconds: termMsg.session_duration_seconds\n })\n }\n } catch (error) {\n callbacks?.onError?.({\n code: \"PARSE_ERROR\",\n message: \"Failed to parse WebSocket message\",\n details: error\n })\n }\n })\n\n ws.on(\"error\", (error: Error) => {\n callbacks?.onError?.({\n code: \"WEBSOCKET_ERROR\",\n message: error.message,\n details: error\n })\n })\n\n ws.on(\"close\", (code: number, reason: Buffer) => {\n sessionStatus = \"closed\"\n 
callbacks?.onClose?.(code, reason.toString())\n })\n\n // Wait for connection to open\n await new Promise<void>((resolve, reject) => {\n const timeout = setTimeout(() => {\n reject(new Error(\"WebSocket connection timeout\"))\n }, 10000)\n\n ws.once(\"open\", () => {\n clearTimeout(timeout)\n resolve()\n })\n\n ws.once(\"error\", (error) => {\n clearTimeout(timeout)\n reject(error)\n })\n })\n\n // Return StreamingSession interface\n return {\n id: sessionId,\n provider: this.name,\n createdAt: new Date(),\n getStatus: () => sessionStatus,\n sendAudio: async (chunk: AudioChunk) => {\n if (sessionStatus !== \"open\") {\n throw new Error(`Cannot send audio: session is ${sessionStatus}`)\n }\n\n if (ws.readyState !== WebSocket.OPEN) {\n throw new Error(\"WebSocket is not open\")\n }\n\n // AssemblyAI expects base64-encoded audio data\n const base64Audio = chunk.data.toString(\"base64\")\n\n // Send audio data as JSON message\n ws.send(\n JSON.stringify({\n audio_data: base64Audio\n })\n )\n\n // Send termination message if this is the last chunk\n if (chunk.isLast) {\n ws.send(\n JSON.stringify({\n terminate_session: true\n })\n )\n }\n },\n close: async () => {\n if (sessionStatus === \"closed\" || sessionStatus === \"closing\") {\n return\n }\n\n sessionStatus = \"closing\"\n\n // Send termination message before closing\n if (ws.readyState === WebSocket.OPEN) {\n ws.send(\n JSON.stringify({\n terminate_session: true\n })\n )\n }\n\n // Close WebSocket\n return new Promise<void>((resolve) => {\n const timeout = setTimeout(() => {\n ws.terminate()\n resolve()\n }, 5000)\n\n ws.close()\n\n ws.once(\"close\", () => {\n clearTimeout(timeout)\n sessionStatus = \"closed\"\n resolve()\n })\n })\n }\n }\n }\n}\n\n/**\n * Factory function to create an AssemblyAI adapter\n */\nexport function createAssemblyAIAdapter(config: ProviderConfig): AssemblyAIAdapter {\n const adapter = new AssemblyAIAdapter()\n adapter.initialize(config)\n return adapter\n}\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * AssemblyAI API\n * AssemblyAI API\n * OpenAPI spec version: 1.3.4\n */\n\nimport { faker } from \"@faker-js/faker\"\nimport type { AxiosRequestConfig, AxiosResponse } from \"axios\"\nimport axios from \"axios\"\nimport { delay, HttpResponse, http } from \"msw\"\nimport type {\n AutoHighlightsResult,\n ContentSafetyLabelsResult,\n CreateRealtimeTemporaryTokenParams,\n GetSubtitlesParams,\n LemurQuestionAnswerParams,\n LemurQuestionAnswerResponse,\n LemurResponse,\n LemurStringResponse,\n LemurSummaryParams,\n LemurSummaryResponse,\n LemurTaskParams,\n LemurTaskResponse,\n ListTranscriptsParams,\n ParagraphsResponse,\n PurgeLemurRequestDataResponse,\n RealtimeTemporaryTokenResponse,\n RedactedAudioResponse,\n SentencesResponse,\n SubtitleFormat,\n TopicDetectionModelResult,\n Transcript,\n TranscriptList,\n TranscriptParams,\n UploadedFile,\n WordSearchParams,\n WordSearchResponse\n} from \"../schema\"\nimport {\n AudioIntelligenceModelStatus,\n RedactedAudioStatus,\n RedactPiiAudioQuality,\n SpeechModel,\n SubstitutionPolicy,\n TranscriptLanguageCode,\n TranscriptStatus\n} from \"../schema\"\n\n/**\n * <Note>To upload a media file to our EU server, replace `api.assemblyai.com` with `api.eu.assemblyai.com`.</Note>\nUpload a media file to AssemblyAI's servers.\n\n * @summary Upload a media file\n */\nexport const uploadFile = <TData = AxiosResponse<UploadedFile>>(\n uploadFileBody: Blob,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(\"/v2/upload\", 
uploadFileBody, options)\n}\n\n/**\n * <Note>To use our EU server for transcription, replace `api.assemblyai.com` with `api.eu.assemblyai.com`.</Note>\nCreate a transcript from a media file that is accessible via a URL.\n\n * @summary Transcribe audio\n */\nexport const createTranscript = <TData = AxiosResponse<Transcript>>(\n transcriptParams: TranscriptParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(\"/v2/transcript\", transcriptParams, options)\n}\n\n/**\n * <Note>To retrieve your transcriptions on our EU server, replace `api.assemblyai.com` with `api.eu.assemblyai.com`.</Note>\nRetrieve a list of transcripts you created. \nTranscripts are sorted from newest to oldest and can be retrieved for the last 90 days of usage. The previous URL always points to a page with older transcripts.\n\nIf you need to retrieve transcripts from more than 90 days ago please reach out to our Support team at support@assemblyai.com.\n\n * @summary List transcripts\n */\nexport const listTranscripts = <TData = AxiosResponse<TranscriptList>>(\n params?: ListTranscriptsParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/v2/transcript\", {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * <Note>To retrieve your transcriptions on our EU server, replace `api.assemblyai.com` with `api.eu.assemblyai.com`.</Note>\nGet the transcript resource. The transcript is ready when the \"status\" is \"completed\".\n\n * @summary Get transcript\n */\nexport const getTranscript = <TData = AxiosResponse<Transcript>>(\n transcriptId: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/v2/transcript/${transcriptId}`, options)\n}\n\n/**\n * <Note>To delete your transcriptions on our EU server, replace `api.assemblyai.com` with `api.eu.assemblyai.com`.</Note>\nRemove the data from the transcript and mark it as deleted.\n\n * @summary Delete transcript\n */\nexport const deleteTranscript = <TData = AxiosResponse<Transcript>>(\n transcriptId: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.delete(`/v2/transcript/${transcriptId}`, options)\n}\n\n/**\n * <Note>To retrieve your transcriptions on our EU server, replace `api.assemblyai.com` with `api.eu.assemblyai.com`.</Note>\nExport your transcript in SRT or VTT format to use with a video player for subtitles and closed captions.\n\n * @summary Get subtitles for transcript\n */\nexport const getSubtitles = <TData = AxiosResponse<string>>(\n transcriptId: string,\n subtitleFormat: SubtitleFormat,\n params?: GetSubtitlesParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/v2/transcript/${transcriptId}/${subtitleFormat}`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * <Note>To retrieve your transcriptions on our EU server, replace `api.assemblyai.com` with `api.eu.assemblyai.com`.</Note>\nGet the transcript split by sentences. 
The API will attempt to semantically segment the transcript into sentences to create more reader-friendly transcripts.\n\n * @summary Get sentences in transcript\n */\nexport const getTranscriptSentences = <TData = AxiosResponse<SentencesResponse>>(\n transcriptId: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/v2/transcript/${transcriptId}/sentences`, options)\n}\n\n/**\n * <Note>To retrieve your transcriptions on our EU server, replace `api.assemblyai.com` with `api.eu.assemblyai.com`.</Note>\nGet the transcript split by paragraphs. The API will attempt to semantically segment your transcript into paragraphs to create more reader-friendly transcripts.\n\n * @summary Get paragraphs in transcript\n */\nexport const getTranscriptParagraphs = <TData = AxiosResponse<ParagraphsResponse>>(\n transcriptId: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/v2/transcript/${transcriptId}/paragraphs`, options)\n}\n\n/**\n * <Note>To search through a transcription created on our EU server, replace `api.assemblyai.com` with `api.eu.assemblyai.com`.</Note>\nSearch through the transcript for keywords. You can search for individual words, numbers, or phrases containing up to five words or numbers.\n\n * @summary Search words in transcript\n */\nexport const wordSearch = <TData = AxiosResponse<WordSearchResponse>>(\n transcriptId: string,\n params: WordSearchParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/v2/transcript/${transcriptId}/word-search`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * <Note>To retrieve your transcriptions on our EU server, replace `api.assemblyai.com` with `api.eu.assemblyai.com`.</Note>\nRetrieve the redacted audio object containing the status and URL to the redacted audio.\n\n * @summary Get redacted audio\n */\nexport const getRedactedAudio = <TData = AxiosResponse<RedactedAudioResponse>>(\n transcriptId: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/v2/transcript/${transcriptId}/redacted-audio`, options)\n}\n\n/**\n * <Warning>Streaming Speech-to-Text is currently not available on the EU endpoint.</Warning>\n<Note>Any usage associated with a temporary token will be attributed to the API key that generated it.</Note>\nCreate a temporary authentication token for Streaming Speech-to-Text\n\n * @summary Create temporary authentication token for Streaming STT\n */\nexport const createTemporaryToken = <TData = AxiosResponse<RealtimeTemporaryTokenResponse>>(\n createRealtimeTemporaryTokenParams: CreateRealtimeTemporaryTokenParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(\"/v2/realtime/token\", createRealtimeTemporaryTokenParams, options)\n}\n\n/**\n * Use the LeMUR task endpoint to input your own LLM prompt.\n\n * @summary Run a task using LeMUR\n */\nexport const lemurTask = <TData = AxiosResponse<LemurTaskResponse>>(\n lemurTaskParams: LemurTaskParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(\"/lemur/v3/generate/task\", lemurTaskParams, options)\n}\n\n/**\n * Custom Summary allows you to distill a piece of audio into a few impactful sentences.\nYou can give the model context to obtain more targeted results while outputting the results in a variety of formats described in human language.\n\n * @summary Summarize a transcript using LeMUR\n */\nexport const lemurSummary = <TData = AxiosResponse<LemurSummaryResponse>>(\n lemurSummaryParams: 
LemurSummaryParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(\"/lemur/v3/generate/summary\", lemurSummaryParams, options)\n}\n\n/**\n * Question & Answer allows you to ask free-form questions about a single transcript or a group of transcripts.\nThe questions can be any whose answers you find useful, such as judging whether a caller is likely to become a customer or whether all items on a meeting's agenda were covered.\n\n * @summary Ask questions using LeMUR\n */\nexport const lemurQuestionAnswer = <TData = AxiosResponse<LemurQuestionAnswerResponse>>(\n lemurQuestionAnswerParams: LemurQuestionAnswerParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(\"/lemur/v3/generate/question-answer\", lemurQuestionAnswerParams, options)\n}\n\n/**\n * Retrieve a LeMUR response that was previously generated.\n\n * @summary Retrieve LeMUR response\n */\nexport const getLemurResponse = <TData = AxiosResponse<LemurResponse>>(\n requestId: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/lemur/v3/${requestId}`, options)\n}\n\n/**\n * Delete the data for a previously submitted LeMUR request.\nThe LLM response data, as well as any context provided in the original request will be removed.\n\n * @summary Purge LeMUR request data\n */\nexport const purgeLemurRequestData = <TData = AxiosResponse<PurgeLemurRequestDataResponse>>(\n requestId: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.delete(`/lemur/v3/${requestId}`, options)\n}\n\nexport type UploadFileResult = AxiosResponse<UploadedFile>\nexport type CreateTranscriptResult = AxiosResponse<Transcript>\nexport type ListTranscriptsResult = AxiosResponse<TranscriptList>\nexport type GetTranscriptResult = AxiosResponse<Transcript>\nexport type DeleteTranscriptResult = AxiosResponse<Transcript>\nexport type GetSubtitlesResult = AxiosResponse<string>\nexport type GetTranscriptSentencesResult = AxiosResponse<SentencesResponse>\nexport type GetTranscriptParagraphsResult = AxiosResponse<ParagraphsResponse>\nexport type WordSearchResult = AxiosResponse<WordSearchResponse>\nexport type GetRedactedAudioResult = AxiosResponse<RedactedAudioResponse>\nexport type CreateTemporaryTokenResult = AxiosResponse<RealtimeTemporaryTokenResponse>\nexport type LemurTaskResult = AxiosResponse<LemurTaskResponse>\nexport type LemurSummaryResult = AxiosResponse<LemurSummaryResponse>\nexport type LemurQuestionAnswerResult = AxiosResponse<LemurQuestionAnswerResponse>\nexport type GetLemurResponseResult = AxiosResponse<LemurResponse>\nexport type PurgeLemurRequestDataResult = AxiosResponse<PurgeLemurRequestDataResponse>\n\nexport const getUploadFileResponseMock = (\n overrideResponse: Partial<UploadedFile> = {}\n): UploadedFile => ({ upload_url: faker.internet.url(), ...overrideResponse })\n\nexport const getCreateTranscriptResponseAutoHighlightsResultMock = (\n overrideResponse: Partial<AutoHighlightsResult> = {}\n): AutoHighlightsResult => ({\n ...{\n status: faker.helpers.arrayElement(Object.values(AudioIntelligenceModelStatus)),\n results: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n count: faker.number.int({ min: undefined, max: undefined }),\n rank: faker.number.float(),\n text: faker.string.alpha(20),\n timestamps: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: 
undefined, max: undefined })\n }))\n })\n )\n },\n ...overrideResponse\n})\n\nexport const getCreateTranscriptResponseContentSafetyLabelsResultMock = (\n overrideResponse: Partial<ContentSafetyLabelsResult> = {}\n): ContentSafetyLabelsResult => ({\n ...{\n status: faker.helpers.arrayElement(Object.values(AudioIntelligenceModelStatus)),\n results: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n text: faker.string.alpha(20),\n labels: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n label: faker.string.alpha(20),\n confidence: faker.number.float(),\n severity: faker.number.float()\n })\n ),\n sentences_idx_start: faker.number.int({ min: undefined, max: undefined }),\n sentences_idx_end: faker.number.int({ min: undefined, max: undefined }),\n timestamp: {\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined })\n }\n })\n ),\n summary: {\n [faker.string.alphanumeric(5)]: faker.number.float()\n },\n severity_score_summary: {\n [faker.string.alphanumeric(5)]: {\n low: faker.number.float(),\n medium: faker.number.float(),\n high: faker.number.float()\n }\n }\n },\n ...overrideResponse\n})\n\nexport const getCreateTranscriptResponseTopicDetectionModelResultMock = (\n overrideResponse: Partial<TopicDetectionModelResult> = {}\n): TopicDetectionModelResult => ({\n ...{\n status: faker.helpers.arrayElement(Object.values(AudioIntelligenceModelStatus)),\n results: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n text: faker.string.alpha(20),\n labels: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({ relevance: faker.number.float(), label: faker.string.alpha(20) })\n ),\n undefined\n ]),\n timestamp: faker.helpers.arrayElement([\n {\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined })\n },\n undefined\n ])\n })\n ),\n summary: {\n [faker.string.alphanumeric(5)]: faker.number.float()\n }\n },\n ...overrideResponse\n})\n\nexport const getCreateTranscriptResponseMock = (\n overrideResponse: Partial<Transcript> = {}\n): Transcript => ({\n id: faker.string.uuid(),\n audio_url: faker.internet.url(),\n status: faker.helpers.arrayElement(Object.values(TranscriptStatus)),\n language_code: faker.helpers.arrayElement([\n faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranscriptLanguageCode)),\n faker.string.alpha(20)\n ]),\n undefined\n ]),\n language_detection: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n language_confidence_threshold: faker.number.float(),\n language_confidence: faker.number.float(),\n speech_model: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SpeechModel)),\n null\n ]),\n text: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n words: faker.helpers.arrayElement([faker.helpers.arrayElement([[], null]), undefined]),\n utterances: faker.helpers.arrayElement([faker.helpers.arrayElement([[], null]), undefined]),\n confidence: faker.helpers.arrayElement([faker.number.float(), undefined]),\n audio_duration: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: undefined, max: undefined }), null]),\n undefined\n ]),\n punctuate: 
faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n format_text: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n disfluencies: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n multichannel: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n audio_channels: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n webhook_url: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n webhook_status_code: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: undefined, max: undefined }), null]),\n undefined\n ]),\n webhook_auth: faker.datatype.boolean(),\n webhook_auth_header_name: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n speed_boost: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n auto_highlights: faker.datatype.boolean(),\n auto_highlights_result: faker.helpers.arrayElement([\n faker.helpers.arrayElement([\n { ...getCreateTranscriptResponseAutoHighlightsResultMock() },\n null\n ]),\n undefined\n ]),\n audio_start_from: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: undefined, max: undefined }), null]),\n undefined\n ]),\n audio_end_at: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: undefined, max: undefined }), null]),\n undefined\n ]),\n word_boost: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ]),\n boost_param: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n filter_profanity: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n redact_pii: faker.datatype.boolean(),\n redact_pii_audio: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n redact_pii_audio_quality: faker.helpers.arrayElement([\n faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(RedactPiiAudioQuality)),\n null\n ]),\n undefined\n ]),\n redact_pii_policies: faker.helpers.arrayElement([\n faker.helpers.arrayElement([[], null]),\n undefined\n ]),\n redact_pii_sub: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SubstitutionPolicy)),\n undefined\n ]),\n speaker_labels: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n speakers_expected: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: undefined, max: undefined }), null]),\n undefined\n ]),\n content_safety: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n content_safety_labels: faker.helpers.arrayElement([\n faker.helpers.arrayElement([\n { ...getCreateTranscriptResponseContentSafetyLabelsResultMock() },\n null\n ]),\n undefined\n ]),\n iab_categories: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n iab_categories_result: faker.helpers.arrayElement([\n faker.helpers.arrayElement([\n { 
...getCreateTranscriptResponseTopicDetectionModelResultMock() },\n null\n ]),\n undefined\n ]),\n custom_spelling: faker.helpers.arrayElement([faker.helpers.arrayElement([[], null]), undefined]),\n keyterms_prompt: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ]),\n prompt: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n auto_chapters: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n chapters: faker.helpers.arrayElement([faker.helpers.arrayElement([[], null]), undefined]),\n summarization: faker.datatype.boolean(),\n summary_type: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n summary_model: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n summary: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n custom_topics: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n topics: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n sentiment_analysis_results: faker.helpers.arrayElement([\n faker.helpers.arrayElement([[], null]),\n undefined\n ]),\n entity_detection: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n entities: faker.helpers.arrayElement([faker.helpers.arrayElement([[], null]), undefined]),\n speech_threshold: faker.helpers.arrayElement([faker.number.float(), undefined]),\n throttled: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n error: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n language_model: faker.string.alpha(20),\n acoustic_model: faker.string.alpha(20),\n ...overrideResponse\n})\n\nexport const getListTranscriptsResponseMock = (\n overrideResponse: Partial<TranscriptList> = {}\n): TranscriptList => ({\n page_details: {\n limit: faker.number.int({ min: undefined, max: undefined }),\n result_count: faker.number.int({ min: undefined, max: undefined }),\n current_url: faker.internet.url(),\n prev_url: faker.internet.url(),\n next_url: faker.internet.url()\n },\n transcripts: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n id: faker.string.uuid(),\n resource_url: faker.internet.url(),\n status: faker.helpers.arrayElement(Object.values(TranscriptStatus)),\n created: faker.helpers.fromRegExp(\"^(?:(\\d{4}-\\d{2}-\\d{2})T(\\d{2}:\\d{2}:\\d{2}(?:\\.\\d+)?))$\"),\n completed: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n audio_url: faker.internet.url(),\n error: faker.helpers.arrayElement([faker.string.alpha(20), null])\n })\n ),\n ...overrideResponse\n})\n\nexport const getGetTranscriptResponseAutoHighlightsResultMock = (\n overrideResponse: Partial<AutoHighlightsResult> = {}\n): AutoHighlightsResult => ({\n ...{\n status: faker.helpers.arrayElement(Object.values(AudioIntelligenceModelStatus)),\n results: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 
1).map(\n () => ({\n count: faker.number.int({ min: undefined, max: undefined }),\n rank: faker.number.float(),\n text: faker.string.alpha(20),\n timestamps: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined })\n }))\n })\n )\n },\n ...overrideResponse\n})\n\nexport const getGetTranscriptResponseContentSafetyLabelsResultMock = (\n overrideResponse: Partial<ContentSafetyLabelsResult> = {}\n): ContentSafetyLabelsResult => ({\n ...{\n status: faker.helpers.arrayElement(Object.values(AudioIntelligenceModelStatus)),\n results: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n text: faker.string.alpha(20),\n labels: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n label: faker.string.alpha(20),\n confidence: faker.number.float(),\n severity: faker.number.float()\n })\n ),\n sentences_idx_start: faker.number.int({ min: undefined, max: undefined }),\n sentences_idx_end: faker.number.int({ min: undefined, max: undefined }),\n timestamp: {\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined })\n }\n })\n ),\n summary: {\n [faker.string.alphanumeric(5)]: faker.number.float()\n },\n severity_score_summary: {\n [faker.string.alphanumeric(5)]: {\n low: faker.number.float(),\n medium: faker.number.float(),\n high: faker.number.float()\n }\n }\n },\n ...overrideResponse\n})\n\nexport const getGetTranscriptResponseTopicDetectionModelResultMock = (\n overrideResponse: Partial<TopicDetectionModelResult> = {}\n): TopicDetectionModelResult => ({\n ...{\n status: faker.helpers.arrayElement(Object.values(AudioIntelligenceModelStatus)),\n results: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n text: faker.string.alpha(20),\n labels: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({ relevance: faker.number.float(), label: faker.string.alpha(20) })\n ),\n undefined\n ]),\n timestamp: faker.helpers.arrayElement([\n {\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined })\n },\n undefined\n ])\n })\n ),\n summary: {\n [faker.string.alphanumeric(5)]: faker.number.float()\n }\n },\n ...overrideResponse\n})\n\nexport const getGetTranscriptResponseMock = (\n overrideResponse: Partial<Transcript> = {}\n): Transcript => ({\n id: faker.string.uuid(),\n audio_url: faker.internet.url(),\n status: faker.helpers.arrayElement(Object.values(TranscriptStatus)),\n language_code: faker.helpers.arrayElement([\n faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranscriptLanguageCode)),\n faker.string.alpha(20)\n ]),\n undefined\n ]),\n language_detection: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n language_confidence_threshold: faker.number.float(),\n language_confidence: faker.number.float(),\n speech_model: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SpeechModel)),\n null\n ]),\n text: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n words: faker.helpers.arrayElement([faker.helpers.arrayElement([[], null]), undefined]),\n utterances: 
faker.helpers.arrayElement([faker.helpers.arrayElement([[], null]), undefined]),\n confidence: faker.helpers.arrayElement([faker.number.float(), undefined]),\n audio_duration: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: undefined, max: undefined }), null]),\n undefined\n ]),\n punctuate: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n format_text: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n disfluencies: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n multichannel: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n audio_channels: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n webhook_url: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n webhook_status_code: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: undefined, max: undefined }), null]),\n undefined\n ]),\n webhook_auth: faker.datatype.boolean(),\n webhook_auth_header_name: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n speed_boost: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n auto_highlights: faker.datatype.boolean(),\n auto_highlights_result: faker.helpers.arrayElement([\n faker.helpers.arrayElement([{ ...getGetTranscriptResponseAutoHighlightsResultMock() }, null]),\n undefined\n ]),\n audio_start_from: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: undefined, max: undefined }), null]),\n undefined\n ]),\n audio_end_at: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: undefined, max: undefined }), null]),\n undefined\n ]),\n word_boost: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ]),\n boost_param: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n filter_profanity: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n redact_pii: faker.datatype.boolean(),\n redact_pii_audio: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n redact_pii_audio_quality: faker.helpers.arrayElement([\n faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(RedactPiiAudioQuality)),\n null\n ]),\n undefined\n ]),\n redact_pii_policies: faker.helpers.arrayElement([\n faker.helpers.arrayElement([[], null]),\n undefined\n ]),\n redact_pii_sub: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SubstitutionPolicy)),\n undefined\n ]),\n speaker_labels: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n speakers_expected: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: undefined, max: undefined }), null]),\n undefined\n ]),\n content_safety: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n content_safety_labels: faker.helpers.arrayElement([\n 
faker.helpers.arrayElement([\n { ...getGetTranscriptResponseContentSafetyLabelsResultMock() },\n null\n ]),\n undefined\n ]),\n iab_categories: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n iab_categories_result: faker.helpers.arrayElement([\n faker.helpers.arrayElement([\n { ...getGetTranscriptResponseTopicDetectionModelResultMock() },\n null\n ]),\n undefined\n ]),\n custom_spelling: faker.helpers.arrayElement([faker.helpers.arrayElement([[], null]), undefined]),\n keyterms_prompt: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ]),\n prompt: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n auto_chapters: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n chapters: faker.helpers.arrayElement([faker.helpers.arrayElement([[], null]), undefined]),\n summarization: faker.datatype.boolean(),\n summary_type: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n summary_model: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n summary: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n custom_topics: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n topics: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n sentiment_analysis_results: faker.helpers.arrayElement([\n faker.helpers.arrayElement([[], null]),\n undefined\n ]),\n entity_detection: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n entities: faker.helpers.arrayElement([faker.helpers.arrayElement([[], null]), undefined]),\n speech_threshold: faker.helpers.arrayElement([faker.number.float(), undefined]),\n throttled: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n error: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n language_model: faker.string.alpha(20),\n acoustic_model: faker.string.alpha(20),\n ...overrideResponse\n})\n\nexport const getDeleteTranscriptResponseAutoHighlightsResultMock = (\n overrideResponse: Partial<AutoHighlightsResult> = {}\n): AutoHighlightsResult => ({\n ...{\n status: faker.helpers.arrayElement(Object.values(AudioIntelligenceModelStatus)),\n results: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n count: faker.number.int({ min: undefined, max: undefined }),\n rank: faker.number.float(),\n text: faker.string.alpha(20),\n timestamps: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined })\n }))\n })\n )\n },\n ...overrideResponse\n})\n\nexport const getDeleteTranscriptResponseContentSafetyLabelsResultMock = (\n overrideResponse: Partial<ContentSafetyLabelsResult> = {}\n): ContentSafetyLabelsResult => ({\n ...{\n 
status: faker.helpers.arrayElement(Object.values(AudioIntelligenceModelStatus)),\n results: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n text: faker.string.alpha(20),\n labels: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n label: faker.string.alpha(20),\n confidence: faker.number.float(),\n severity: faker.number.float()\n })\n ),\n sentences_idx_start: faker.number.int({ min: undefined, max: undefined }),\n sentences_idx_end: faker.number.int({ min: undefined, max: undefined }),\n timestamp: {\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined })\n }\n })\n ),\n summary: {\n [faker.string.alphanumeric(5)]: faker.number.float()\n },\n severity_score_summary: {\n [faker.string.alphanumeric(5)]: {\n low: faker.number.float(),\n medium: faker.number.float(),\n high: faker.number.float()\n }\n }\n },\n ...overrideResponse\n})\n\nexport const getDeleteTranscriptResponseTopicDetectionModelResultMock = (\n overrideResponse: Partial<TopicDetectionModelResult> = {}\n): TopicDetectionModelResult => ({\n ...{\n status: faker.helpers.arrayElement(Object.values(AudioIntelligenceModelStatus)),\n results: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n text: faker.string.alpha(20),\n labels: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({ relevance: faker.number.float(), label: faker.string.alpha(20) })\n ),\n undefined\n ]),\n timestamp: faker.helpers.arrayElement([\n {\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined })\n },\n undefined\n ])\n })\n ),\n summary: {\n [faker.string.alphanumeric(5)]: faker.number.float()\n }\n },\n ...overrideResponse\n})\n\nexport const getDeleteTranscriptResponseMock = (\n overrideResponse: Partial<Transcript> = {}\n): Transcript => ({\n id: faker.string.uuid(),\n audio_url: faker.internet.url(),\n status: faker.helpers.arrayElement(Object.values(TranscriptStatus)),\n language_code: faker.helpers.arrayElement([\n faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranscriptLanguageCode)),\n faker.string.alpha(20)\n ]),\n undefined\n ]),\n language_detection: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n language_confidence_threshold: faker.number.float(),\n language_confidence: faker.number.float(),\n speech_model: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SpeechModel)),\n null\n ]),\n text: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n words: faker.helpers.arrayElement([faker.helpers.arrayElement([[], null]), undefined]),\n utterances: faker.helpers.arrayElement([faker.helpers.arrayElement([[], null]), undefined]),\n confidence: faker.helpers.arrayElement([faker.number.float(), undefined]),\n audio_duration: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: undefined, max: undefined }), null]),\n undefined\n ]),\n punctuate: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n format_text: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n disfluencies: 
faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n multichannel: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n audio_channels: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n webhook_url: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n webhook_status_code: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: undefined, max: undefined }), null]),\n undefined\n ]),\n webhook_auth: faker.datatype.boolean(),\n webhook_auth_header_name: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n speed_boost: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n auto_highlights: faker.datatype.boolean(),\n auto_highlights_result: faker.helpers.arrayElement([\n faker.helpers.arrayElement([\n { ...getDeleteTranscriptResponseAutoHighlightsResultMock() },\n null\n ]),\n undefined\n ]),\n audio_start_from: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: undefined, max: undefined }), null]),\n undefined\n ]),\n audio_end_at: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: undefined, max: undefined }), null]),\n undefined\n ]),\n word_boost: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ]),\n boost_param: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n filter_profanity: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n redact_pii: faker.datatype.boolean(),\n redact_pii_audio: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n redact_pii_audio_quality: faker.helpers.arrayElement([\n faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(RedactPiiAudioQuality)),\n null\n ]),\n undefined\n ]),\n redact_pii_policies: faker.helpers.arrayElement([\n faker.helpers.arrayElement([[], null]),\n undefined\n ]),\n redact_pii_sub: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SubstitutionPolicy)),\n undefined\n ]),\n speaker_labels: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n speakers_expected: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: undefined, max: undefined }), null]),\n undefined\n ]),\n content_safety: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n content_safety_labels: faker.helpers.arrayElement([\n faker.helpers.arrayElement([\n { ...getDeleteTranscriptResponseContentSafetyLabelsResultMock() },\n null\n ]),\n undefined\n ]),\n iab_categories: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n iab_categories_result: faker.helpers.arrayElement([\n faker.helpers.arrayElement([\n { ...getDeleteTranscriptResponseTopicDetectionModelResultMock() },\n null\n ]),\n undefined\n ]),\n custom_spelling: faker.helpers.arrayElement([faker.helpers.arrayElement([[], null]), undefined]),\n keyterms_prompt: faker.helpers.arrayElement([\n 
Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ]),\n prompt: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n auto_chapters: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n chapters: faker.helpers.arrayElement([faker.helpers.arrayElement([[], null]), undefined]),\n summarization: faker.datatype.boolean(),\n summary_type: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n summary_model: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n summary: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n custom_topics: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n topics: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n sentiment_analysis_results: faker.helpers.arrayElement([\n faker.helpers.arrayElement([[], null]),\n undefined\n ]),\n entity_detection: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n entities: faker.helpers.arrayElement([faker.helpers.arrayElement([[], null]), undefined]),\n speech_threshold: faker.helpers.arrayElement([faker.number.float(), undefined]),\n throttled: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n error: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n language_model: faker.string.alpha(20),\n acoustic_model: faker.string.alpha(20),\n ...overrideResponse\n})\n\nexport const getGetSubtitlesResponseMock = (): string => faker.word.sample()\n\nexport const getGetTranscriptSentencesResponseMock = (\n overrideResponse: Partial<SentencesResponse> = {}\n): SentencesResponse => ({\n id: faker.string.uuid(),\n confidence: faker.number.float(),\n audio_duration: faker.number.int({ min: undefined, max: undefined }),\n sentences: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n text: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.float(),\n words: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n confidence: faker.number.float(),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n text: faker.string.alpha(20),\n channel: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n speaker: faker.helpers.arrayElement([faker.string.alpha(20), null])\n })\n ),\n channel: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n speaker: faker.helpers.arrayElement([faker.string.alpha(20), null])\n })\n ),\n ...overrideResponse\n})\n\nexport const getGetTranscriptParagraphsResponseMock = (\n overrideResponse: Partial<ParagraphsResponse> = {}\n): ParagraphsResponse => ({\n id: 
faker.string.uuid(),\n confidence: faker.number.float(),\n audio_duration: faker.number.int({ min: undefined, max: undefined }),\n paragraphs: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n text: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.float(),\n words: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n confidence: faker.number.float(),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n text: faker.string.alpha(20),\n channel: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n speaker: faker.helpers.arrayElement([faker.string.alpha(20), null])\n })\n )\n })\n ),\n ...overrideResponse\n})\n\nexport const getWordSearchResponseMock = (\n overrideResponse: Partial<WordSearchResponse> = {}\n): WordSearchResponse => ({\n id: faker.string.uuid(),\n total_count: faker.number.int({ min: undefined, max: undefined }),\n matches: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n text: faker.string.alpha(20),\n count: faker.number.int({ min: undefined, max: undefined }),\n timestamps: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() =>\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.number.int({ min: undefined, max: undefined })\n )\n ),\n indexes: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => faker.number.int({ min: undefined, max: undefined })\n )\n })\n ),\n ...overrideResponse\n})\n\nexport const getGetRedactedAudioResponseMock = (\n overrideResponse: Partial<RedactedAudioResponse> = {}\n): RedactedAudioResponse => ({\n status: faker.helpers.arrayElement(Object.values(RedactedAudioStatus)),\n redacted_audio_url: faker.internet.url(),\n ...overrideResponse\n})\n\nexport const getCreateTemporaryTokenResponseMock = (\n overrideResponse: Partial<RealtimeTemporaryTokenResponse> = {}\n): RealtimeTemporaryTokenResponse => ({ token: faker.string.alpha(20), ...overrideResponse })\n\nexport const getLemurTaskResponseMock = (): LemurTaskResponse => ({\n ...{\n ...{ response: faker.string.alpha(20) },\n ...{\n request_id: faker.string.uuid(),\n usage: {\n input_tokens: faker.number.int({ min: 0, max: undefined }),\n output_tokens: faker.number.int({ min: 0, max: undefined })\n }\n }\n }\n})\n\nexport const getLemurSummaryResponseMock = (): LemurSummaryResponse => ({\n ...{\n ...{ response: faker.string.alpha(20) },\n ...{\n request_id: faker.string.uuid(),\n usage: {\n input_tokens: faker.number.int({ min: 0, max: undefined }),\n output_tokens: faker.number.int({ min: 0, max: undefined })\n }\n }\n }\n})\n\nexport const getLemurQuestionAnswerResponseMock = (): LemurQuestionAnswerResponse => ({\n ...{\n request_id: faker.string.uuid(),\n usage: {\n input_tokens: faker.number.int({ min: 0, max: undefined }),\n output_tokens: faker.number.int({ min: 0, max: undefined })\n }\n },\n ...{\n response: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({ question: faker.string.alpha(20), answer: faker.string.alpha(20) })\n )\n }\n})\n\nexport const getGetLemurResponseResponseLemurStringResponseMock = (\n overrideResponse: 
Partial<LemurStringResponse> = {}\n): LemurStringResponse => ({\n ...{\n ...{ response: faker.string.alpha(20) },\n ...{\n request_id: faker.string.uuid(),\n usage: {\n input_tokens: faker.number.int({ min: 0, max: undefined }),\n output_tokens: faker.number.int({ min: 0, max: undefined })\n }\n }\n },\n ...overrideResponse\n})\n\nexport const getGetLemurResponseResponseLemurQuestionAnswerResponseMock = (\n overrideResponse: Partial<LemurQuestionAnswerResponse> = {}\n): LemurQuestionAnswerResponse => ({\n ...{\n ...{\n request_id: faker.string.uuid(),\n usage: {\n input_tokens: faker.number.int({ min: 0, max: undefined }),\n output_tokens: faker.number.int({ min: 0, max: undefined })\n }\n },\n ...{\n response: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({ question: faker.string.alpha(20), answer: faker.string.alpha(20) })\n )\n }\n },\n ...overrideResponse\n})\n\nexport const getGetLemurResponseResponseMock = (): LemurResponse =>\n faker.helpers.arrayElement([\n { ...getGetLemurResponseResponseLemurStringResponseMock() },\n { ...getGetLemurResponseResponseLemurQuestionAnswerResponseMock() }\n ])\n\nexport const getPurgeLemurRequestDataResponseMock = (\n overrideResponse: Partial<PurgeLemurRequestDataResponse> = {}\n): PurgeLemurRequestDataResponse => ({\n request_id: faker.string.uuid(),\n request_id_to_purge: faker.string.uuid(),\n deleted: faker.datatype.boolean(),\n ...overrideResponse\n})\n\nexport const getUploadFileMockHandler = (\n overrideResponse?:\n | UploadedFile\n | ((\n info: Parameters<Parameters<typeof http.post>[1]>[0]\n ) => Promise<UploadedFile> | UploadedFile)\n) => {\n return http.post(\"https://api.assemblyai.com/v2/upload\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getUploadFileResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getCreateTranscriptMockHandler = (\n overrideResponse?:\n | Transcript\n | ((info: Parameters<Parameters<typeof http.post>[1]>[0]) => Promise<Transcript> | Transcript)\n) => {\n return http.post(\"https://api.assemblyai.com/v2/transcript\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getCreateTranscriptResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getListTranscriptsMockHandler = (\n overrideResponse?:\n | TranscriptList\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<TranscriptList> | TranscriptList)\n) => {\n return http.get(\"https://api.assemblyai.com/v2/transcript\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getListTranscriptsResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getGetTranscriptMockHandler = (\n overrideResponse?:\n | Transcript\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<Transcript> | Transcript)\n) => {\n return http.get(\"https://api.assemblyai.com/v2/transcript/:transcriptId\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getGetTranscriptResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getDeleteTranscriptMockHandler = (\n overrideResponse?:\n | Transcript\n | ((info: Parameters<Parameters<typeof http.delete>[1]>[0]) => Promise<Transcript> | Transcript)\n) => {\n return http.delete(\"https://api.assemblyai.com/v2/transcript/:transcriptId\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getDeleteTranscriptResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getGetSubtitlesMockHandler = (\n overrideResponse?:\n | string\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<string> | string)\n) => {\n return http.get(\n \"https://api.assemblyai.com/v2/transcript/:transcriptId/:subtitleFormat\",\n async (info) => {\n await delay(1000)\n\n return new HttpResponse(getGetSubtitlesResponseMock(), {\n status: 200,\n headers: { \"Content-Type\": \"text/plain\" }\n })\n }\n )\n}\n\nexport const getGetTranscriptSentencesMockHandler = (\n overrideResponse?:\n | SentencesResponse\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<SentencesResponse> | SentencesResponse)\n) => {\n return http.get(\n \"https://api.assemblyai.com/v2/transcript/:transcriptId/sentences\",\n async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getGetTranscriptSentencesResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n }\n )\n}\n\nexport const getGetTranscriptParagraphsMockHandler = (\n overrideResponse?:\n | ParagraphsResponse\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<ParagraphsResponse> | ParagraphsResponse)\n) => {\n return http.get(\n \"https://api.assemblyai.com/v2/transcript/:transcriptId/paragraphs\",\n async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getGetTranscriptParagraphsResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n }\n )\n}\n\nexport const getWordSearchMockHandler = (\n overrideResponse?:\n | WordSearchResponse\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<WordSearchResponse> | WordSearchResponse)\n) => {\n return http.get(\n \"https://api.assemblyai.com/v2/transcript/:transcriptId/word-search\",\n async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getWordSearchResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n }\n )\n}\n\nexport const getGetRedactedAudioMockHandler = (\n overrideResponse?:\n | RedactedAudioResponse\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<RedactedAudioResponse> | RedactedAudioResponse)\n) => {\n return http.get(\n \"https://api.assemblyai.com/v2/transcript/:transcriptId/redacted-audio\",\n async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getGetRedactedAudioResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n }\n )\n}\n\nexport const getCreateTemporaryTokenMockHandler = (\n overrideResponse?:\n | RealtimeTemporaryTokenResponse\n | ((\n info: Parameters<Parameters<typeof http.post>[1]>[0]\n ) => Promise<RealtimeTemporaryTokenResponse> | RealtimeTemporaryTokenResponse)\n) => {\n return http.post(\"https://api.assemblyai.com/v2/realtime/token\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getCreateTemporaryTokenResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getLemurTaskMockHandler = (\n overrideResponse?:\n | LemurTaskResponse\n | ((\n info: Parameters<Parameters<typeof http.post>[1]>[0]\n ) => Promise<LemurTaskResponse> | LemurTaskResponse)\n) => {\n return http.post(\"https://api.assemblyai.com/lemur/v3/generate/task\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getLemurTaskResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getLemurSummaryMockHandler = (\n overrideResponse?:\n | LemurSummaryResponse\n | ((\n info: Parameters<Parameters<typeof http.post>[1]>[0]\n ) => Promise<LemurSummaryResponse> | LemurSummaryResponse)\n) => {\n return http.post(\"https://api.assemblyai.com/lemur/v3/generate/summary\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getLemurSummaryResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getLemurQuestionAnswerMockHandler = (\n overrideResponse?:\n | LemurQuestionAnswerResponse\n | ((\n info: Parameters<Parameters<typeof http.post>[1]>[0]\n ) => Promise<LemurQuestionAnswerResponse> | LemurQuestionAnswerResponse)\n) => {\n return http.post(\"https://api.assemblyai.com/lemur/v3/generate/question-answer\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getLemurQuestionAnswerResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getGetLemurResponseMockHandler = (\n overrideResponse?:\n | LemurResponse\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<LemurResponse> | LemurResponse)\n) => {\n return http.get(\"https://api.assemblyai.com/lemur/v3/:requestId\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getGetLemurResponseResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getPurgeLemurRequestDataMockHandler = (\n overrideResponse?:\n | PurgeLemurRequestDataResponse\n | ((\n info: Parameters<Parameters<typeof http.delete>[1]>[0]\n ) => Promise<PurgeLemurRequestDataResponse> | PurgeLemurRequestDataResponse)\n) => {\n return http.delete(\"https://api.assemblyai.com/lemur/v3/:requestId\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getPurgeLemurRequestDataResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\nexport const getAssemblyAIAPIMock = () => [\n getUploadFileMockHandler(),\n getCreateTranscriptMockHandler(),\n getListTranscriptsMockHandler(),\n getGetTranscriptMockHandler(),\n getDeleteTranscriptMockHandler(),\n getGetSubtitlesMockHandler(),\n getGetTranscriptSentencesMockHandler(),\n getGetTranscriptParagraphsMockHandler(),\n getWordSearchMockHandler(),\n getGetRedactedAudioMockHandler(),\n getCreateTemporaryTokenMockHandler(),\n getLemurTaskMockHandler(),\n getLemurSummaryMockHandler(),\n getLemurQuestionAnswerMockHandler(),\n getGetLemurResponseMockHandler(),\n getPurgeLemurRequestDataMockHandler()\n]\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * AssemblyAI API\n * AssemblyAI API\n * OpenAPI spec version: 1.3.4\n */\n\nexport * from \"./audioIntelligenceModelStatus\"\nexport * from \"./autoHighlightResult\"\nexport * from \"./autoHighlightsResult\"\nexport * from \"./badRequestResponse\"\nexport * from \"./cannotAccessUploadedFileResponse\"\nexport * from \"./chapter\"\nexport * from \"./contentSafetyLabel\"\nexport * from \"./contentSafetyLabelResult\"\nexport * from \"./contentSafetyLabelsResult\"\nexport * from \"./contentSafetyLabelsResultSeverityScoreSummary\"\nexport * from \"./contentSafetyLabelsResultSummary\"\nexport * from \"./createRealtimeTemporaryTokenParams\"\nexport * from \"./entity\"\nexport * from \"./entityType\"\nexport * from \"./error\"\nexport * from \"./gatewayTimeoutResponse\"\nexport * from \"./getSubtitlesParams\"\nexport * from \"./internalServerErrorResponse\"\nexport * from \"./lemurActionItemsParams\"\nexport * from \"./lemurActionItemsParamsAllOf\"\nexport * from \"./lemurActionItemsResponse\"\nexport * from \"./lemurBaseParams\"\nexport * from \"./lemurBaseParamsContext\"\nexport * from \"./lemurBaseParamsContextOneOf\"\nexport * from \"./lemurBaseParamsFinalModel\"\nexport * from \"./lemurBaseResponse\"\nexport * from \"./lemurModel\"\nexport * from \"./lemurQuestion\"\nexport * from \"./lemurQuestionAnswer\"\nexport * from \"./lemurQuestionAnswerParams\"\nexport * from \"./lemurQuestionAnswerParamsAllOf\"\nexport * from \"./lemurQuestionAnswerResponse\"\nexport * from \"./lemurQuestionAnswerResponseAllOf\"\nexport * from \"./lemurQuestionContext\"\nexport * from \"./lemurQuestionContextOneOf\"\nexport * from \"./lemurResponse\"\nexport * from \"./lemurStringResponse\"\nexport * from \"./lemurStringResponseAllOf\"\nexport * from \"./lemurSummaryParams\"\nexport * from \"./lemurSummaryParamsAllOf\"\nexport * from \"./lemurSummaryResponse\"\nexport * from \"./lemurTaskParams\"\nexport * from \"./lemurTaskParamsAllOf\"\nexport * from \"./lemurTaskResponse\"\nexport * from \"./lemurUsage\"\nexport * from \"./listTranscriptParams\"\nexport * from \"./listTranscriptsParams\"\nexport * from \"./notFoundResponse\"\nexport * from \"./pageDetails\"\nexport * from \"./pageDetailsNextUrl\"\nexport * from \"./pageDetailsPrevUrl\"\nexport * from \"./paragraphsResponse\"\nexport * from \"./piiPolicy\"\nexport * from \"./purgeLemurRequestDataResponse\"\nexport * from \"./realtimeTemporaryTokenResponse\"\nexport * from \"./redactedAudioNotification\"\nexport * from \"./redactedAudioResponse\"\nexport * from \"./redactedAudioStatus\"\nexport * from \"./redactPiiAudioQuality\"\nexport * from \"./sentencesResponse\"\nexport * from \"./sentiment\"\nexport * from 
\"./sentimentAnalysisResult\"\nexport * from \"./sentimentAnalysisResultChannel\"\nexport * from \"./sentimentAnalysisResultSpeaker\"\nexport * from \"./serviceUnavailableResponse\"\nexport * from \"./severityScoreSummary\"\nexport * from \"./speechModel\"\nexport * from \"./substitutionPolicy\"\nexport * from \"./subtitleFormat\"\nexport * from \"./summaryModel\"\nexport * from \"./summaryType\"\nexport * from \"./timestamp\"\nexport * from \"./tooManyRequestsResponse\"\nexport * from \"./topicDetectionModelResult\"\nexport * from \"./topicDetectionModelResultSummary\"\nexport * from \"./topicDetectionResult\"\nexport * from \"./topicDetectionResultLabelsItem\"\nexport * from \"./transcript\"\nexport * from \"./transcriptAudioDuration\"\nexport * from \"./transcriptAudioEndAt\"\nexport * from \"./transcriptAudioStartFrom\"\nexport * from \"./transcriptAutoChapters\"\nexport * from \"./transcriptAutoHighlightsResult\"\nexport * from \"./transcriptBoostParam\"\nexport * from \"./transcriptBoostParamProperty\"\nexport * from \"./transcriptChapters\"\nexport * from \"./transcriptConfidence\"\nexport * from \"./transcriptContentSafety\"\nexport * from \"./transcriptContentSafetyLabels\"\nexport * from \"./transcriptCustomSpelling\"\nexport * from \"./transcriptCustomSpellingProperty\"\nexport * from \"./transcriptCustomTopics\"\nexport * from \"./transcriptDisfluencies\"\nexport * from \"./transcriptEntities\"\nexport * from \"./transcriptEntityDetection\"\nexport * from \"./transcriptFilterProfanity\"\nexport * from \"./transcriptFormatText\"\nexport * from \"./transcriptIabCategories\"\nexport * from \"./transcriptIabCategoriesResult\"\nexport * from \"./transcriptLanguageCode\"\nexport * from \"./transcriptLanguageCodeProperty\"\nexport * from \"./transcriptLanguageConfidence\"\nexport * from \"./transcriptLanguageConfidenceThreshold\"\nexport * from \"./transcriptLanguageDetection\"\nexport * from \"./transcriptList\"\nexport * from \"./transcriptListItem\"\nexport * from \"./transcriptListItemCompleted\"\nexport * from \"./transcriptListItemError\"\nexport * from \"./transcriptMultichannel\"\nexport * from \"./transcriptOptionalParams\"\nexport * from \"./transcriptOptionalParamsLanguageCode\"\nexport * from \"./transcriptOptionalParamsLanguageCodeOneOf\"\nexport * from \"./transcriptOptionalParamsRedactPiiSub\"\nexport * from \"./transcriptOptionalParamsSpeakersExpected\"\nexport * from \"./transcriptOptionalParamsSpeechModel\"\nexport * from \"./transcriptOptionalParamsSpeechThreshold\"\nexport * from \"./transcriptOptionalParamsWebhookAuthHeaderName\"\nexport * from \"./transcriptOptionalParamsWebhookAuthHeaderValue\"\nexport * from \"./transcriptParagraph\"\nexport * from \"./transcriptParams\"\nexport * from \"./transcriptParamsAllOf\"\nexport * from \"./transcriptPunctuate\"\nexport * from \"./transcriptReadyNotification\"\nexport * from \"./transcriptReadyStatus\"\nexport * from \"./transcriptRedactPiiAudio\"\nexport * from \"./transcriptRedactPiiAudioQuality\"\nexport * from \"./transcriptRedactPiiPolicies\"\nexport * from \"./transcriptSentence\"\nexport * from \"./transcriptSentenceChannel\"\nexport * from \"./transcriptSentenceSpeaker\"\nexport * from \"./transcriptSentimentAnalysis\"\nexport * from \"./transcriptSentimentAnalysisResults\"\nexport * from \"./transcriptSpeakerLabels\"\nexport * from \"./transcriptSpeakersExpected\"\nexport * from \"./transcriptSpeechModel\"\nexport * from \"./transcriptSpeechThreshold\"\nexport * from \"./transcriptSpeedBoost\"\nexport * from 
\"./transcriptStatus\"\nexport * from \"./transcriptSummary\"\nexport * from \"./transcriptSummaryModel\"\nexport * from \"./transcriptSummaryType\"\nexport * from \"./transcriptText\"\nexport * from \"./transcriptThrottled\"\nexport * from \"./transcriptUtterance\"\nexport * from \"./transcriptUtteranceChannel\"\nexport * from \"./transcriptUtterances\"\nexport * from \"./transcriptWebhookAuthHeaderName\"\nexport * from \"./transcriptWebhookNotification\"\nexport * from \"./transcriptWebhookStatusCode\"\nexport * from \"./transcriptWebhookUrl\"\nexport * from \"./transcriptWord\"\nexport * from \"./transcriptWordChannel\"\nexport * from \"./transcriptWordSpeaker\"\nexport * from \"./transcriptWords\"\nexport * from \"./unauthorizedResponse\"\nexport * from \"./uploadedFile\"\nexport * from \"./wordSearchMatch\"\nexport * from \"./wordSearchParams\"\nexport * from \"./wordSearchResponse\"\nexport * from \"./wordSearchTimestamp\"\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * AssemblyAI API\n * AssemblyAI API\n * OpenAPI spec version: 1.3.4\n */\n\n/**\n * Either success, or unavailable in the rare case that the model failed\n */\nexport type AudioIntelligenceModelStatus =\n (typeof AudioIntelligenceModelStatus)[keyof typeof AudioIntelligenceModelStatus]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const AudioIntelligenceModelStatus = {\n success: \"success\",\n unavailable: \"unavailable\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * AssemblyAI API\n * AssemblyAI API\n * OpenAPI spec version: 1.3.4\n */\n\n/**\n * The type of entity for the detected entity\n */\nexport type EntityType = (typeof EntityType)[keyof typeof EntityType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const EntityType = {\n account_number: \"account_number\",\n banking_information: \"banking_information\",\n blood_type: \"blood_type\",\n credit_card_cvv: \"credit_card_cvv\",\n credit_card_expiration: \"credit_card_expiration\",\n credit_card_number: \"credit_card_number\",\n date: \"date\",\n date_interval: \"date_interval\",\n date_of_birth: \"date_of_birth\",\n drivers_license: \"drivers_license\",\n drug: \"drug\",\n duration: \"duration\",\n email_address: \"email_address\",\n event: \"event\",\n filename: \"filename\",\n gender_sexuality: \"gender_sexuality\",\n healthcare_number: \"healthcare_number\",\n injury: \"injury\",\n ip_address: \"ip_address\",\n language: \"language\",\n location: \"location\",\n marital_status: \"marital_status\",\n medical_condition: \"medical_condition\",\n medical_process: \"medical_process\",\n money_amount: \"money_amount\",\n nationality: \"nationality\",\n number_sequence: \"number_sequence\",\n occupation: \"occupation\",\n organization: \"organization\",\n passport_number: \"passport_number\",\n password: \"password\",\n person_age: \"person_age\",\n person_name: \"person_name\",\n phone_number: \"phone_number\",\n physical_attribute: \"physical_attribute\",\n political_affiliation: \"political_affiliation\",\n religion: \"religion\",\n statistics: \"statistics\",\n time: \"time\",\n url: \"url\",\n us_social_security_number: \"us_social_security_number\",\n username: \"username\",\n vehicle_id: \"vehicle_id\",\n zodiac_sign: \"zodiac_sign\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * AssemblyAI API\n * AssemblyAI API\n * OpenAPI spec version: 1.3.4\n */\n\n/**\n * The model that is used for the final prompt after compression 
is performed.\n\n */\nexport type LemurModel = (typeof LemurModel)[keyof typeof LemurModel]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const LemurModel = {\n \"anthropic/claude-3-5-sonnet\": \"anthropic/claude-3-5-sonnet\",\n \"anthropic/claude-3-opus\": \"anthropic/claude-3-opus\",\n \"anthropic/claude-3-haiku\": \"anthropic/claude-3-haiku\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * AssemblyAI API\n * AssemblyAI API\n * OpenAPI spec version: 1.3.4\n */\n\n/**\n * The type of PII to redact\n */\nexport type PiiPolicy = (typeof PiiPolicy)[keyof typeof PiiPolicy]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const PiiPolicy = {\n account_number: \"account_number\",\n banking_information: \"banking_information\",\n blood_type: \"blood_type\",\n credit_card_cvv: \"credit_card_cvv\",\n credit_card_expiration: \"credit_card_expiration\",\n credit_card_number: \"credit_card_number\",\n date: \"date\",\n date_interval: \"date_interval\",\n date_of_birth: \"date_of_birth\",\n drivers_license: \"drivers_license\",\n drug: \"drug\",\n duration: \"duration\",\n email_address: \"email_address\",\n event: \"event\",\n filename: \"filename\",\n gender_sexuality: \"gender_sexuality\",\n healthcare_number: \"healthcare_number\",\n injury: \"injury\",\n ip_address: \"ip_address\",\n language: \"language\",\n location: \"location\",\n marital_status: \"marital_status\",\n medical_condition: \"medical_condition\",\n medical_process: \"medical_process\",\n money_amount: \"money_amount\",\n nationality: \"nationality\",\n number_sequence: \"number_sequence\",\n occupation: \"occupation\",\n organization: \"organization\",\n passport_number: \"passport_number\",\n password: \"password\",\n person_age: \"person_age\",\n person_name: \"person_name\",\n phone_number: \"phone_number\",\n physical_attribute: \"physical_attribute\",\n political_affiliation: \"political_affiliation\",\n religion: \"religion\",\n statistics: \"statistics\",\n time: \"time\",\n url: \"url\",\n us_social_security_number: \"us_social_security_number\",\n username: \"username\",\n vehicle_id: \"vehicle_id\",\n zodiac_sign: \"zodiac_sign\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * AssemblyAI API\n * AssemblyAI API\n * OpenAPI spec version: 1.3.4\n */\n\n/**\n * The status of the redacted audio\n */\nexport type RedactedAudioStatus = (typeof RedactedAudioStatus)[keyof typeof RedactedAudioStatus]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const RedactedAudioStatus = {\n redacted_audio_ready: \"redacted_audio_ready\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * AssemblyAI API\n * AssemblyAI API\n * OpenAPI spec version: 1.3.4\n */\n\n/**\n * Controls the filetype of the audio created by redact_pii_audio. Currently supports mp3 (default) and wav. 
See [PII redaction](https://www.assemblyai.com/docs/models/pii-redaction) for more details.\n */\nexport type RedactPiiAudioQuality =\n (typeof RedactPiiAudioQuality)[keyof typeof RedactPiiAudioQuality]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const RedactPiiAudioQuality = {\n mp3: \"mp3\",\n wav: \"wav\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * AssemblyAI API\n * AssemblyAI API\n * OpenAPI spec version: 1.3.4\n */\n\nexport type Sentiment = (typeof Sentiment)[keyof typeof Sentiment]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const Sentiment = {\n POSITIVE: \"POSITIVE\",\n NEUTRAL: \"NEUTRAL\",\n NEGATIVE: \"NEGATIVE\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * AssemblyAI API\n * AssemblyAI API\n * OpenAPI spec version: 1.3.4\n */\n\n/**\n * The speech model to use for the transcription.\n */\nexport type SpeechModel = (typeof SpeechModel)[keyof typeof SpeechModel]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const SpeechModel = {\n best: \"best\",\n \"slam-1\": \"slam-1\",\n universal: \"universal\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * AssemblyAI API\n * AssemblyAI API\n * OpenAPI spec version: 1.3.4\n */\n\n/**\n * The replacement logic for detected PII, can be \"entity_name\" or \"hash\". See [PII redaction](https://www.assemblyai.com/docs/models/pii-redaction) for more details.\n */\nexport type SubstitutionPolicy = (typeof SubstitutionPolicy)[keyof typeof SubstitutionPolicy]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const SubstitutionPolicy = {\n entity_name: \"entity_name\",\n hash: \"hash\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * AssemblyAI API\n * AssemblyAI API\n * OpenAPI spec version: 1.3.4\n */\n\n/**\n * Format of the subtitles\n */\nexport type SubtitleFormat = (typeof SubtitleFormat)[keyof typeof SubtitleFormat]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const SubtitleFormat = {\n srt: \"srt\",\n vtt: \"vtt\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * AssemblyAI API\n * AssemblyAI API\n * OpenAPI spec version: 1.3.4\n */\n\n/**\n * The model to summarize the transcript\n */\nexport type SummaryModel = (typeof SummaryModel)[keyof typeof SummaryModel]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const SummaryModel = {\n informative: \"informative\",\n conversational: \"conversational\",\n catchy: \"catchy\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * AssemblyAI API\n * AssemblyAI API\n * OpenAPI spec version: 1.3.4\n */\n\n/**\n * The type of summary\n */\nexport type SummaryType = (typeof SummaryType)[keyof typeof SummaryType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const SummaryType = {\n bullets: \"bullets\",\n bullets_verbose: \"bullets_verbose\",\n gist: \"gist\",\n headline: \"headline\",\n paragraph: \"paragraph\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * AssemblyAI API\n * AssemblyAI API\n * OpenAPI spec version: 1.3.4\n */\n\n/**\n * How much to boost specified words\n */\nexport type TranscriptBoostParam = (typeof TranscriptBoostParam)[keyof typeof TranscriptBoostParam]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TranscriptBoostParam = {\n low: \"low\",\n 
default: \"default\",\n high: \"high\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * AssemblyAI API\n * AssemblyAI API\n * OpenAPI spec version: 1.3.4\n */\n\n/**\n * The language of your audio file. Possible values are found in [Supported Languages](https://www.assemblyai.com/docs/concepts/supported-languages).\nThe default value is 'en_us'.\n\n */\nexport type TranscriptLanguageCode =\n (typeof TranscriptLanguageCode)[keyof typeof TranscriptLanguageCode]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TranscriptLanguageCode = {\n en: \"en\",\n en_au: \"en_au\",\n en_uk: \"en_uk\",\n en_us: \"en_us\",\n es: \"es\",\n fr: \"fr\",\n de: \"de\",\n it: \"it\",\n pt: \"pt\",\n nl: \"nl\",\n af: \"af\",\n sq: \"sq\",\n am: \"am\",\n ar: \"ar\",\n hy: \"hy\",\n as: \"as\",\n az: \"az\",\n ba: \"ba\",\n eu: \"eu\",\n be: \"be\",\n bn: \"bn\",\n bs: \"bs\",\n br: \"br\",\n bg: \"bg\",\n my: \"my\",\n ca: \"ca\",\n zh: \"zh\",\n hr: \"hr\",\n cs: \"cs\",\n da: \"da\",\n et: \"et\",\n fo: \"fo\",\n fi: \"fi\",\n gl: \"gl\",\n ka: \"ka\",\n el: \"el\",\n gu: \"gu\",\n ht: \"ht\",\n ha: \"ha\",\n haw: \"haw\",\n he: \"he\",\n hi: \"hi\",\n hu: \"hu\",\n is: \"is\",\n id: \"id\",\n ja: \"ja\",\n jw: \"jw\",\n kn: \"kn\",\n kk: \"kk\",\n km: \"km\",\n ko: \"ko\",\n lo: \"lo\",\n la: \"la\",\n lv: \"lv\",\n ln: \"ln\",\n lt: \"lt\",\n lb: \"lb\",\n mk: \"mk\",\n mg: \"mg\",\n ms: \"ms\",\n ml: \"ml\",\n mt: \"mt\",\n mi: \"mi\",\n mr: \"mr\",\n mn: \"mn\",\n ne: \"ne\",\n no: \"no\",\n nn: \"nn\",\n oc: \"oc\",\n pa: \"pa\",\n ps: \"ps\",\n fa: \"fa\",\n pl: \"pl\",\n ro: \"ro\",\n ru: \"ru\",\n sa: \"sa\",\n sr: \"sr\",\n sn: \"sn\",\n sd: \"sd\",\n si: \"si\",\n sk: \"sk\",\n sl: \"sl\",\n so: \"so\",\n su: \"su\",\n sw: \"sw\",\n sv: \"sv\",\n tl: \"tl\",\n tg: \"tg\",\n ta: \"ta\",\n tt: \"tt\",\n te: \"te\",\n th: \"th\",\n bo: \"bo\",\n tr: \"tr\",\n tk: \"tk\",\n uk: \"uk\",\n ur: \"ur\",\n uz: \"uz\",\n vi: \"vi\",\n cy: \"cy\",\n yi: \"yi\",\n yo: \"yo\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * AssemblyAI API\n * AssemblyAI API\n * OpenAPI spec version: 1.3.4\n */\n\n/**\n * The status of the transcript. Either completed or error.\n */\nexport type TranscriptReadyStatus =\n (typeof TranscriptReadyStatus)[keyof typeof TranscriptReadyStatus]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TranscriptReadyStatus = {\n completed: \"completed\",\n error: \"error\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * AssemblyAI API\n * AssemblyAI API\n * OpenAPI spec version: 1.3.4\n */\n\n/**\n * The status of your transcript. 
Possible values are queued, processing, completed, or error.\n */\nexport type TranscriptStatus = (typeof TranscriptStatus)[keyof typeof TranscriptStatus]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TranscriptStatus = {\n queued: \"queued\",\n processing: \"processing\",\n completed: \"completed\",\n error: \"error\"\n} as const\n","/**\n * Deepgram transcription provider adapter\n * Documentation: https://developers.deepgram.com/\n */\n\nimport axios, { type AxiosInstance } from \"axios\"\nimport WebSocket from \"ws\"\nimport type {\n AudioChunk,\n AudioInput,\n ProviderCapabilities,\n StreamingCallbacks,\n StreamingOptions,\n StreamingSession,\n TranscribeOptions,\n UnifiedTranscriptResponse\n} from \"../router/types\"\nimport { BaseAdapter, type ProviderConfig } from \"./base-adapter\"\n\n// Import Deepgram generated types\nimport type { ListenV1Response } from \"../generated/deepgram/schema/listenV1Response\"\nimport type { ListenV1MediaTranscribeParams } from \"../generated/deepgram/schema/listenV1MediaTranscribeParams\"\nimport type { ListenV1ResponseResultsChannelsItemAlternativesItem } from \"../generated/deepgram/schema/listenV1ResponseResultsChannelsItemAlternativesItem\"\nimport type { ListenV1ResponseResultsChannelsItemAlternativesItemWordsItem } from \"../generated/deepgram/schema/listenV1ResponseResultsChannelsItemAlternativesItemWordsItem\"\nimport type { ListenV1ResponseResultsUtterancesItem } from \"../generated/deepgram/schema/listenV1ResponseResultsUtterancesItem\"\n\n// WebSocket message types (not in OpenAPI spec, manually defined from Deepgram docs)\ninterface DeepgramResultsMessage {\n type: \"Results\"\n is_final: boolean\n channel: {\n alternatives: Array<{\n transcript: string\n confidence: number\n words?: Array<{\n word: string\n start: number\n end: number\n confidence: number\n }>\n }>\n }\n}\n\ninterface DeepgramUtteranceEndMessage {\n type: \"UtteranceEnd\"\n [key: string]: unknown\n}\n\ninterface DeepgramMetadataMessage {\n type: \"Metadata\"\n [key: string]: unknown\n}\n\ntype DeepgramRealtimeMessage =\n | DeepgramResultsMessage\n | DeepgramUtteranceEndMessage\n | DeepgramMetadataMessage\n\n/**\n * Deepgram transcription provider adapter\n *\n * Implements transcription for the Deepgram API with support for:\n * - Synchronous pre-recorded transcription\n * - Real-time streaming transcription (WebSocket)\n * - Speaker diarization (identifying different speakers)\n * - Multi-language detection and transcription\n * - Summarization and sentiment analysis\n * - Entity detection and intent recognition\n * - Custom vocabulary (keywords)\n * - Word-level timestamps with high precision\n * - PII redaction\n *\n * @see https://developers.deepgram.com/ Deepgram API Documentation\n *\n * @example Basic transcription\n * ```typescript\n * import { DeepgramAdapter } from '@meeting-baas/sdk';\n *\n * const adapter = new DeepgramAdapter();\n * adapter.initialize({\n * apiKey: process.env.DEEPGRAM_API_KEY\n * });\n *\n * const result = await adapter.transcribe({\n * type: 'url',\n * url: 'https://example.com/audio.mp3'\n * }, {\n * language: 'en',\n * diarization: true\n * });\n *\n * console.log(result.data.text);\n * console.log(result.data.speakers);\n * ```\n *\n * @example With advanced features\n * ```typescript\n * const result = await adapter.transcribe(audio, {\n * language: 'en',\n * diarization: true,\n * summarization: true,\n * sentimentAnalysis: true,\n * entityDetection: true,\n * customVocabulary: ['TypeScript', 
'JavaScript', 'API']\n * });\n *\n * console.log('Summary:', result.data.summary);\n * console.log('Sentiment:', result.data.metadata?.sentiment);\n * ```\n */\nexport class DeepgramAdapter extends BaseAdapter {\n readonly name = \"deepgram\" as const\n readonly capabilities: ProviderCapabilities = {\n streaming: true,\n diarization: true,\n wordTimestamps: true,\n languageDetection: true,\n customVocabulary: true,\n summarization: true,\n sentimentAnalysis: true,\n entityDetection: true,\n piiRedaction: true\n }\n\n private client?: AxiosInstance\n protected baseUrl = \"https://api.deepgram.com/v1\"\n private wsBaseUrl = \"wss://api.deepgram.com/v1/listen\"\n\n initialize(config: ProviderConfig): void {\n super.initialize(config)\n\n this.client = axios.create({\n baseURL: config.baseUrl || this.baseUrl,\n timeout: config.timeout || 60000,\n headers: {\n Authorization: `Token ${config.apiKey}`,\n \"Content-Type\": \"application/json\",\n ...config.headers\n }\n })\n }\n\n /**\n * Submit audio for transcription\n *\n * Sends audio to Deepgram API for transcription. Deepgram processes\n * synchronously and returns results immediately (no polling required).\n *\n * @param audio - Audio input (URL or file buffer)\n * @param options - Transcription options\n * @param options.language - Language code (e.g., 'en', 'es', 'fr')\n * @param options.languageDetection - Enable automatic language detection\n * @param options.diarization - Enable speaker identification (diarize)\n * @param options.speakersExpected - Expected number of speakers\n * @param options.summarization - Generate text summary\n * @param options.sentimentAnalysis - Analyze sentiment\n * @param options.entityDetection - Detect named entities\n * @param options.piiRedaction - Redact personally identifiable information\n * @param options.customVocabulary - Keywords to boost in recognition\n * @param options.webhookUrl - Callback URL for async processing\n * @returns Normalized transcription response\n *\n * @example Simple transcription\n * ```typescript\n * const result = await adapter.transcribe({\n * type: 'url',\n * url: 'https://example.com/meeting.mp3'\n * });\n * ```\n *\n * @example With advanced features\n * ```typescript\n * const result = await adapter.transcribe({\n * type: 'url',\n * url: 'https://example.com/meeting.mp3'\n * }, {\n * language: 'en',\n * diarization: true,\n * summarization: true,\n * sentimentAnalysis: true,\n * entityDetection: true,\n * customVocabulary: ['API', 'TypeScript', 'JavaScript']\n * });\n * ```\n */\n async transcribe(\n audio: AudioInput,\n options?: TranscribeOptions\n ): Promise<UnifiedTranscriptResponse> {\n this.validateConfig()\n\n try {\n // Build query parameters from options\n const params = this.buildTranscriptionParams(options)\n\n let response: ListenV1Response\n\n if (audio.type === \"url\") {\n // URL-based transcription\n response = await this.client!.post<ListenV1Response>(\n \"/listen\",\n { url: audio.url },\n { params }\n ).then((res) => res.data)\n } else if (audio.type === \"file\") {\n // File-based transcription\n response = await this.client!.post<ListenV1Response>(\"/listen\", audio.file, {\n params,\n headers: {\n \"Content-Type\": \"audio/*\"\n }\n }).then((res) => res.data)\n } else {\n throw new Error(\n \"Deepgram adapter does not support stream type for pre-recorded transcription. 
Use transcribeStream() for real-time streaming.\"\n )\n }\n\n // Deepgram returns results immediately (synchronous)\n return this.normalizeResponse(response)\n } catch (error) {\n return this.createErrorResponse(error)\n }\n }\n\n /**\n * Get transcription result by ID\n *\n * Note: Deepgram processes synchronously, so this method is primarily\n * for retrieving cached results if you've stored the request ID.\n * The initial transcribe() call already returns complete results.\n *\n * @param transcriptId - Request ID from Deepgram\n * @returns Normalized transcription response\n */\n async getTranscript(transcriptId: string): Promise<UnifiedTranscriptResponse> {\n this.validateConfig()\n\n // Deepgram doesn't have a \"get by ID\" endpoint for pre-recorded audio\n // Results are returned immediately on transcription\n return {\n success: false,\n provider: this.name,\n error: {\n code: \"NOT_SUPPORTED\",\n message:\n \"Deepgram returns transcription results immediately. Store the response from transcribe() instead of using getTranscript().\"\n }\n }\n }\n\n /**\n * Build Deepgram transcription parameters from unified options\n */\n private buildTranscriptionParams(options?: TranscribeOptions): ListenV1MediaTranscribeParams {\n const params: ListenV1MediaTranscribeParams = {}\n\n if (!options) {\n return params\n }\n\n // Language configuration\n if (options.language) {\n params.language = options.language\n }\n\n if (options.languageDetection) {\n params.detect_language = true\n }\n\n // Speaker diarization\n if (options.diarization) {\n params.diarize = true\n }\n\n // Custom vocabulary (keywords in Deepgram)\n if (options.customVocabulary && options.customVocabulary.length > 0) {\n params.keywords = options.customVocabulary\n }\n\n // Summarization\n if (options.summarization) {\n params.summarize = true\n }\n\n // Sentiment analysis\n if (options.sentimentAnalysis) {\n params.sentiment = true\n }\n\n // Entity detection\n if (options.entityDetection) {\n params.detect_entities = true\n }\n\n // PII redaction\n if (options.piiRedaction) {\n params.redact = [\"pci\", \"pii\"]\n }\n\n // Webhook callback\n if (options.webhookUrl) {\n params.callback = options.webhookUrl\n }\n\n // Enable features for better results\n params.punctuate = true\n params.utterances = true\n params.smart_format = true\n\n return params\n }\n\n /**\n * Normalize Deepgram response to unified format\n */\n private normalizeResponse(response: ListenV1Response): UnifiedTranscriptResponse {\n // Deepgram returns results immediately\n const channel = response.results.channels?.[0]\n const alternative = channel?.alternatives?.[0]\n\n if (!alternative) {\n return {\n success: false,\n provider: this.name,\n error: {\n code: \"NO_RESULTS\",\n message: \"No transcription results returned by Deepgram\"\n },\n raw: response\n }\n }\n\n return {\n success: true,\n provider: this.name,\n data: {\n id: response.metadata?.request_id || \"\",\n text: alternative.transcript || \"\",\n confidence: alternative.confidence,\n status: \"completed\", // Deepgram returns completed results immediately\n language: channel?.detected_language || undefined,\n duration: response.metadata?.duration,\n speakers: this.extractSpeakers(response),\n words: this.extractWords(alternative),\n utterances: this.extractUtterances(response),\n summary: this.extractSummary(alternative),\n metadata: {\n modelInfo: response.metadata?.model_info,\n channels: response.metadata?.channels,\n sentiment: response.results.sentiments,\n intents: 
response.results.intents,\n topics: response.results.topics\n }\n },\n raw: response\n }\n }\n\n /**\n * Extract speaker information from Deepgram response\n */\n private extractSpeakers(response: ListenV1Response) {\n const utterances = response.results.utterances\n\n if (!utterances || utterances.length === 0) {\n return undefined\n }\n\n // Extract unique speakers from utterances\n const speakerSet = new Set<number>()\n utterances.forEach((utterance: ListenV1ResponseResultsUtterancesItem) => {\n if (utterance.speaker !== undefined) {\n speakerSet.add(utterance.speaker)\n }\n })\n\n if (speakerSet.size === 0) {\n return undefined\n }\n\n return Array.from(speakerSet).map((speakerId) => ({\n id: speakerId.toString(),\n label: `Speaker ${speakerId}`\n }))\n }\n\n /**\n * Extract word timestamps from Deepgram response\n */\n private extractWords(alternative: ListenV1ResponseResultsChannelsItemAlternativesItem) {\n if (!alternative.words || alternative.words.length === 0) {\n return undefined\n }\n\n return alternative.words.map(\n (word: ListenV1ResponseResultsChannelsItemAlternativesItemWordsItem) => ({\n text: word.word || \"\",\n start: word.start || 0,\n end: word.end || 0,\n confidence: word.confidence,\n speaker: undefined // Speaker info is at utterance level, not word level\n })\n )\n }\n\n /**\n * Extract utterances from Deepgram response\n */\n private extractUtterances(response: ListenV1Response) {\n const utterances = response.results.utterances\n\n if (!utterances || utterances.length === 0) {\n return undefined\n }\n\n return utterances.map((utterance: ListenV1ResponseResultsUtterancesItem) => ({\n text: utterance.transcript || \"\",\n start: utterance.start || 0,\n end: utterance.end || 0,\n speaker: utterance.speaker?.toString(),\n confidence: utterance.confidence,\n words: utterance.words?.map((word) => ({\n text: word.word || \"\",\n start: word.start || 0,\n end: word.end || 0,\n confidence: word.confidence\n }))\n }))\n }\n\n /**\n * Extract summary from Deepgram response\n */\n private extractSummary(\n alternative: ListenV1ResponseResultsChannelsItemAlternativesItem\n ): string | undefined {\n if (!alternative.summaries || alternative.summaries.length === 0) {\n return undefined\n }\n\n // Combine all summaries into one\n return alternative.summaries\n .map((summary) => summary.summary)\n .filter(Boolean)\n .join(\" \")\n }\n\n /**\n * Stream audio for real-time transcription\n *\n * Creates a WebSocket connection to Deepgram for streaming transcription.\n * Send audio chunks via session.sendAudio() and receive results via callbacks.\n *\n * @param options - Streaming configuration options\n * @param callbacks - Event callbacks for transcription results\n * @returns Promise that resolves with a StreamingSession\n *\n * @example Real-time streaming\n * ```typescript\n * const session = await adapter.transcribeStream({\n * encoding: 'linear16',\n * sampleRate: 16000,\n * channels: 1,\n * language: 'en',\n * diarization: true,\n * interimResults: true\n * }, {\n * onOpen: () => console.log('Connected'),\n * onTranscript: (event) => {\n * if (event.isFinal) {\n * console.log('Final:', event.text);\n * } else {\n * console.log('Interim:', event.text);\n * }\n * },\n * onError: (error) => console.error('Error:', error),\n * onClose: () => console.log('Disconnected')\n * });\n *\n * // Send audio chunks\n * const audioChunk = getAudioChunk(); // Your audio source\n * await session.sendAudio({ data: audioChunk });\n *\n * // Close when done\n * await session.close();\n * ```\n 
*/\n async transcribeStream(\n options?: StreamingOptions,\n callbacks?: StreamingCallbacks\n ): Promise<StreamingSession> {\n this.validateConfig()\n\n // Build query parameters for WebSocket URL\n const params = new URLSearchParams()\n\n if (options?.encoding) params.append(\"encoding\", options.encoding)\n if (options?.sampleRate) params.append(\"sample_rate\", options.sampleRate.toString())\n if (options?.channels) params.append(\"channels\", options.channels.toString())\n if (options?.language) params.append(\"language\", options.language)\n if (options?.languageDetection) params.append(\"detect_language\", \"true\")\n if (options?.diarization) params.append(\"diarize\", \"true\")\n if (options?.interimResults) params.append(\"interim_results\", \"true\")\n if (options?.summarization) params.append(\"summarize\", \"true\")\n if (options?.sentimentAnalysis) params.append(\"sentiment\", \"true\")\n if (options?.entityDetection) params.append(\"detect_entities\", \"true\")\n if (options?.piiRedaction) params.append(\"redact\", \"pii\")\n if (options?.customVocabulary && options.customVocabulary.length > 0) {\n params.append(\"keywords\", options.customVocabulary.join(\",\"))\n }\n\n const wsUrl = `${this.wsBaseUrl}?${params.toString()}`\n\n // Create WebSocket connection\n const ws = new WebSocket(wsUrl, {\n headers: {\n Authorization: `Token ${this.config!.apiKey}`\n }\n })\n\n let sessionStatus: \"connecting\" | \"open\" | \"closing\" | \"closed\" = \"connecting\"\n const sessionId = `deepgram-${Date.now()}-${Math.random().toString(36).substring(7)}`\n\n // Handle WebSocket events\n ws.on(\"open\", () => {\n sessionStatus = \"open\"\n callbacks?.onOpen?.()\n })\n\n ws.on(\"message\", (data: Buffer) => {\n try {\n const message = JSON.parse(data.toString()) as DeepgramRealtimeMessage\n\n // Handle different message types from Deepgram - TYPE SAFE!\n if (message.type === \"Results\") {\n // Type narrowed to DeepgramResultsMessage\n const channel = message.channel.alternatives[0]\n\n if (channel) {\n const transcript = channel.transcript\n const isFinal = message.is_final\n const words = channel.words?.map((word) => ({\n text: word.word,\n start: word.start,\n end: word.end,\n confidence: word.confidence\n }))\n\n callbacks?.onTranscript?.({\n type: \"transcript\",\n text: transcript,\n isFinal,\n words,\n confidence: channel.confidence,\n data: message\n })\n }\n } else if (message.type === \"UtteranceEnd\") {\n // Type narrowed to DeepgramUtteranceEndMessage\n callbacks?.onMetadata?.(message)\n } else if (message.type === \"Metadata\") {\n // Type narrowed to DeepgramMetadataMessage\n callbacks?.onMetadata?.(message)\n }\n } catch (error) {\n callbacks?.onError?.({\n code: \"PARSE_ERROR\",\n message: \"Failed to parse WebSocket message\",\n details: error\n })\n }\n })\n\n ws.on(\"error\", (error: Error) => {\n callbacks?.onError?.({\n code: \"WEBSOCKET_ERROR\",\n message: error.message,\n details: error\n })\n })\n\n ws.on(\"close\", (code: number, reason: Buffer) => {\n sessionStatus = \"closed\"\n callbacks?.onClose?.(code, reason.toString())\n })\n\n // Wait for connection to open\n await new Promise<void>((resolve, reject) => {\n const timeout = setTimeout(() => {\n reject(new Error(\"WebSocket connection timeout\"))\n }, 10000)\n\n ws.once(\"open\", () => {\n clearTimeout(timeout)\n resolve()\n })\n\n ws.once(\"error\", (error) => {\n clearTimeout(timeout)\n reject(error)\n })\n })\n\n // Return StreamingSession interface\n return {\n id: sessionId,\n provider: this.name,\n 
createdAt: new Date(),\n getStatus: () => sessionStatus,\n sendAudio: async (chunk: AudioChunk) => {\n if (sessionStatus !== \"open\") {\n throw new Error(`Cannot send audio: session is ${sessionStatus}`)\n }\n\n if (ws.readyState !== WebSocket.OPEN) {\n throw new Error(\"WebSocket is not open\")\n }\n\n // Send audio data\n ws.send(chunk.data)\n\n // Send close message if this is the last chunk\n if (chunk.isLast) {\n ws.send(JSON.stringify({ type: \"CloseStream\" }))\n }\n },\n close: async () => {\n if (sessionStatus === \"closed\" || sessionStatus === \"closing\") {\n return\n }\n\n sessionStatus = \"closing\"\n\n // Send CloseStream message before closing\n if (ws.readyState === WebSocket.OPEN) {\n ws.send(JSON.stringify({ type: \"CloseStream\" }))\n }\n\n // Close WebSocket\n return new Promise<void>((resolve) => {\n const timeout = setTimeout(() => {\n ws.terminate()\n resolve()\n }, 5000)\n\n ws.close()\n\n ws.once(\"close\", () => {\n clearTimeout(timeout)\n sessionStatus = \"closed\"\n resolve()\n })\n })\n }\n }\n }\n}\n\n/**\n * Factory function to create a Deepgram adapter\n */\nexport function createDeepgramAdapter(config: ProviderConfig): DeepgramAdapter {\n const adapter = new DeepgramAdapter()\n adapter.initialize(config)\n return adapter\n}\n","/**\n * Azure Speech-to-Text transcription provider adapter\n * Documentation: https://learn.microsoft.com/azure/cognitive-services/speech-service/\n */\n\nimport axios from \"axios\"\nimport type {\n AudioInput,\n ProviderCapabilities,\n TranscribeOptions,\n UnifiedTranscriptResponse\n} from \"../router/types\"\nimport { BaseAdapter, type ProviderConfig } from \"./base-adapter\"\n\n// Import generated API client functions - FULL TYPE SAFETY!\nimport {\n transcriptionsCreate,\n transcriptionsGet,\n transcriptionsListFiles\n} from \"../generated/azure/api/speechServicesAPIV31\"\n\n// Import Azure generated types\nimport type { Transcription } from \"../generated/azure/schema/transcription\"\nimport type { TranscriptionProperties } from \"../generated/azure/schema/transcriptionProperties\"\n\n/**\n * Azure Speech-to-Text transcription provider adapter\n *\n * Implements transcription for Azure Cognitive Services Speech API with support for:\n * - Batch transcription (async processing)\n * - Speaker diarization (identifying different speakers)\n * - Multi-language support\n * - Custom models and acoustic models\n * - Word-level timestamps\n * - Profanity filtering\n * - Punctuation and capitalization\n *\n * @see https://learn.microsoft.com/azure/cognitive-services/speech-service/ Azure Speech Documentation\n *\n * @example Basic transcription\n * ```typescript\n * import { AzureSTTAdapter } from '@meeting-baas/sdk';\n *\n * const adapter = new AzureSTTAdapter();\n * adapter.initialize({\n * apiKey: process.env.AZURE_SPEECH_KEY,\n * region: 'eastus' // Your Azure region\n * });\n *\n * const result = await adapter.transcribe({\n * type: 'url',\n * url: 'https://example.com/audio.mp3'\n * }, {\n * language: 'en-US',\n * diarization: true\n * });\n *\n * console.log(result.data.text);\n * ```\n *\n * @example With custom model\n * ```typescript\n * const result = await adapter.transcribe(audio, {\n * language: 'en-US',\n * diarization: true,\n * metadata: {\n * modelId: 'custom-model-id'\n * }\n * });\n * ```\n *\n * @example Async transcription with polling (Azure always returns job ID)\n * ```typescript\n * // Submit transcription (Azure is always async)\n * const result = await adapter.transcribe({\n * type: 'url',\n * url: 
'https://example.com/audio.mp3'\n * }, {\n * language: 'en-US',\n * diarization: true\n * });\n *\n * // Get transcription ID for polling\n * const transcriptionId = result.data?.id;\n * console.log('Transcription ID:', transcriptionId);\n *\n * // Poll for completion\n * const poll = async () => {\n * const status = await adapter.getTranscript(transcriptionId);\n * if (status.data?.status === 'completed') {\n * console.log('Transcript:', status.data.text);\n * } else if (status.data?.status === 'processing') {\n * setTimeout(poll, 5000); // Poll every 5 seconds\n * }\n * };\n * await poll();\n * ```\n */\nexport class AzureSTTAdapter extends BaseAdapter {\n readonly name = \"azure-stt\" as const\n readonly capabilities: ProviderCapabilities = {\n streaming: false, // Batch transcription only\n diarization: true,\n wordTimestamps: true,\n languageDetection: false,\n customVocabulary: true,\n summarization: false,\n sentimentAnalysis: false,\n entityDetection: false,\n piiRedaction: false\n }\n\n private region?: string\n protected baseUrl = \"https://eastus.api.cognitive.microsoft.com/speechtotext/v3.1\" // Default, overridden in initialize()\n\n initialize(config: ProviderConfig & { region?: string }): void {\n super.initialize(config)\n\n this.region = config.region || \"eastus\"\n this.baseUrl =\n config.baseUrl || `https://${this.region}.api.cognitive.microsoft.com/speechtotext/v3.1`\n }\n\n /**\n * Get axios config for generated API client functions\n * Configures headers and base URL using Azure subscription key\n */\n protected getAxiosConfig() {\n return super.getAxiosConfig(\"Ocp-Apim-Subscription-Key\")\n }\n\n /**\n * Submit audio for transcription\n *\n * Azure Speech-to-Text uses batch transcription which processes asynchronously.\n * You need to poll getTranscript() to retrieve the completed transcription.\n *\n * @param audio - Audio input (URL only for batch transcription)\n * @param options - Transcription options\n * @returns Response with transcription ID for polling\n */\n async transcribe(\n audio: AudioInput,\n options?: TranscribeOptions\n ): Promise<UnifiedTranscriptResponse> {\n this.validateConfig()\n\n if (audio.type !== \"url\") {\n return {\n success: false,\n provider: this.name,\n error: {\n code: \"INVALID_INPUT\",\n message: \"Azure Speech-to-Text batch transcription only supports URL input\"\n }\n }\n }\n\n try {\n const transcriptionRequest: Partial<Transcription> = {\n displayName: (options?.metadata?.displayName as string) || \"SDK Transcription\",\n description: (options?.metadata?.description as string) || \"\",\n locale: options?.language || \"en-US\",\n contentUrls: [audio.url],\n properties: this.buildTranscriptionProperties(options)\n }\n\n // Use generated API client function - FULLY TYPED!\n const response = await transcriptionsCreate(\n transcriptionRequest as Transcription,\n this.getAxiosConfig()\n )\n\n const transcription = response.data\n\n return {\n success: true,\n provider: this.name,\n data: {\n id: transcription.self?.split(\"/\").pop() || \"\",\n text: \"\", // Will be populated after polling\n status: this.normalizeStatus(transcription.status),\n language: transcription.locale,\n createdAt: transcription.createdDateTime\n },\n raw: transcription\n }\n } catch (error) {\n return this.createErrorResponse(error)\n }\n }\n\n /**\n * Get transcription result by ID\n *\n * Poll this method to check transcription status and retrieve results.\n *\n * @param transcriptId - Transcription ID from Azure\n * @returns Transcription response with 
status and results\n */\n async getTranscript(transcriptId: string): Promise<UnifiedTranscriptResponse> {\n this.validateConfig()\n\n try {\n // Get transcription status using generated API\n const statusResponse = await transcriptionsGet(transcriptId, this.getAxiosConfig())\n\n const transcription = statusResponse.data\n const status = this.normalizeStatus(transcription.status)\n\n if (status !== \"completed\") {\n return {\n success: true,\n provider: this.name,\n data: {\n id: transcriptId,\n text: \"\",\n status,\n language: transcription.locale,\n createdAt: transcription.createdDateTime\n },\n raw: transcription\n }\n }\n\n // Get transcription files (results) using generated API\n if (!transcription.links?.files) {\n return {\n success: false,\n provider: this.name,\n error: {\n code: \"NO_RESULTS\",\n message: \"Transcription completed but no result files available\"\n },\n raw: transcription\n }\n }\n\n const filesResponse = await transcriptionsListFiles(\n transcriptId,\n undefined,\n this.getAxiosConfig()\n )\n const files = filesResponse.data?.values || []\n\n // Find the transcription result file\n const resultFile = files.find((file: any) => file.kind === \"Transcription\")\n\n if (!resultFile?.links?.contentUrl) {\n return {\n success: false,\n provider: this.name,\n error: {\n code: \"NO_RESULTS\",\n message: \"Transcription result file not found\"\n },\n raw: transcription\n }\n }\n\n // Fetch the actual transcription content (contentUrl is an external link, not part of API)\n const contentResponse = await axios.get(resultFile.links.contentUrl)\n const transcriptionData = contentResponse.data\n\n return this.normalizeResponse(transcription, transcriptionData)\n } catch (error) {\n return this.createErrorResponse(error)\n }\n }\n\n /**\n * Build Azure-specific transcription properties\n */\n private buildTranscriptionProperties(options?: TranscribeOptions): TranscriptionProperties {\n const properties: any = {\n wordLevelTimestampsEnabled: options?.wordTimestamps ?? 
true,\n punctuationMode: \"DictatedAndAutomatic\",\n profanityFilterMode: \"Masked\"\n }\n\n if (options?.diarization) {\n properties.diarizationEnabled = true\n if (options.speakersExpected) {\n properties.diarization = {\n speakers: {\n minCount: 1,\n maxCount: options.speakersExpected\n }\n }\n }\n }\n\n if (options?.customVocabulary && options.customVocabulary.length > 0) {\n properties.customProperties = {\n phrases: options.customVocabulary.join(\",\")\n }\n }\n\n return properties\n }\n\n /**\n * Normalize Azure status to unified status\n */\n private normalizeStatus(status: any): \"queued\" | \"processing\" | \"completed\" | \"error\" {\n const statusStr = status?.toString().toLowerCase() || \"\"\n\n if (statusStr.includes(\"succeeded\")) return \"completed\"\n if (statusStr.includes(\"running\")) return \"processing\"\n if (statusStr.includes(\"notstarted\")) return \"queued\"\n if (statusStr.includes(\"failed\")) return \"error\"\n\n return \"queued\"\n }\n\n /**\n * Normalize Azure transcription response to unified format\n */\n private normalizeResponse(\n transcription: Transcription,\n transcriptionData: any\n ): UnifiedTranscriptResponse {\n const combinedPhrases = transcriptionData.combinedRecognizedPhrases || []\n const recognizedPhrases = transcriptionData.recognizedPhrases || []\n\n // Get full text from combined phrases\n const fullText =\n combinedPhrases.map((phrase: any) => phrase.display || phrase.lexical).join(\" \") || \"\"\n\n // Extract words with timestamps\n const words = recognizedPhrases.flatMap((phrase: any) =>\n (phrase.nBest?.[0]?.words || []).map((word: any) => ({\n text: word.word,\n start: word.offsetInTicks / 10000000, // Convert ticks to seconds\n end: (word.offsetInTicks + word.durationInTicks) / 10000000,\n confidence: word.confidence,\n speaker: phrase.speaker !== undefined ? phrase.speaker.toString() : undefined\n }))\n )\n\n // Extract speakers if diarization was enabled\n const speakers =\n recognizedPhrases.length > 0 && recognizedPhrases[0].speaker !== undefined\n ? Array.from(\n new Set(\n recognizedPhrases.map((p: any) => p.speaker).filter((s: any) => s !== undefined)\n )\n ).map((speakerId: unknown) => ({\n id: String(speakerId),\n label: `Speaker ${speakerId}`\n }))\n : undefined\n\n return {\n success: true,\n provider: this.name,\n data: {\n id: transcription.self?.split(\"/\").pop() || \"\",\n text: fullText,\n confidence: recognizedPhrases[0]?.nBest?.[0]?.confidence,\n status: \"completed\",\n language: transcription.locale,\n duration: transcriptionData.duration ? transcriptionData.duration / 10000000 : undefined,\n speakers,\n words: words.length > 0 ? 
words : undefined,\n createdAt: transcription.createdDateTime,\n completedAt: transcription.lastActionDateTime\n },\n raw: {\n transcription,\n transcriptionData\n }\n }\n }\n}\n\n/**\n * Factory function to create an Azure STT adapter\n */\nexport function createAzureSTTAdapter(\n config: ProviderConfig & { region?: string }\n): AzureSTTAdapter {\n const adapter = new AzureSTTAdapter()\n adapter.initialize(config)\n return adapter\n}\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Speech Services API v3.1\n * Speech Services API v3.1.\n * OpenAPI spec version: v3.1\n */\n\nimport { faker } from \"@faker-js/faker\"\nimport type { AxiosRequestConfig, AxiosResponse } from \"axios\"\nimport axios from \"axios\"\nimport { delay, HttpResponse, http } from \"msw\"\nimport type {\n BaseModel,\n CommitBlocksEntry,\n CustomModel,\n Dataset,\n DatasetLocales,\n DatasetsGetFileParams,\n DatasetsListFilesParams,\n DatasetsListParams,\n DatasetsUploadBlockParams,\n DatasetsUploadBody,\n DatasetUpdate,\n Endpoint,\n EndpointsDeleteBaseModelLogsParams,\n EndpointsDeleteLogsParams,\n EndpointsGetBaseModelLogParams,\n EndpointsGetLogParams,\n EndpointsListBaseModelLogsParams,\n EndpointsListLogsParams,\n EndpointsListParams,\n EndpointUpdate,\n Evaluation,\n EvaluationsGetFileParams,\n EvaluationsListFilesParams,\n EvaluationsListParams,\n EvaluationUpdate,\n File,\n ModelCopy,\n ModelManifest,\n ModelsGetBaseModelManifestParams,\n ModelsGetCustomModelManifestParams,\n ModelsGetFileParams,\n ModelsListBaseModelsParams,\n ModelsListCustomModelsParams,\n ModelsListFilesParams,\n ModelUpdate,\n PaginatedBaseModels,\n PaginatedCustomModels,\n PaginatedDatasets,\n PaginatedEndpoints,\n PaginatedEvaluations,\n PaginatedFiles,\n PaginatedProjects,\n PaginatedTranscriptions,\n PaginatedWebHooks,\n Project,\n ProjectsListDatasetsParams,\n ProjectsListEndpointsParams,\n ProjectsListEvaluationsParams,\n ProjectsListModelsParams,\n ProjectsListParams,\n ProjectsListTranscriptionsParams,\n ProjectUpdate,\n ServiceHealth,\n Transcription,\n TranscriptionsGetFileParams,\n TranscriptionsListFilesParams,\n TranscriptionsListParams,\n TranscriptionUpdate,\n UploadedBlocks,\n WebHook,\n WebHooksListParams,\n WebHookUpdate\n} from \"../schema\"\nimport {\n DatasetKind,\n FileKind,\n HealthStatus,\n ProfanityFilterMode,\n PunctuationMode,\n Status\n} from \"../schema\"\n\n// https://stackoverflow.com/questions/49579094/typescript-conditional-types-filter-out-readonly-properties-pick-only-requir/49579497#49579497\ntype IfEquals<X, Y, A = X, B = never> = (<T>() => T extends X ? 1 : 2) extends <T>() => T extends Y\n ? 1\n : 2\n ? A\n : B\n\ntype WritableKeys<T> = {\n [P in keyof T]-?: IfEquals<{ [Q in P]: T[P] }, { -readonly [Q in P]: T[P] }, P>\n}[keyof T]\n\ntype UnionToIntersection<U> = (U extends any ? (k: U) => void : never) extends (k: infer I) => void\n ? I\n : never\ntype DistributeReadOnlyOverUnions<T> = T extends any ? NonReadonly<T> : never\n\ntype Writable<T> = Pick<T, WritableKeys<T>>\ntype NonReadonly<T> = [T] extends [UnionToIntersection<T>]\n ? {\n [P in keyof Writable<T>]: T[P] extends object ? 
NonReadonly<NonNullable<T[P]>> : T[P]\n }\n : DistributeReadOnlyOverUnions<T>\n\n/**\n * @summary Gets a list of supported locales for datasets.\n */\nexport const datasetsListSupportedLocales = <TData = AxiosResponse<DatasetLocales>>(\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/datasets/locales\", options)\n}\n\n/**\n * @summary Gets a list of datasets for the authenticated subscription.\n */\nexport const datasetsList = <TData = AxiosResponse<PaginatedDatasets>>(\n params?: DatasetsListParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/datasets\", {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Uploads and creates a new dataset by getting the data from a specified URL or starts waiting for data blocks to be uploaded.\n */\nexport const datasetsCreate = <TData = AxiosResponse<Dataset>>(\n dataset: NonReadonly<Dataset>,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(\"/datasets\", dataset, options)\n}\n\n/**\n * @summary Gets the dataset identified by the given ID.\n */\nexport const datasetsGet = <TData = AxiosResponse<Dataset>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/datasets/${id}`, options)\n}\n\n/**\n * @summary Updates the mutable details of the dataset identified by its ID.\n */\nexport const datasetsUpdate = <TData = AxiosResponse<Dataset>>(\n id: string,\n datasetUpdate: DatasetUpdate,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.patch(`/datasets/${id}`, datasetUpdate, options)\n}\n\n/**\n * @summary Deletes the specified dataset.\n */\nexport const datasetsDelete = <TData = AxiosResponse<void>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.delete(`/datasets/${id}`, options)\n}\n\n/**\n * @summary Gets the list of uploaded blocks for this dataset.\n */\nexport const datasetsGetBlocks = <TData = AxiosResponse<UploadedBlocks>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/datasets/${id}/blocks`, options)\n}\n\n/**\n * @summary Upload a block of data for the dataset. 
The maximum size of the block is 8MiB.\n */\nexport const datasetsUploadBlock = <TData = AxiosResponse<void>>(\n id: string,\n datasetsUploadBlockBody: Blob,\n params: DatasetsUploadBlockParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.put(`/datasets/${id}/blocks`, datasetsUploadBlockBody, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @deprecated\n * @summary Uploads data and creates a new dataset.\n */\nexport const datasetsUpload = <TData = AxiosResponse<Dataset>>(\n datasetsUploadBody: DatasetsUploadBody,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n const formData = new FormData()\n if (datasetsUploadBody.project !== undefined) {\n formData.append(\"project\", datasetsUploadBody.project)\n }\n formData.append(\"displayName\", datasetsUploadBody.displayName)\n if (datasetsUploadBody.description !== undefined) {\n formData.append(\"description\", datasetsUploadBody.description)\n }\n formData.append(\"locale\", datasetsUploadBody.locale)\n formData.append(\"kind\", datasetsUploadBody.kind)\n if (datasetsUploadBody.customProperties !== undefined) {\n formData.append(\"customProperties\", datasetsUploadBody.customProperties)\n }\n if (datasetsUploadBody.data !== undefined) {\n formData.append(\"data\", datasetsUploadBody.data)\n }\n if (datasetsUploadBody.email !== undefined) {\n formData.append(\"email\", datasetsUploadBody.email)\n }\n\n return axios.post(\"/datasets/upload\", formData, options)\n}\n\n/**\n * @summary Commit block list to complete the upload of the dataset.\n */\nexport const datasetsCommitBlocks = <TData = AxiosResponse<void>>(\n id: string,\n commitBlocksEntry: CommitBlocksEntry[],\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(`/datasets/${id}/blocks:commit`, commitBlocksEntry, options)\n}\n\n/**\n * @summary Gets the files of the dataset identified by the given ID.\n */\nexport const datasetsListFiles = <TData = AxiosResponse<PaginatedFiles>>(\n id: string,\n params?: DatasetsListFilesParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/datasets/${id}/files`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Gets one specific file (identified with fileId) from a dataset (identified with id).\n */\nexport const datasetsGetFile = <TData = AxiosResponse<File>>(\n id: string,\n fileId: string,\n params?: DatasetsGetFileParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/datasets/${id}/files/${fileId}`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Gets a list of supported locales for endpoint creations.\n */\nexport const endpointsListSupportedLocales = <TData = AxiosResponse<string[]>>(\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/endpoints/locales\", options)\n}\n\n/**\n * @summary Gets the list of endpoints for the authenticated subscription.\n */\nexport const endpointsList = <TData = AxiosResponse<PaginatedEndpoints>>(\n params?: EndpointsListParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/endpoints\", {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Creates a new endpoint.\n */\nexport const endpointsCreate = <TData = AxiosResponse<Endpoint>>(\n endpoint: NonReadonly<Endpoint>,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(\"/endpoints\", endpoint, options)\n}\n\n/**\n * @summary Gets the 
endpoint identified by the given ID.\n */\nexport const endpointsGet = <TData = AxiosResponse<Endpoint>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/endpoints/${id}`, options)\n}\n\n/**\n * @summary Deletes the endpoint identified by the given ID.\n */\nexport const endpointsDelete = <TData = AxiosResponse<void>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.delete(`/endpoints/${id}`, options)\n}\n\n/**\n * @summary Updates the metadata of the endpoint identified by the given ID.\n */\nexport const endpointsUpdate = <TData = AxiosResponse<Endpoint>>(\n id: string,\n endpointUpdate: EndpointUpdate,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.patch(`/endpoints/${id}`, endpointUpdate, options)\n}\n\n/**\n * @summary Gets the list of audio and transcription logs that have been stored for a given endpoint.\n */\nexport const endpointsListLogs = <TData = AxiosResponse<PaginatedFiles>>(\n id: string,\n params?: EndpointsListLogsParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/endpoints/${id}/files/logs`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * The deletion process is done asynchronously and can take up to one day depending on the amount of log files.\n * @summary Deletes the specified audio and transcription logs that have been stored for a given endpoint. It deletes all logs before (and including) a specific day.\n */\nexport const endpointsDeleteLogs = <TData = AxiosResponse<void>>(\n id: string,\n params?: EndpointsDeleteLogsParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.delete(`/endpoints/${id}/files/logs`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Gets a specific audio or transcription log for a given endpoint.\n */\nexport const endpointsGetLog = <TData = AxiosResponse<File>>(\n id: string,\n logId: string,\n params?: EndpointsGetLogParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/endpoints/${id}/files/logs/${logId}`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Deletes one audio or transcription log that have been stored for a given endpoint.\n */\nexport const endpointsDeleteLog = <TData = AxiosResponse<void>>(\n id: string,\n logId: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.delete(`/endpoints/${id}/files/logs/${logId}`, options)\n}\n\n/**\n * @summary Gets the list of audio and transcription logs that have been stored when using the default base model of a given language.\n */\nexport const endpointsListBaseModelLogs = <TData = AxiosResponse<PaginatedFiles>>(\n locale: string,\n params?: EndpointsListBaseModelLogsParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/endpoints/base/${locale}/files/logs`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * Deletion process is done asynchronously and can take up to one day depending on the amount of log files.\n * @summary Deletes the specified audio and transcription logs that have been stored when using the default base model of a given language. 
It deletes all logs before (and including) a specific day.\n */\nexport const endpointsDeleteBaseModelLogs = <TData = AxiosResponse<void>>(\n locale: string,\n params?: EndpointsDeleteBaseModelLogsParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.delete(`/endpoints/base/${locale}/files/logs`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Gets a specific audio or transcription log for the default base model in a given language.\n */\nexport const endpointsGetBaseModelLog = <TData = AxiosResponse<File>>(\n locale: string,\n logId: string,\n params?: EndpointsGetBaseModelLogParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/endpoints/base/${locale}/files/logs/${logId}`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Deletes one audio or transcription log that have been stored when using the default base model of a given language.\n */\nexport const endpointsDeleteBaseModelLog = <TData = AxiosResponse<void>>(\n locale: string,\n logId: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.delete(`/endpoints/base/${locale}/files/logs/${logId}`, options)\n}\n\n/**\n * @summary Gets a list of supported locales for evaluations.\n */\nexport const evaluationsListSupportedLocales = <TData = AxiosResponse<string[]>>(\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/evaluations/locales\", options)\n}\n\n/**\n * @summary Gets the list of evaluations for the authenticated subscription.\n */\nexport const evaluationsList = <TData = AxiosResponse<PaginatedEvaluations>>(\n params?: EvaluationsListParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/evaluations\", {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Creates a new evaluation.\n */\nexport const evaluationsCreate = <TData = AxiosResponse<Evaluation>>(\n evaluation: NonReadonly<Evaluation>,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(\"/evaluations\", evaluation, options)\n}\n\n/**\n * @summary Gets the files of the evaluation identified by the given ID.\n */\nexport const evaluationsListFiles = <TData = AxiosResponse<PaginatedFiles>>(\n id: string,\n params?: EvaluationsListFilesParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/evaluations/${id}/files`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Gets one specific file (identified with fileId) from an evaluation (identified with id).\n */\nexport const evaluationsGetFile = <TData = AxiosResponse<File>>(\n id: string,\n fileId: string,\n params?: EvaluationsGetFileParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/evaluations/${id}/files/${fileId}`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Gets the evaluation identified by the given ID.\n */\nexport const evaluationsGet = <TData = AxiosResponse<Evaluation>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/evaluations/${id}`, options)\n}\n\n/**\n * @summary Updates the mutable details of the evaluation identified by its id.\n */\nexport const evaluationsUpdate = <TData = AxiosResponse<Evaluation>>(\n id: string,\n evaluationUpdate: EvaluationUpdate,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.patch(`/evaluations/${id}`, 
evaluationUpdate, options)\n}\n\n/**\n * @summary Deletes the evaluation identified by the given ID.\n */\nexport const evaluationsDelete = <TData = AxiosResponse<void>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.delete(`/evaluations/${id}`, options)\n}\n\n/**\n * @summary Gets a list of supported locales for model adaptation.\n */\nexport const modelsListSupportedLocales = <TData = AxiosResponse<string[]>>(\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/models/locales\", options)\n}\n\n/**\n * @summary Gets the list of custom models for the authenticated subscription.\n */\nexport const modelsListCustomModels = <TData = AxiosResponse<PaginatedCustomModels>>(\n params?: ModelsListCustomModelsParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/models\", {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Creates a new model.\n */\nexport const modelsCreate = <TData = AxiosResponse<CustomModel>>(\n customModel: NonReadonly<CustomModel>,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(\"/models\", customModel, options)\n}\n\n/**\n * @summary Gets the list of base models for the authenticated subscription.\n */\nexport const modelsListBaseModels = <TData = AxiosResponse<PaginatedBaseModels>>(\n params?: ModelsListBaseModelsParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/models/base\", {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Gets the model identified by the given ID.\n */\nexport const modelsGetCustomModel = <TData = AxiosResponse<CustomModel>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/models/${id}`, options)\n}\n\n/**\n * @summary Updates the metadata of the model identified by the given ID.\n */\nexport const modelsUpdate = <TData = AxiosResponse<CustomModel>>(\n id: string,\n modelUpdate: ModelUpdate,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.patch(`/models/${id}`, modelUpdate, options)\n}\n\n/**\n * @summary Deletes the model identified by the given ID.\n */\nexport const modelsDelete = <TData = AxiosResponse<void>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.delete(`/models/${id}`, options)\n}\n\n/**\n * @summary Gets the base model identified by the given ID.\n */\nexport const modelsGetBaseModel = <TData = AxiosResponse<BaseModel>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/models/base/${id}`, options)\n}\n\n/**\n * This method can be used to copy a model from one location to another. 
If the target subscription\nkey belongs to a subscription created for another location, the model will be copied to that location.\nOnly adapted models are allowed to copy to another subscription.\n * @summary Copies a model from one subscription to another.\n */\nexport const modelsCopyTo = <TData = AxiosResponse<CustomModel>>(\n id: string,\n modelCopy: ModelCopy,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(`/models/${id}:copyto`, modelCopy, options)\n}\n\n/**\n * @summary Returns an manifest for this model which can be used in an on-premise container.\n */\nexport const modelsGetCustomModelManifest = <TData = AxiosResponse<ModelManifest>>(\n id: string,\n params?: ModelsGetCustomModelManifestParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/models/${id}/manifest`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Returns an manifest for this base model which can be used in an on-premise container.\n */\nexport const modelsGetBaseModelManifest = <TData = AxiosResponse<ModelManifest>>(\n id: string,\n params?: ModelsGetBaseModelManifestParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/models/base/${id}/manifest`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Gets the files of the model identified by the given ID.\n */\nexport const modelsListFiles = <TData = AxiosResponse<PaginatedFiles>>(\n id: string,\n params?: ModelsListFilesParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/models/${id}/files`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Gets one specific file (identified with fileId) from a model (identified with id).\n */\nexport const modelsGetFile = <TData = AxiosResponse<File>>(\n id: string,\n fileId: string,\n params?: ModelsGetFileParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/models/${id}/files/${fileId}`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Gets the list of supported locales.\n */\nexport const projectsListSupportedLocales = <TData = AxiosResponse<string[]>>(\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/projects/locales\", options)\n}\n\n/**\n * @summary Gets the list of projects for the authenticated subscription.\n */\nexport const projectsList = <TData = AxiosResponse<PaginatedProjects>>(\n params?: ProjectsListParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/projects\", {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Creates a new project.\n */\nexport const projectsCreate = <TData = AxiosResponse<Project>>(\n project: NonReadonly<Project>,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(\"/projects\", project, options)\n}\n\n/**\n * @summary Gets the project identified by the given ID.\n */\nexport const projectsGet = <TData = AxiosResponse<Project>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/projects/${id}`, options)\n}\n\n/**\n * @summary Updates the project identified by the given ID.\n */\nexport const projectsUpdate = <TData = AxiosResponse<Project>>(\n id: string,\n projectUpdate: ProjectUpdate,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.patch(`/projects/${id}`, projectUpdate, options)\n}\n\n/**\n * @summary Deletes 
the project identified by the given ID.\n */\nexport const projectsDelete = <TData = AxiosResponse<void>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.delete(`/projects/${id}`, options)\n}\n\n/**\n * @summary Gets the list of evaluations for specified project.\n */\nexport const projectsListEvaluations = <TData = AxiosResponse<PaginatedEvaluations>>(\n id: string,\n params?: ProjectsListEvaluationsParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/projects/${id}/evaluations`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Gets the list of datasets for specified project.\n */\nexport const projectsListDatasets = <TData = AxiosResponse<PaginatedDatasets>>(\n id: string,\n params?: ProjectsListDatasetsParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/projects/${id}/datasets`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Gets the list of endpoints for specified project.\n */\nexport const projectsListEndpoints = <TData = AxiosResponse<PaginatedEndpoints>>(\n id: string,\n params?: ProjectsListEndpointsParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/projects/${id}/endpoints`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Gets the list of models for specified project.\n */\nexport const projectsListModels = <TData = AxiosResponse<PaginatedCustomModels>>(\n id: string,\n params?: ProjectsListModelsParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/projects/${id}/models`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Gets the list of transcriptions for specified project.\n */\nexport const projectsListTranscriptions = <TData = AxiosResponse<PaginatedTranscriptions>>(\n id: string,\n params?: ProjectsListTranscriptionsParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/projects/${id}/transcriptions`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Gets a list of supported locales for offline transcriptions.\n */\nexport const transcriptionsListSupportedLocales = <TData = AxiosResponse<string[]>>(\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/transcriptions/locales\", options)\n}\n\n/**\n * @summary Gets a list of transcriptions for the authenticated subscription.\n */\nexport const transcriptionsList = <TData = AxiosResponse<PaginatedTranscriptions>>(\n params?: TranscriptionsListParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/transcriptions\", {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Creates a new transcription.\n */\nexport const transcriptionsCreate = <TData = AxiosResponse<Transcription>>(\n transcription: NonReadonly<Transcription>,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(\"/transcriptions\", transcription, options)\n}\n\n/**\n * @summary Gets the transcription identified by the given ID.\n */\nexport const transcriptionsGet = <TData = AxiosResponse<Transcription>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/transcriptions/${id}`, options)\n}\n\n/**\n * @summary Updates the mutable details of the transcription identified by its ID.\n */\nexport const transcriptionsUpdate = <TData = 
AxiosResponse<Transcription>>(\n id: string,\n transcriptionUpdate: TranscriptionUpdate,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.patch(`/transcriptions/${id}`, transcriptionUpdate, options)\n}\n\n/**\n * @summary Deletes the specified transcription task.\n */\nexport const transcriptionsDelete = <TData = AxiosResponse<void>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.delete(`/transcriptions/${id}`, options)\n}\n\n/**\n * @summary Gets the files of the transcription identified by the given ID.\n */\nexport const transcriptionsListFiles = <TData = AxiosResponse<PaginatedFiles>>(\n id: string,\n params?: TranscriptionsListFilesParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/transcriptions/${id}/files`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Gets one specific file (identified with fileId) from a transcription (identified with id).\n */\nexport const transcriptionsGetFile = <TData = AxiosResponse<File>>(\n id: string,\n fileId: string,\n params?: TranscriptionsGetFileParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/transcriptions/${id}/files/${fileId}`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Gets the list of web hooks for the authenticated subscription.\n */\nexport const webHooksList = <TData = AxiosResponse<PaginatedWebHooks>>(\n params?: WebHooksListParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/webhooks\", {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * If the property secret in the configuration is present and contains a non-empty string, it will be used to create a SHA256 hash of the payload with\nthe secret as HMAC key. This hash will be set as X-MicrosoftSpeechServices-Signature header when calling back into the registered URL.\n \nWhen calling back into the registered URL, the request will contain a X-MicrosoftSpeechServices-Event header containing one of the registered event\ntypes. There will be one request per registered event type.\n \nAfter successfully registering the web hook, it will not be usable until a challenge/response is completed. To do this, a request with the event type\nchallenge will be made with a query parameter called validationToken. Respond to the challenge with a 200 OK containing the value of the validationToken\nquery parameter as the response body. When the challenge/response is successfully completed, the web hook will begin receiving events.\n * @summary Creates a new web hook.\n */\nexport const webHooksCreate = <TData = AxiosResponse<WebHook>>(\n webHook: NonReadonly<WebHook>,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(\"/webhooks\", webHook, options)\n}\n\n/**\n * @summary Gets the web hook identified by the given ID.\n */\nexport const webHooksGet = <TData = AxiosResponse<WebHook>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/webhooks/${id}`, options)\n}\n\n/**\n * If the property secret in the configuration is omitted or contains an empty string, future callbacks won't contain X-MicrosoftSpeechServices-Signature\nheaders. If the property contains a non-empty string, it will be used to create a SHA256 hash of the payload with the secret as HMAC key. 
This hash\nwill be set as X-MicrosoftSpeechServices-Signature header when calling back into the registered URL.\n \nIf the URL changes, the web hook will stop receiving events until a\nchallenge/response is completed. To do this, a request with the event type challenge will be made with a query parameter called validationToken.\nRespond to the challenge with a 200 OK containing the value of the validationToken query parameter as the response body. When the challenge/response\nis successfully completed, the web hook will begin receiving events.\n * @summary Updates the web hook identified by the given ID.\n */\nexport const webHooksUpdate = <TData = AxiosResponse<WebHook>>(\n id: string,\n webHookUpdate: WebHookUpdate,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.patch(`/webhooks/${id}`, webHookUpdate, options)\n}\n\n/**\n * @summary Deletes the web hook identified by the given ID.\n */\nexport const webHooksDelete = <TData = AxiosResponse<void>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.delete(`/webhooks/${id}`, options)\n}\n\n/**\n * The request body of the POST request sent to the registered web hook URL is of the same shape as in the GET request for a specific hook.\nThe Swagger Schema ID of the model is WebHookV3.\n \nThe request will contain a X-MicrosoftSpeechServices-Event header with the value ping. If the web hook was registered with\na secret it will contain a X-MicrosoftSpeechServices-Signature header with an SHA256 hash of the payload with\nthe secret as HMAC key. The hash is base64 encoded.\n * @summary Sends a ping event to the registered URL.\n */\nexport const webHooksPing = <TData = AxiosResponse<void>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(`/webhooks/${id}:ping`, undefined, options)\n}\n\n/**\n * The payload will be generated from the last entity that would have invoked the web hook. If no entity is present for none of the registered event types,\nthe POST will respond with 204. If a test request can be made, it will respond with 200.\nThe request will contain a X-MicrosoftSpeechServices-Event header with the respective registered event type.\nIf the web hook was registered with a secret it will contain a X-MicrosoftSpeechServices-Signature header with an SHA256 hash of the payload with\nthe secret as HMAC key. 
The hash is base64 encoded.\n * @summary Sends a request for each registered event type to the registered URL.\n */\nexport const webHooksTest = <TData = AxiosResponse<void>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(`/webhooks/${id}:test`, undefined, options)\n}\n\n/**\n * @summary Returns the overall health of the service and optionally of the different subcomponents.\n */\nexport const serviceHealthGet = <TData = AxiosResponse<ServiceHealth>>(\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/healthstatus\", options)\n}\n\nexport type DatasetsListSupportedLocalesResult = AxiosResponse<DatasetLocales>\nexport type DatasetsListResult = AxiosResponse<PaginatedDatasets>\nexport type DatasetsCreateResult = AxiosResponse<Dataset>\nexport type DatasetsGetResult = AxiosResponse<Dataset>\nexport type DatasetsUpdateResult = AxiosResponse<Dataset>\nexport type DatasetsDeleteResult = AxiosResponse<void>\nexport type DatasetsGetBlocksResult = AxiosResponse<UploadedBlocks>\nexport type DatasetsUploadBlockResult = AxiosResponse<void>\nexport type DatasetsUploadResult = AxiosResponse<Dataset>\nexport type DatasetsCommitBlocksResult = AxiosResponse<void>\nexport type DatasetsListFilesResult = AxiosResponse<PaginatedFiles>\nexport type DatasetsGetFileResult = AxiosResponse<File>\nexport type EndpointsListSupportedLocalesResult = AxiosResponse<string[]>\nexport type EndpointsListResult = AxiosResponse<PaginatedEndpoints>\nexport type EndpointsCreateResult = AxiosResponse<Endpoint>\nexport type EndpointsGetResult = AxiosResponse<Endpoint>\nexport type EndpointsDeleteResult = AxiosResponse<void>\nexport type EndpointsUpdateResult = AxiosResponse<Endpoint>\nexport type EndpointsListLogsResult = AxiosResponse<PaginatedFiles>\nexport type EndpointsDeleteLogsResult = AxiosResponse<void>\nexport type EndpointsGetLogResult = AxiosResponse<File>\nexport type EndpointsDeleteLogResult = AxiosResponse<void>\nexport type EndpointsListBaseModelLogsResult = AxiosResponse<PaginatedFiles>\nexport type EndpointsDeleteBaseModelLogsResult = AxiosResponse<void>\nexport type EndpointsGetBaseModelLogResult = AxiosResponse<File>\nexport type EndpointsDeleteBaseModelLogResult = AxiosResponse<void>\nexport type EvaluationsListSupportedLocalesResult = AxiosResponse<string[]>\nexport type EvaluationsListResult = AxiosResponse<PaginatedEvaluations>\nexport type EvaluationsCreateResult = AxiosResponse<Evaluation>\nexport type EvaluationsListFilesResult = AxiosResponse<PaginatedFiles>\nexport type EvaluationsGetFileResult = AxiosResponse<File>\nexport type EvaluationsGetResult = AxiosResponse<Evaluation>\nexport type EvaluationsUpdateResult = AxiosResponse<Evaluation>\nexport type EvaluationsDeleteResult = AxiosResponse<void>\nexport type ModelsListSupportedLocalesResult = AxiosResponse<string[]>\nexport type ModelsListCustomModelsResult = AxiosResponse<PaginatedCustomModels>\nexport type ModelsCreateResult = AxiosResponse<CustomModel>\nexport type ModelsListBaseModelsResult = AxiosResponse<PaginatedBaseModels>\nexport type ModelsGetCustomModelResult = AxiosResponse<CustomModel>\nexport type ModelsUpdateResult = AxiosResponse<CustomModel>\nexport type ModelsDeleteResult = AxiosResponse<void>\nexport type ModelsGetBaseModelResult = AxiosResponse<BaseModel>\nexport type ModelsCopyToResult = AxiosResponse<CustomModel>\nexport type ModelsGetCustomModelManifestResult = AxiosResponse<ModelManifest>\nexport type ModelsGetBaseModelManifestResult = 
AxiosResponse<ModelManifest>\nexport type ModelsListFilesResult = AxiosResponse<PaginatedFiles>\nexport type ModelsGetFileResult = AxiosResponse<File>\nexport type ProjectsListSupportedLocalesResult = AxiosResponse<string[]>\nexport type ProjectsListResult = AxiosResponse<PaginatedProjects>\nexport type ProjectsCreateResult = AxiosResponse<Project>\nexport type ProjectsGetResult = AxiosResponse<Project>\nexport type ProjectsUpdateResult = AxiosResponse<Project>\nexport type ProjectsDeleteResult = AxiosResponse<void>\nexport type ProjectsListEvaluationsResult = AxiosResponse<PaginatedEvaluations>\nexport type ProjectsListDatasetsResult = AxiosResponse<PaginatedDatasets>\nexport type ProjectsListEndpointsResult = AxiosResponse<PaginatedEndpoints>\nexport type ProjectsListModelsResult = AxiosResponse<PaginatedCustomModels>\nexport type ProjectsListTranscriptionsResult = AxiosResponse<PaginatedTranscriptions>\nexport type TranscriptionsListSupportedLocalesResult = AxiosResponse<string[]>\nexport type TranscriptionsListResult = AxiosResponse<PaginatedTranscriptions>\nexport type TranscriptionsCreateResult = AxiosResponse<Transcription>\nexport type TranscriptionsGetResult = AxiosResponse<Transcription>\nexport type TranscriptionsUpdateResult = AxiosResponse<Transcription>\nexport type TranscriptionsDeleteResult = AxiosResponse<void>\nexport type TranscriptionsListFilesResult = AxiosResponse<PaginatedFiles>\nexport type TranscriptionsGetFileResult = AxiosResponse<File>\nexport type WebHooksListResult = AxiosResponse<PaginatedWebHooks>\nexport type WebHooksCreateResult = AxiosResponse<WebHook>\nexport type WebHooksGetResult = AxiosResponse<WebHook>\nexport type WebHooksUpdateResult = AxiosResponse<WebHook>\nexport type WebHooksDeleteResult = AxiosResponse<void>\nexport type WebHooksPingResult = AxiosResponse<void>\nexport type WebHooksTestResult = AxiosResponse<void>\nexport type ServiceHealthGetResult = AxiosResponse<ServiceHealth>\n\nexport const getDatasetsListSupportedLocalesResponseMock = (): DatasetLocales => ({\n [faker.string.alphanumeric(5)]: faker.helpers.arrayElements(Object.values(DatasetKind))\n})\n\nexport const getDatasetsListResponseMock = (\n overrideResponse: Partial<PaginatedDatasets> = {}\n): PaginatedDatasets => ({\n values: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n links: faker.helpers.arrayElement([\n {\n files: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n commitBlocks: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n listBlocks: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n uploadBlocks: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n acceptedLineCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n rejectedLineCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n kind: faker.helpers.arrayElement(Object.values(DatasetKind)),\n self: 
faker.helpers.arrayElement([faker.internet.url(), undefined]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n locale: faker.string.alpha(20),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n })),\n undefined\n ]),\n \"@nextLink\": faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getDatasetsCreateResponseMock = (\n overrideResponse: Partial<Dataset> = {}\n): Dataset => ({\n links: faker.helpers.arrayElement([\n {\n files: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n commitBlocks: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n listBlocks: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n uploadBlocks: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n acceptedLineCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n rejectedLineCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n kind: faker.helpers.arrayElement(Object.values(DatasetKind)),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n locale: faker.string.alpha(20),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n ...overrideResponse\n})\n\nexport const getDatasetsGetResponseMock = (overrideResponse: Partial<Dataset> = {}): Dataset => ({\n links: faker.helpers.arrayElement([\n {\n files: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n commitBlocks: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n listBlocks: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n uploadBlocks: 
faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n acceptedLineCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n rejectedLineCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n kind: faker.helpers.arrayElement(Object.values(DatasetKind)),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n locale: faker.string.alpha(20),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n ...overrideResponse\n})\n\nexport const getDatasetsUpdateResponseMock = (\n overrideResponse: Partial<Dataset> = {}\n): Dataset => ({\n links: faker.helpers.arrayElement([\n {\n files: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n commitBlocks: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n listBlocks: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n uploadBlocks: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n acceptedLineCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n rejectedLineCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n kind: faker.helpers.arrayElement(Object.values(DatasetKind)),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n locale: faker.string.alpha(20),\n lastActionDateTime: faker.helpers.arrayElement([\n 
`${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n ...overrideResponse\n})\n\nexport const getDatasetsGetBlocksResponseMock = (\n overrideResponse: Partial<UploadedBlocks> = {}\n): UploadedBlocks => ({\n committedBlocks: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n name: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n size: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ])\n })),\n undefined\n ]),\n uncommittedBlocks: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n name: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n size: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ])\n })),\n undefined\n ]),\n ...overrideResponse\n})\n\nexport const getDatasetsUploadResponseMock = (\n overrideResponse: Partial<Dataset> = {}\n): Dataset => ({\n links: faker.helpers.arrayElement([\n {\n files: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n commitBlocks: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n listBlocks: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n uploadBlocks: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n acceptedLineCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n rejectedLineCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n kind: faker.helpers.arrayElement(Object.values(DatasetKind)),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n locale: faker.string.alpha(20),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n ...overrideResponse\n})\n\nexport const getDatasetsListFilesResponseMock = (\n overrideResponse: Partial<PaginatedFiles> = {}\n): PaginatedFiles => ({\n values: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) 
=> i + 1).map(() => ({\n kind: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(FileKind)),\n undefined\n ]),\n links: faker.helpers.arrayElement([\n { contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n size: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n name: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined])\n })),\n undefined\n ]),\n \"@nextLink\": faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getDatasetsGetFileResponseMock = (overrideResponse: Partial<File> = {}): File => ({\n kind: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(FileKind)),\n undefined\n ]),\n links: faker.helpers.arrayElement([\n { contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n size: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n name: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getEndpointsListSupportedLocalesResponseMock = (): string[] =>\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, () => faker.word.sample())\n\nexport const getEndpointsListResponseMock = (\n overrideResponse: Partial<PaginatedEndpoints> = {}\n): PaginatedEndpoints => ({\n values: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n links: faker.helpers.arrayElement([\n {\n restInteractive: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n restConversation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n restDictation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n webSocketInteractive: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n webSocketConversation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n webSocketDictation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n logs: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n loggingEnabled: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n timeToLive: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n self: 
faker.helpers.arrayElement([faker.internet.url(), undefined]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n text: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n model: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n locale: faker.string.alpha(20),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n })),\n undefined\n ]),\n \"@nextLink\": faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getEndpointsCreateResponseMock = (\n overrideResponse: Partial<Endpoint> = {}\n): Endpoint => ({\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n links: faker.helpers.arrayElement([\n {\n restInteractive: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n restConversation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n restDictation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n webSocketInteractive: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n webSocketConversation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n webSocketDictation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n logs: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n loggingEnabled: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n timeToLive: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n text: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n model: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n locale: faker.string.alpha(20),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n ...overrideResponse\n})\n\nexport const getEndpointsGetResponseMock = (\n overrideResponse: Partial<Endpoint> = {}\n): Endpoint => ({\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n links: faker.helpers.arrayElement([\n {\n restInteractive: faker.helpers.arrayElement([faker.internet.url(), 
undefined]),\n restConversation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n restDictation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n webSocketInteractive: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n webSocketConversation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n webSocketDictation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n logs: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n loggingEnabled: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n timeToLive: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n text: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n model: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n locale: faker.string.alpha(20),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n ...overrideResponse\n})\n\nexport const getEndpointsUpdateResponseMock = (\n overrideResponse: Partial<Endpoint> = {}\n): Endpoint => ({\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n links: faker.helpers.arrayElement([\n {\n restInteractive: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n restConversation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n restDictation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n webSocketInteractive: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n webSocketConversation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n webSocketDictation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n logs: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n loggingEnabled: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n timeToLive: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n text: faker.helpers.arrayElement([faker.string.alpha(20), 
undefined]),\n model: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n locale: faker.string.alpha(20),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n ...overrideResponse\n})\n\nexport const getEndpointsListLogsResponseMock = (\n overrideResponse: Partial<PaginatedFiles> = {}\n): PaginatedFiles => ({\n values: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n kind: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(FileKind)),\n undefined\n ]),\n links: faker.helpers.arrayElement([\n { contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n size: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n name: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined])\n })),\n undefined\n ]),\n \"@nextLink\": faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getEndpointsGetLogResponseMock = (overrideResponse: Partial<File> = {}): File => ({\n kind: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(FileKind)),\n undefined\n ]),\n links: faker.helpers.arrayElement([\n { contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n size: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n name: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getEndpointsListBaseModelLogsResponseMock = (\n overrideResponse: Partial<PaginatedFiles> = {}\n): PaginatedFiles => ({\n values: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n kind: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(FileKind)),\n undefined\n ]),\n links: faker.helpers.arrayElement([\n { contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n size: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n duration: 
faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n name: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined])\n })),\n undefined\n ]),\n \"@nextLink\": faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getEndpointsGetBaseModelLogResponseMock = (\n overrideResponse: Partial<File> = {}\n): File => ({\n kind: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(FileKind)),\n undefined\n ]),\n links: faker.helpers.arrayElement([\n { contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n size: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n name: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getEvaluationsListSupportedLocalesResponseMock = (): string[] =>\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, () => faker.word.sample())\n\nexport const getEvaluationsListResponseMock = (\n overrideResponse: Partial<PaginatedEvaluations> = {}\n): PaginatedEvaluations => ({\n values: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n model1: { self: faker.internet.url() },\n model2: { self: faker.internet.url() },\n transcription1: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n transcription2: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n dataset: { self: faker.internet.url() },\n links: faker.helpers.arrayElement([\n { files: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n wordErrorRate2: faker.helpers.arrayElement([faker.number.float(), undefined]),\n wordErrorRate1: faker.helpers.arrayElement([faker.number.float(), undefined]),\n sentenceErrorRate2: faker.helpers.arrayElement([faker.number.float(), undefined]),\n sentenceCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n correctWordCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordSubstitutionCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordDeletionCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordInsertionCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n sentenceErrorRate1: faker.helpers.arrayElement([faker.number.float(), undefined]),\n sentenceCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n correctWordCount1: 
faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordSubstitutionCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordDeletionCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordInsertionCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n locale: faker.string.alpha(20)\n })),\n undefined\n ]),\n \"@nextLink\": faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getEvaluationsCreateResponseMock = (\n overrideResponse: Partial<Evaluation> = {}\n): Evaluation => ({\n model1: { self: faker.internet.url() },\n model2: { self: faker.internet.url() },\n transcription1: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n transcription2: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n dataset: { self: faker.internet.url() },\n links: faker.helpers.arrayElement([\n { files: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n wordErrorRate2: faker.helpers.arrayElement([faker.number.float(), undefined]),\n wordErrorRate1: faker.helpers.arrayElement([faker.number.float(), undefined]),\n sentenceErrorRate2: faker.helpers.arrayElement([faker.number.float(), undefined]),\n sentenceCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n correctWordCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordSubstitutionCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordDeletionCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordInsertionCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n sentenceErrorRate1: faker.helpers.arrayElement([faker.number.float(), undefined]),\n sentenceCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordCount1: 
faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n correctWordCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordSubstitutionCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordDeletionCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordInsertionCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n locale: faker.string.alpha(20),\n ...overrideResponse\n})\n\nexport const getEvaluationsListFilesResponseMock = (\n overrideResponse: Partial<PaginatedFiles> = {}\n): PaginatedFiles => ({\n values: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n kind: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(FileKind)),\n undefined\n ]),\n links: faker.helpers.arrayElement([\n { contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n size: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n name: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined])\n })),\n undefined\n ]),\n \"@nextLink\": faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getEvaluationsGetFileResponseMock = (overrideResponse: Partial<File> = {}): File => ({\n kind: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(FileKind)),\n undefined\n ]),\n links: faker.helpers.arrayElement([\n { contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n size: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n duration: 
faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n name: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getEvaluationsGetResponseMock = (\n overrideResponse: Partial<Evaluation> = {}\n): Evaluation => ({\n model1: { self: faker.internet.url() },\n model2: { self: faker.internet.url() },\n transcription1: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n transcription2: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n dataset: { self: faker.internet.url() },\n links: faker.helpers.arrayElement([\n { files: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n wordErrorRate2: faker.helpers.arrayElement([faker.number.float(), undefined]),\n wordErrorRate1: faker.helpers.arrayElement([faker.number.float(), undefined]),\n sentenceErrorRate2: faker.helpers.arrayElement([faker.number.float(), undefined]),\n sentenceCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n correctWordCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordSubstitutionCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordDeletionCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordInsertionCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n sentenceErrorRate1: faker.helpers.arrayElement([faker.number.float(), undefined]),\n sentenceCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n correctWordCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordSubstitutionCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordDeletionCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordInsertionCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n displayName: faker.string.alpha(20),\n description: 
faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n locale: faker.string.alpha(20),\n ...overrideResponse\n})\n\nexport const getEvaluationsUpdateResponseMock = (\n overrideResponse: Partial<Evaluation> = {}\n): Evaluation => ({\n model1: { self: faker.internet.url() },\n model2: { self: faker.internet.url() },\n transcription1: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n transcription2: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n dataset: { self: faker.internet.url() },\n links: faker.helpers.arrayElement([\n { files: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n wordErrorRate2: faker.helpers.arrayElement([faker.number.float(), undefined]),\n wordErrorRate1: faker.helpers.arrayElement([faker.number.float(), undefined]),\n sentenceErrorRate2: faker.helpers.arrayElement([faker.number.float(), undefined]),\n sentenceCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n correctWordCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordSubstitutionCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordDeletionCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordInsertionCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n sentenceErrorRate1: faker.helpers.arrayElement([faker.number.float(), undefined]),\n sentenceCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n correctWordCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordSubstitutionCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordDeletionCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordInsertionCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n displayName: faker.string.alpha(20),\n 
description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n locale: faker.string.alpha(20),\n ...overrideResponse\n})\n\nexport const getModelsListSupportedLocalesResponseMock = (): string[] =>\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, () => faker.word.sample())\n\nexport const getModelsListCustomModelsResponseMock = (\n overrideResponse: Partial<PaginatedCustomModels> = {}\n): PaginatedCustomModels => ({\n values: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n ...{\n ...{\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n locale: faker.string.alpha(20),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n }\n },\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n links: faker.helpers.arrayElement([\n {\n copyTo: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n files: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n manifest: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n customModelWeightPercent: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: 100 }),\n undefined\n ]),\n deprecationDates: faker.helpers.arrayElement([\n {\n transcriptionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n },\n undefined\n ]),\n features: faker.helpers.arrayElement([\n {\n ...{\n supportsTranscriptions: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n supportsEndpoints: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n supportsTranscriptionsOnSpeechContainers: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n text: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n baseModel: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n datasets: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n self: faker.internet.url()\n })),\n undefined\n ]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ])\n })),\n undefined\n ]),\n \"@nextLink\": faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getModelsCreateResponseMock = (): CustomModel => ({\n ...{\n ...{\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n locale: 
faker.string.alpha(20),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n }\n },\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n links: faker.helpers.arrayElement([\n {\n copyTo: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n files: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n manifest: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n customModelWeightPercent: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: 100 }),\n undefined\n ]),\n deprecationDates: faker.helpers.arrayElement([\n {\n transcriptionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n },\n undefined\n ]),\n features: faker.helpers.arrayElement([\n {\n ...{\n supportsTranscriptions: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n supportsEndpoints: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n supportsTranscriptionsOnSpeechContainers: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n text: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n baseModel: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n datasets: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n self: faker.internet.url()\n })),\n undefined\n ]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ])\n})\n\nexport const getModelsListBaseModelsResponseMock = (\n overrideResponse: Partial<PaginatedBaseModels> = {}\n): PaginatedBaseModels => ({\n values: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n ...{\n ...{\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n locale: faker.string.alpha(20),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n }\n },\n links: faker.helpers.arrayElement([\n { manifest: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n deprecationDates: faker.helpers.arrayElement([\n {\n 
adaptationDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n transcriptionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n },\n undefined\n ]),\n features: faker.helpers.arrayElement([\n {\n ...{\n ...{\n supportsTranscriptions: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n supportsEndpoints: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n supportsTranscriptionsOnSpeechContainers: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n supportsAdaptationsWith: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(DatasetKind)),\n undefined\n ])\n },\n undefined\n ])\n },\n undefined\n ])\n })),\n undefined\n ]),\n \"@nextLink\": faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getModelsGetCustomModelResponseMock = (): CustomModel => ({\n ...{\n ...{\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n locale: faker.string.alpha(20),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n }\n },\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n links: faker.helpers.arrayElement([\n {\n copyTo: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n files: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n manifest: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n customModelWeightPercent: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: 100 }),\n undefined\n ]),\n deprecationDates: faker.helpers.arrayElement([\n {\n transcriptionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n },\n undefined\n ]),\n features: faker.helpers.arrayElement([\n {\n ...{\n supportsTranscriptions: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n supportsEndpoints: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n supportsTranscriptionsOnSpeechContainers: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n text: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n baseModel: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n datasets: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n self: faker.internet.url()\n })),\n undefined\n ]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n 
])\n})\n\nexport const getModelsUpdateResponseMock = (): CustomModel => ({\n ...{\n ...{\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n locale: faker.string.alpha(20),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n }\n },\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n links: faker.helpers.arrayElement([\n {\n copyTo: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n files: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n manifest: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n customModelWeightPercent: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: 100 }),\n undefined\n ]),\n deprecationDates: faker.helpers.arrayElement([\n {\n transcriptionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n },\n undefined\n ]),\n features: faker.helpers.arrayElement([\n {\n ...{\n supportsTranscriptions: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n supportsEndpoints: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n supportsTranscriptionsOnSpeechContainers: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n text: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n baseModel: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n datasets: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n self: faker.internet.url()\n })),\n undefined\n ]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ])\n})\n\nexport const getModelsGetBaseModelResponseMock = (): BaseModel => ({\n ...{\n ...{\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n locale: faker.string.alpha(20),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n }\n },\n links: faker.helpers.arrayElement([\n { manifest: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n deprecationDates: faker.helpers.arrayElement([\n {\n adaptationDateTime: 
faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n transcriptionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n },\n undefined\n ]),\n features: faker.helpers.arrayElement([\n {\n ...{\n ...{\n supportsTranscriptions: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n supportsEndpoints: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n supportsTranscriptionsOnSpeechContainers: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n supportsAdaptationsWith: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(DatasetKind)),\n undefined\n ])\n },\n undefined\n ])\n },\n undefined\n ])\n})\n\nexport const getModelsCopyToResponseMock = (): CustomModel => ({\n ...{\n ...{\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n locale: faker.string.alpha(20),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n }\n },\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n links: faker.helpers.arrayElement([\n {\n copyTo: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n files: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n manifest: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n customModelWeightPercent: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: 100 }),\n undefined\n ]),\n deprecationDates: faker.helpers.arrayElement([\n {\n transcriptionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n },\n undefined\n ]),\n features: faker.helpers.arrayElement([\n {\n ...{\n supportsTranscriptions: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n supportsEndpoints: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n supportsTranscriptionsOnSpeechContainers: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n text: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n baseModel: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n datasets: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n self: faker.internet.url()\n })),\n undefined\n ]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ])\n})\n\nexport const getModelsGetCustomModelManifestResponseMock = (\n overrideResponse: Partial<ModelManifest> = {}\n): ModelManifest => ({\n model: { self: 
faker.internet.url() },\n modelFiles: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n name: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined])\n })\n ),\n properties: {\n [faker.string.alphanumeric(5)]: {}\n },\n ...overrideResponse\n})\n\nexport const getModelsGetBaseModelManifestResponseMock = (\n overrideResponse: Partial<ModelManifest> = {}\n): ModelManifest => ({\n model: { self: faker.internet.url() },\n modelFiles: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n name: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined])\n })\n ),\n properties: {\n [faker.string.alphanumeric(5)]: {}\n },\n ...overrideResponse\n})\n\nexport const getModelsListFilesResponseMock = (\n overrideResponse: Partial<PaginatedFiles> = {}\n): PaginatedFiles => ({\n values: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n kind: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(FileKind)),\n undefined\n ]),\n links: faker.helpers.arrayElement([\n { contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n size: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n name: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined])\n })),\n undefined\n ]),\n \"@nextLink\": faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getModelsGetFileResponseMock = (overrideResponse: Partial<File> = {}): File => ({\n kind: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(FileKind)),\n undefined\n ]),\n links: faker.helpers.arrayElement([\n { contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n size: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n name: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getProjectsListSupportedLocalesResponseMock = (): string[] =>\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, () => faker.word.sample())\n\nexport const getProjectsListResponseMock = (\n overrideResponse: Partial<PaginatedProjects> = {}\n): PaginatedProjects => ({\n values: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n links: faker.helpers.arrayElement([\n {\n evaluations: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n datasets: 
faker.helpers.arrayElement([faker.internet.url(), undefined]),\n models: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n endpoints: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n transcriptions: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n datasetCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n evaluationCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n modelCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n transcriptionCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n endpointCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ])\n },\n undefined\n ]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n locale: faker.string.alpha(20),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n })),\n undefined\n ]),\n \"@nextLink\": faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getProjectsCreateResponseMock = (\n overrideResponse: Partial<Project> = {}\n): Project => ({\n links: faker.helpers.arrayElement([\n {\n evaluations: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n datasets: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n models: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n endpoints: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n transcriptions: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n datasetCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n evaluationCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n modelCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n transcriptionCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n endpointCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ])\n },\n undefined\n ]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n locale: faker.string.alpha(20),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n ...overrideResponse\n})\n\nexport const getProjectsGetResponseMock = (overrideResponse: Partial<Project> = {}): Project => ({\n links: faker.helpers.arrayElement([\n {\n evaluations: faker.helpers.arrayElement([faker.internet.url(), 
undefined]),\n datasets: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n models: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n endpoints: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n transcriptions: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n datasetCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n evaluationCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n modelCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n transcriptionCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n endpointCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ])\n },\n undefined\n ]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n locale: faker.string.alpha(20),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n ...overrideResponse\n})\n\nexport const getProjectsUpdateResponseMock = (\n overrideResponse: Partial<Project> = {}\n): Project => ({\n links: faker.helpers.arrayElement([\n {\n evaluations: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n datasets: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n models: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n endpoints: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n transcriptions: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n datasetCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n evaluationCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n modelCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n transcriptionCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n endpointCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ])\n },\n undefined\n ]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n locale: faker.string.alpha(20),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n ...overrideResponse\n})\n\nexport const getProjectsListEvaluationsResponseMock = (\n overrideResponse: Partial<PaginatedEvaluations> = {}\n): PaginatedEvaluations => ({\n values: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n model1: { self: 
faker.internet.url() },\n model2: { self: faker.internet.url() },\n transcription1: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n transcription2: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n dataset: { self: faker.internet.url() },\n links: faker.helpers.arrayElement([\n { files: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n wordErrorRate2: faker.helpers.arrayElement([faker.number.float(), undefined]),\n wordErrorRate1: faker.helpers.arrayElement([faker.number.float(), undefined]),\n sentenceErrorRate2: faker.helpers.arrayElement([faker.number.float(), undefined]),\n sentenceCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n correctWordCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordSubstitutionCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordDeletionCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordInsertionCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n sentenceErrorRate1: faker.helpers.arrayElement([faker.number.float(), undefined]),\n sentenceCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n correctWordCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordSubstitutionCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordDeletionCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordInsertionCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n locale: faker.string.alpha(20)\n })),\n undefined\n ]),\n \"@nextLink\": faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const 
getProjectsListDatasetsResponseMock = (\n overrideResponse: Partial<PaginatedDatasets> = {}\n): PaginatedDatasets => ({\n values: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n links: faker.helpers.arrayElement([\n {\n files: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n commitBlocks: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n listBlocks: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n uploadBlocks: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n acceptedLineCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n rejectedLineCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n kind: faker.helpers.arrayElement(Object.values(DatasetKind)),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n locale: faker.string.alpha(20),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n })),\n undefined\n ]),\n \"@nextLink\": faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getProjectsListEndpointsResponseMock = (\n overrideResponse: Partial<PaginatedEndpoints> = {}\n): PaginatedEndpoints => ({\n values: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n links: faker.helpers.arrayElement([\n {\n restInteractive: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n restConversation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n restDictation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n webSocketInteractive: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n webSocketConversation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n webSocketDictation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n logs: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n loggingEnabled: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n timeToLive: 
faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n text: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n model: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n locale: faker.string.alpha(20),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n })),\n undefined\n ]),\n \"@nextLink\": faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getProjectsListModelsResponseMock = (\n overrideResponse: Partial<PaginatedCustomModels> = {}\n): PaginatedCustomModels => ({\n values: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n ...{\n ...{\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n locale: faker.string.alpha(20),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n }\n },\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n links: faker.helpers.arrayElement([\n {\n copyTo: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n files: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n manifest: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n customModelWeightPercent: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: 100 }),\n undefined\n ]),\n deprecationDates: faker.helpers.arrayElement([\n {\n transcriptionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n },\n undefined\n ]),\n features: faker.helpers.arrayElement([\n {\n ...{\n supportsTranscriptions: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n supportsEndpoints: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n supportsTranscriptionsOnSpeechContainers: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: 
faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n text: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n baseModel: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n datasets: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n self: faker.internet.url()\n })),\n undefined\n ]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ])\n })),\n undefined\n ]),\n \"@nextLink\": faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getProjectsListTranscriptionsResponseMock = (\n overrideResponse: Partial<PaginatedTranscriptions> = {}\n): PaginatedTranscriptions => ({\n values: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n links: faker.helpers.arrayElement([\n { files: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n diarizationEnabled: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n wordLevelTimestampsEnabled: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n displayFormWordLevelTimestampsEnabled: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n channels: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.number.int({ min: undefined, max: undefined })\n ),\n undefined\n ]),\n destinationContainerUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n punctuationMode: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(PunctuationMode)),\n undefined\n ]),\n profanityFilterMode: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(ProfanityFilterMode)),\n undefined\n ]),\n timeToLive: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n diarization: faker.helpers.arrayElement([\n {\n speakers: {\n minCount: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ]),\n maxCount: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ])\n }\n },\n undefined\n ]),\n languageIdentification: faker.helpers.arrayElement([\n {\n candidateLocales: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20)),\n speechModelMapping: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: { self: faker.internet.url() }\n },\n undefined\n ])\n },\n undefined\n ]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n model: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n dataset: faker.helpers.arrayElement([{ self: 
faker.internet.url() }, undefined]),\n contentUrls: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.internet.url()\n ),\n undefined\n ]),\n contentContainerUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n locale: faker.string.alpha(20),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n })),\n undefined\n ]),\n \"@nextLink\": faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getTranscriptionsListSupportedLocalesResponseMock = (): string[] =>\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, () => faker.word.sample())\n\nexport const getTranscriptionsListResponseMock = (\n overrideResponse: Partial<PaginatedTranscriptions> = {}\n): PaginatedTranscriptions => ({\n values: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n links: faker.helpers.arrayElement([\n { files: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n diarizationEnabled: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n wordLevelTimestampsEnabled: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n displayFormWordLevelTimestampsEnabled: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n channels: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.number.int({ min: undefined, max: undefined })\n ),\n undefined\n ]),\n destinationContainerUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n punctuationMode: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(PunctuationMode)),\n undefined\n ]),\n profanityFilterMode: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(ProfanityFilterMode)),\n undefined\n ]),\n timeToLive: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n diarization: faker.helpers.arrayElement([\n {\n speakers: {\n minCount: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ]),\n maxCount: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ])\n }\n },\n undefined\n ]),\n languageIdentification: faker.helpers.arrayElement([\n {\n candidateLocales: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20)),\n speechModelMapping: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: { self: faker.internet.url() }\n },\n undefined\n ])\n },\n undefined\n ]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: 
faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n model: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n dataset: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n contentUrls: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.internet.url()\n ),\n undefined\n ]),\n contentContainerUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n locale: faker.string.alpha(20),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n })),\n undefined\n ]),\n \"@nextLink\": faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getTranscriptionsCreateResponseMock = (\n overrideResponse: Partial<Transcription> = {}\n): Transcription => ({\n links: faker.helpers.arrayElement([\n { files: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n diarizationEnabled: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n wordLevelTimestampsEnabled: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n displayFormWordLevelTimestampsEnabled: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n channels: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.number.int({ min: undefined, max: undefined })\n ),\n undefined\n ]),\n destinationContainerUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n punctuationMode: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(PunctuationMode)),\n undefined\n ]),\n profanityFilterMode: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(ProfanityFilterMode)),\n undefined\n ]),\n timeToLive: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n diarization: faker.helpers.arrayElement([\n {\n speakers: {\n minCount: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ]),\n maxCount: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ])\n }\n },\n undefined\n ]),\n languageIdentification: faker.helpers.arrayElement([\n {\n candidateLocales: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20)),\n speechModelMapping: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: { self: faker.internet.url() }\n },\n undefined\n ])\n },\n undefined\n ]),\n email: 
faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n model: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n dataset: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n contentUrls: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.internet.url()\n ),\n undefined\n ]),\n contentContainerUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n locale: faker.string.alpha(20),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n ...overrideResponse\n})\n\nexport const getTranscriptionsGetResponseMock = (\n overrideResponse: Partial<Transcription> = {}\n): Transcription => ({\n links: faker.helpers.arrayElement([\n { files: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n diarizationEnabled: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n wordLevelTimestampsEnabled: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n displayFormWordLevelTimestampsEnabled: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n channels: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.number.int({ min: undefined, max: undefined })\n ),\n undefined\n ]),\n destinationContainerUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n punctuationMode: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(PunctuationMode)),\n undefined\n ]),\n profanityFilterMode: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(ProfanityFilterMode)),\n undefined\n ]),\n timeToLive: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n diarization: faker.helpers.arrayElement([\n {\n speakers: {\n minCount: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ]),\n maxCount: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ])\n }\n },\n undefined\n ]),\n languageIdentification: faker.helpers.arrayElement([\n {\n candidateLocales: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20)),\n speechModelMapping: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: { self: faker.internet.url() }\n },\n undefined\n ])\n },\n undefined\n ]),\n 
email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n model: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n dataset: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n contentUrls: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.internet.url()\n ),\n undefined\n ]),\n contentContainerUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n locale: faker.string.alpha(20),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n ...overrideResponse\n})\n\nexport const getTranscriptionsUpdateResponseMock = (\n overrideResponse: Partial<Transcription> = {}\n): Transcription => ({\n links: faker.helpers.arrayElement([\n { files: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n diarizationEnabled: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n wordLevelTimestampsEnabled: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n displayFormWordLevelTimestampsEnabled: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n channels: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.number.int({ min: undefined, max: undefined })\n ),\n undefined\n ]),\n destinationContainerUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n punctuationMode: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(PunctuationMode)),\n undefined\n ]),\n profanityFilterMode: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(ProfanityFilterMode)),\n undefined\n ]),\n timeToLive: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n diarization: faker.helpers.arrayElement([\n {\n speakers: {\n minCount: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ]),\n maxCount: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ])\n }\n },\n undefined\n ]),\n languageIdentification: faker.helpers.arrayElement([\n {\n candidateLocales: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20)),\n speechModelMapping: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: { self: faker.internet.url() }\n },\n undefined\n ])\n },\n 
undefined\n ]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n model: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n dataset: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n contentUrls: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.internet.url()\n ),\n undefined\n ]),\n contentContainerUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n locale: faker.string.alpha(20),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n ...overrideResponse\n})\n\nexport const getTranscriptionsListFilesResponseMock = (\n overrideResponse: Partial<PaginatedFiles> = {}\n): PaginatedFiles => ({\n values: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n kind: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(FileKind)),\n undefined\n ]),\n links: faker.helpers.arrayElement([\n { contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n size: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n name: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined])\n })),\n undefined\n ]),\n \"@nextLink\": faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getTranscriptionsGetFileResponseMock = (\n overrideResponse: Partial<File> = {}\n): File => ({\n kind: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(FileKind)),\n undefined\n ]),\n links: faker.helpers.arrayElement([\n { contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n size: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n name: faker.helpers.arrayElement([faker.string.alpha(20), 
undefined]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getWebHooksListResponseMock = (\n overrideResponse: Partial<PaginatedWebHooks> = {}\n): PaginatedWebHooks => ({\n values: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n webUrl: faker.internet.url(),\n links: faker.helpers.arrayElement([\n {\n ping: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n test: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n apiVersion: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n secret: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n events: {\n datasetCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n datasetProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n datasetCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n datasetDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n modelCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n modelProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n modelCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n modelDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n evaluationCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n evaluationProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n evaluationCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n evaluationDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n transcriptionCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n transcriptionProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n transcriptionCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n transcriptionDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n endpointCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n endpointProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n endpointCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n endpointDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n ping: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n challenge: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n },\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n 
customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ])\n })),\n undefined\n ]),\n \"@nextLink\": faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getWebHooksCreateResponseMock = (\n overrideResponse: Partial<WebHook> = {}\n): WebHook => ({\n webUrl: faker.internet.url(),\n links: faker.helpers.arrayElement([\n {\n ping: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n test: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n apiVersion: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n secret: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n events: {\n datasetCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n datasetProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n datasetCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n datasetDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n modelCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n modelProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n modelCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n modelDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n evaluationCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n evaluationProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n evaluationCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n evaluationDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n transcriptionCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n transcriptionProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n transcriptionCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n transcriptionDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n endpointCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n endpointProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n endpointCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n endpointDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n ping: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n challenge: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n },\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n 
customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n ...overrideResponse\n})\n\nexport const getWebHooksGetResponseMock = (overrideResponse: Partial<WebHook> = {}): WebHook => ({\n webUrl: faker.internet.url(),\n links: faker.helpers.arrayElement([\n {\n ping: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n test: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n apiVersion: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n secret: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n events: {\n datasetCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n datasetProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n datasetCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n datasetDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n modelCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n modelProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n modelCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n modelDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n evaluationCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n evaluationProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n evaluationCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n evaluationDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n transcriptionCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n transcriptionProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n transcriptionCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n transcriptionDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n endpointCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n endpointProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n endpointCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n endpointDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n ping: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n challenge: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n },\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n 
]),\n ...overrideResponse\n})\n\nexport const getWebHooksUpdateResponseMock = (\n overrideResponse: Partial<WebHook> = {}\n): WebHook => ({\n webUrl: faker.internet.url(),\n links: faker.helpers.arrayElement([\n {\n ping: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n test: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n apiVersion: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n secret: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n events: {\n datasetCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n datasetProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n datasetCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n datasetDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n modelCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n modelProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n modelCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n modelDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n evaluationCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n evaluationProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n evaluationCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n evaluationDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n transcriptionCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n transcriptionProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n transcriptionCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n transcriptionDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n endpointCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n endpointProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n endpointCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n endpointDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n ping: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n challenge: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n },\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n ...overrideResponse\n})\n\nexport const getServiceHealthGetResponseMock = (\n overrideResponse: 
Partial<ServiceHealth> = {}\n): ServiceHealth => ({\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(HealthStatus)),\n undefined\n ]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n components: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n name: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(HealthStatus)),\n undefined\n ]),\n type: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n })),\n undefined\n ]),\n ...overrideResponse\n})\n\nexport const getDatasetsListSupportedLocalesMockHandler = (\n overrideResponse?:\n | DatasetLocales\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<DatasetLocales> | DatasetLocales)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/datasets/locales\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getDatasetsListSupportedLocalesResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getDatasetsListMockHandler = (\n overrideResponse?:\n | PaginatedDatasets\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PaginatedDatasets> | PaginatedDatasets)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/datasets\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getDatasetsListResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getDatasetsCreateMockHandler = (\n overrideResponse?:\n | Dataset\n | ((info: Parameters<Parameters<typeof http.post>[1]>[0]) => Promise<Dataset> | Dataset)\n) => {\n return http.post(\"https://api.cognitive.microsoft.com/datasets\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getDatasetsCreateResponseMock()\n ),\n { status: 201, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getDatasetsGetMockHandler = (\n overrideResponse?:\n | Dataset\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<Dataset> | Dataset)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/datasets/:id\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getDatasetsGetResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getDatasetsUpdateMockHandler = (\n overrideResponse?:\n | Dataset\n | ((info: Parameters<Parameters<typeof http.patch>[1]>[0]) => Promise<Dataset> | Dataset)\n) => {\n return http.patch(\"https://api.cognitive.microsoft.com/datasets/:id\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getDatasetsUpdateResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getDatasetsDeleteMockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.delete>[1]>[0]) => Promise<void> | void)\n) => {\n return http.delete(\"https://api.cognitive.microsoft.com/datasets/:id\", async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 204 })\n })\n}\n\nexport const getDatasetsGetBlocksMockHandler = (\n overrideResponse?:\n | UploadedBlocks\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<UploadedBlocks> | UploadedBlocks)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/datasets/:id/blocks\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getDatasetsGetBlocksResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getDatasetsUploadBlockMockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.put>[1]>[0]) => Promise<void> | void)\n) => {\n return http.put(\"https://api.cognitive.microsoft.com/datasets/:id/blocks\", async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 201 })\n })\n}\n\nexport const getDatasetsUploadMockHandler = (\n overrideResponse?:\n | Dataset\n | ((info: Parameters<Parameters<typeof http.post>[1]>[0]) => Promise<Dataset> | Dataset)\n) => {\n return http.post(\"https://api.cognitive.microsoft.com/datasets/upload\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getDatasetsUploadResponseMock()\n ),\n { status: 201, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getDatasetsCommitBlocksMockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.post>[1]>[0]) => Promise<void> | void)\n) => {\n return http.post(\n \"https://api.cognitive.microsoft.com/datasets/:id/blocks:commit\",\n async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 200 })\n }\n )\n}\n\nexport const getDatasetsListFilesMockHandler = (\n overrideResponse?:\n | PaginatedFiles\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PaginatedFiles> | PaginatedFiles)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/datasets/:id/files\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getDatasetsListFilesResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getDatasetsGetFileMockHandler = (\n overrideResponse?:\n | File\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<File> | File)\n) => {\n return http.get(\n \"https://api.cognitive.microsoft.com/datasets/:id/files/:fileId\",\n async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getDatasetsGetFileResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n }\n )\n}\n\nexport const getEndpointsListSupportedLocalesMockHandler = (\n overrideResponse?:\n | string[]\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<string[]> | string[])\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/endpoints/locales\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getEndpointsListSupportedLocalesResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getEndpointsListMockHandler = (\n overrideResponse?:\n | PaginatedEndpoints\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PaginatedEndpoints> | PaginatedEndpoints)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/endpoints\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getEndpointsListResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getEndpointsCreateMockHandler = (\n overrideResponse?:\n | Endpoint\n | ((info: Parameters<Parameters<typeof http.post>[1]>[0]) => Promise<Endpoint> | Endpoint)\n) => {\n return http.post(\"https://api.cognitive.microsoft.com/endpoints\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getEndpointsCreateResponseMock()\n ),\n { status: 201, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getEndpointsGetMockHandler = (\n overrideResponse?:\n | Endpoint\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<Endpoint> | Endpoint)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/endpoints/:id\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getEndpointsGetResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getEndpointsDeleteMockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.delete>[1]>[0]) => Promise<void> | void)\n) => {\n return http.delete(\"https://api.cognitive.microsoft.com/endpoints/:id\", async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 204 })\n })\n}\n\nexport const getEndpointsUpdateMockHandler = (\n overrideResponse?:\n | Endpoint\n | ((info: Parameters<Parameters<typeof http.patch>[1]>[0]) => Promise<Endpoint> | Endpoint)\n) => {\n return http.patch(\"https://api.cognitive.microsoft.com/endpoints/:id\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getEndpointsUpdateResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getEndpointsListLogsMockHandler = (\n overrideResponse?:\n | PaginatedFiles\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PaginatedFiles> | PaginatedFiles)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/endpoints/:id/files/logs\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getEndpointsListLogsResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getEndpointsDeleteLogsMockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.delete>[1]>[0]) => Promise<void> | void)\n) => {\n return http.delete(\n \"https://api.cognitive.microsoft.com/endpoints/:id/files/logs\",\n async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 202 })\n }\n )\n}\n\nexport const getEndpointsGetLogMockHandler = (\n overrideResponse?:\n | File\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<File> | File)\n) => {\n return http.get(\n \"https://api.cognitive.microsoft.com/endpoints/:id/files/logs/:logId\",\n async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getEndpointsGetLogResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n }\n )\n}\n\nexport const getEndpointsDeleteLogMockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.delete>[1]>[0]) => Promise<void> | void)\n) => {\n return http.delete(\n \"https://api.cognitive.microsoft.com/endpoints/:id/files/logs/:logId\",\n async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 204 })\n }\n )\n}\n\nexport const getEndpointsListBaseModelLogsMockHandler = (\n overrideResponse?:\n | PaginatedFiles\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PaginatedFiles> | PaginatedFiles)\n) => {\n return http.get(\n \"https://api.cognitive.microsoft.com/endpoints/base/:locale/files/logs\",\n async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getEndpointsListBaseModelLogsResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n }\n )\n}\n\nexport const getEndpointsDeleteBaseModelLogsMockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.delete>[1]>[0]) => Promise<void> | void)\n) => {\n return http.delete(\n \"https://api.cognitive.microsoft.com/endpoints/base/:locale/files/logs\",\n async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 202 })\n }\n )\n}\n\nexport const getEndpointsGetBaseModelLogMockHandler = (\n overrideResponse?:\n | File\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<File> | File)\n) => {\n return http.get(\n \"https://api.cognitive.microsoft.com/endpoints/base/:locale/files/logs/:logId\",\n async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getEndpointsGetBaseModelLogResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n }\n )\n}\n\nexport const getEndpointsDeleteBaseModelLogMockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.delete>[1]>[0]) => Promise<void> | void)\n) => {\n return http.delete(\n \"https://api.cognitive.microsoft.com/endpoints/base/:locale/files/logs/:logId\",\n async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 204 })\n }\n )\n}\n\nexport const getEvaluationsListSupportedLocalesMockHandler = (\n overrideResponse?:\n | string[]\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<string[]> | string[])\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/evaluations/locales\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getEvaluationsListSupportedLocalesResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getEvaluationsListMockHandler = (\n overrideResponse?:\n | PaginatedEvaluations\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PaginatedEvaluations> | PaginatedEvaluations)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/evaluations\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getEvaluationsListResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getEvaluationsCreateMockHandler = (\n overrideResponse?:\n | Evaluation\n | ((info: Parameters<Parameters<typeof http.post>[1]>[0]) => Promise<Evaluation> | Evaluation)\n) => {\n return http.post(\"https://api.cognitive.microsoft.com/evaluations\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getEvaluationsCreateResponseMock()\n ),\n { status: 201, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getEvaluationsListFilesMockHandler = (\n overrideResponse?:\n | PaginatedFiles\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PaginatedFiles> | PaginatedFiles)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/evaluations/:id/files\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getEvaluationsListFilesResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getEvaluationsGetFileMockHandler = (\n overrideResponse?:\n | File\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<File> | File)\n) => {\n return http.get(\n \"https://api.cognitive.microsoft.com/evaluations/:id/files/:fileId\",\n async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getEvaluationsGetFileResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n }\n )\n}\n\nexport const getEvaluationsGetMockHandler = (\n overrideResponse?:\n | Evaluation\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<Evaluation> | Evaluation)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/evaluations/:id\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getEvaluationsGetResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getEvaluationsUpdateMockHandler = (\n overrideResponse?:\n | Evaluation\n | ((info: Parameters<Parameters<typeof http.patch>[1]>[0]) => Promise<Evaluation> | Evaluation)\n) => {\n return http.patch(\"https://api.cognitive.microsoft.com/evaluations/:id\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getEvaluationsUpdateResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getEvaluationsDeleteMockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.delete>[1]>[0]) => Promise<void> | void)\n) => {\n return http.delete(\"https://api.cognitive.microsoft.com/evaluations/:id\", async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 204 })\n })\n}\n\nexport const getModelsListSupportedLocalesMockHandler = (\n overrideResponse?:\n | string[]\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<string[]> | string[])\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/models/locales\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getModelsListSupportedLocalesResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getModelsListCustomModelsMockHandler = (\n overrideResponse?:\n | PaginatedCustomModels\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PaginatedCustomModels> | PaginatedCustomModels)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/models\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getModelsListCustomModelsResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getModelsCreateMockHandler = (\n overrideResponse?:\n | CustomModel\n | ((info: Parameters<Parameters<typeof http.post>[1]>[0]) => Promise<CustomModel> | CustomModel)\n) => {\n return http.post(\"https://api.cognitive.microsoft.com/models\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getModelsCreateResponseMock()\n ),\n { status: 201, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getModelsListBaseModelsMockHandler = (\n overrideResponse?:\n | PaginatedBaseModels\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PaginatedBaseModels> | PaginatedBaseModels)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/models/base\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? 
typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getModelsListBaseModelsResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getModelsGetCustomModelMockHandler = (\n overrideResponse?:\n | CustomModel\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<CustomModel> | CustomModel)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/models/:id\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getModelsGetCustomModelResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getModelsUpdateMockHandler = (\n overrideResponse?:\n | CustomModel\n | ((\n info: Parameters<Parameters<typeof http.patch>[1]>[0]\n ) => Promise<CustomModel> | CustomModel)\n) => {\n return http.patch(\"https://api.cognitive.microsoft.com/models/:id\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getModelsUpdateResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getModelsDeleteMockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.delete>[1]>[0]) => Promise<void> | void)\n) => {\n return http.delete(\"https://api.cognitive.microsoft.com/models/:id\", async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 204 })\n })\n}\n\nexport const getModelsGetBaseModelMockHandler = (\n overrideResponse?:\n | BaseModel\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<BaseModel> | BaseModel)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/models/base/:id\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getModelsGetBaseModelResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getModelsCopyToMockHandler = (\n overrideResponse?:\n | CustomModel\n | ((info: Parameters<Parameters<typeof http.post>[1]>[0]) => Promise<CustomModel> | CustomModel)\n) => {\n return http.post(\"https://api.cognitive.microsoft.com/models/:id:copyto\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getModelsCopyToResponseMock()\n ),\n { status: 201, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getModelsGetCustomModelManifestMockHandler = (\n overrideResponse?:\n | ModelManifest\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<ModelManifest> | ModelManifest)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/models/:id/manifest\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? 
typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getModelsGetCustomModelManifestResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getModelsGetBaseModelManifestMockHandler = (\n overrideResponse?:\n | ModelManifest\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<ModelManifest> | ModelManifest)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/models/base/:id/manifest\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getModelsGetBaseModelManifestResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getModelsListFilesMockHandler = (\n overrideResponse?:\n | PaginatedFiles\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PaginatedFiles> | PaginatedFiles)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/models/:id/files\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getModelsListFilesResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getModelsGetFileMockHandler = (\n overrideResponse?:\n | File\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<File> | File)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/models/:id/files/:fileId\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getModelsGetFileResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getProjectsListSupportedLocalesMockHandler = (\n overrideResponse?:\n | string[]\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<string[]> | string[])\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/projects/locales\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getProjectsListSupportedLocalesResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getProjectsListMockHandler = (\n overrideResponse?:\n | PaginatedProjects\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PaginatedProjects> | PaginatedProjects)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/projects\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getProjectsListResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getProjectsCreateMockHandler = (\n overrideResponse?:\n | Project\n | ((info: Parameters<Parameters<typeof http.post>[1]>[0]) => Promise<Project> | Project)\n) => {\n return http.post(\"https://api.cognitive.microsoft.com/projects\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getProjectsCreateResponseMock()\n ),\n { status: 201, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getProjectsGetMockHandler = (\n overrideResponse?:\n | Project\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<Project> | Project)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/projects/:id\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getProjectsGetResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getProjectsUpdateMockHandler = (\n overrideResponse?:\n | Project\n | ((info: Parameters<Parameters<typeof http.patch>[1]>[0]) => Promise<Project> | Project)\n) => {\n return http.patch(\"https://api.cognitive.microsoft.com/projects/:id\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getProjectsUpdateResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getProjectsDeleteMockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.delete>[1]>[0]) => Promise<void> | void)\n) => {\n return http.delete(\"https://api.cognitive.microsoft.com/projects/:id\", async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 204 })\n })\n}\n\nexport const getProjectsListEvaluationsMockHandler = (\n overrideResponse?:\n | PaginatedEvaluations\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PaginatedEvaluations> | PaginatedEvaluations)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/projects/:id/evaluations\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getProjectsListEvaluationsResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getProjectsListDatasetsMockHandler = (\n overrideResponse?:\n | PaginatedDatasets\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PaginatedDatasets> | PaginatedDatasets)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/projects/:id/datasets\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getProjectsListDatasetsResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getProjectsListEndpointsMockHandler = (\n overrideResponse?:\n | PaginatedEndpoints\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PaginatedEndpoints> | PaginatedEndpoints)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/projects/:id/endpoints\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getProjectsListEndpointsResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getProjectsListModelsMockHandler = (\n overrideResponse?:\n | PaginatedCustomModels\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PaginatedCustomModels> | PaginatedCustomModels)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/projects/:id/models\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getProjectsListModelsResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getProjectsListTranscriptionsMockHandler = (\n overrideResponse?:\n | PaginatedTranscriptions\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PaginatedTranscriptions> | PaginatedTranscriptions)\n) => {\n return http.get(\n \"https://api.cognitive.microsoft.com/projects/:id/transcriptions\",\n async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getProjectsListTranscriptionsResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n }\n )\n}\n\nexport const getTranscriptionsListSupportedLocalesMockHandler = (\n overrideResponse?:\n | string[]\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<string[]> | string[])\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/transcriptions/locales\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getTranscriptionsListSupportedLocalesResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getTranscriptionsListMockHandler = (\n overrideResponse?:\n | PaginatedTranscriptions\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PaginatedTranscriptions> | PaginatedTranscriptions)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/transcriptions\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getTranscriptionsListResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getTranscriptionsCreateMockHandler = (\n overrideResponse?:\n | Transcription\n | ((\n info: Parameters<Parameters<typeof http.post>[1]>[0]\n ) => Promise<Transcription> | Transcription)\n) => {\n return http.post(\"https://api.cognitive.microsoft.com/transcriptions\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getTranscriptionsCreateResponseMock()\n ),\n { status: 201, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getTranscriptionsGetMockHandler = (\n overrideResponse?:\n | Transcription\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<Transcription> | Transcription)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/transcriptions/:id\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getTranscriptionsGetResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getTranscriptionsUpdateMockHandler = (\n overrideResponse?:\n | Transcription\n | ((\n info: Parameters<Parameters<typeof http.patch>[1]>[0]\n ) => Promise<Transcription> | Transcription)\n) => {\n return http.patch(\"https://api.cognitive.microsoft.com/transcriptions/:id\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getTranscriptionsUpdateResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getTranscriptionsDeleteMockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.delete>[1]>[0]) => Promise<void> | void)\n) => {\n return http.delete(\"https://api.cognitive.microsoft.com/transcriptions/:id\", async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 204 })\n })\n}\n\nexport const getTranscriptionsListFilesMockHandler = (\n overrideResponse?:\n | PaginatedFiles\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PaginatedFiles> | PaginatedFiles)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/transcriptions/:id/files\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getTranscriptionsListFilesResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getTranscriptionsGetFileMockHandler = (\n overrideResponse?:\n | File\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<File> | File)\n) => {\n return http.get(\n \"https://api.cognitive.microsoft.com/transcriptions/:id/files/:fileId\",\n async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getTranscriptionsGetFileResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n }\n )\n}\n\nexport const getWebHooksListMockHandler = (\n overrideResponse?:\n | PaginatedWebHooks\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PaginatedWebHooks> | PaginatedWebHooks)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/webhooks\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getWebHooksListResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getWebHooksCreateMockHandler = (\n overrideResponse?:\n | WebHook\n | ((info: Parameters<Parameters<typeof http.post>[1]>[0]) => Promise<WebHook> | WebHook)\n) => {\n return http.post(\"https://api.cognitive.microsoft.com/webhooks\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getWebHooksCreateResponseMock()\n ),\n { status: 201, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getWebHooksGetMockHandler = (\n overrideResponse?:\n | WebHook\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<WebHook> | WebHook)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/webhooks/:id\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getWebHooksGetResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getWebHooksUpdateMockHandler = (\n overrideResponse?:\n | WebHook\n | ((info: Parameters<Parameters<typeof http.patch>[1]>[0]) => Promise<WebHook> | WebHook)\n) => {\n return http.patch(\"https://api.cognitive.microsoft.com/webhooks/:id\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getWebHooksUpdateResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getWebHooksDeleteMockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.delete>[1]>[0]) => Promise<void> | void)\n) => {\n return http.delete(\"https://api.cognitive.microsoft.com/webhooks/:id\", async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 204 })\n })\n}\n\nexport const getWebHooksPingMockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.post>[1]>[0]) => Promise<void> | void)\n) => {\n return http.post(\"https://api.cognitive.microsoft.com/webhooks/:id:ping\", async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 202 })\n })\n}\n\nexport const getWebHooksTestMockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.post>[1]>[0]) => Promise<void> | void)\n) => {\n return http.post(\"https://api.cognitive.microsoft.com/webhooks/:id:test\", async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 202 })\n })\n}\n\nexport const getServiceHealthGetMockHandler = (\n overrideResponse?:\n | ServiceHealth\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<ServiceHealth> | ServiceHealth)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/healthstatus\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getServiceHealthGetResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\nexport const getSpeechServicesAPIV31Mock = () => [\n getDatasetsListSupportedLocalesMockHandler(),\n getDatasetsListMockHandler(),\n getDatasetsCreateMockHandler(),\n getDatasetsGetMockHandler(),\n getDatasetsUpdateMockHandler(),\n getDatasetsDeleteMockHandler(),\n getDatasetsGetBlocksMockHandler(),\n getDatasetsUploadBlockMockHandler(),\n getDatasetsUploadMockHandler(),\n getDatasetsCommitBlocksMockHandler(),\n getDatasetsListFilesMockHandler(),\n getDatasetsGetFileMockHandler(),\n getEndpointsListSupportedLocalesMockHandler(),\n getEndpointsListMockHandler(),\n getEndpointsCreateMockHandler(),\n getEndpointsGetMockHandler(),\n getEndpointsDeleteMockHandler(),\n getEndpointsUpdateMockHandler(),\n getEndpointsListLogsMockHandler(),\n getEndpointsDeleteLogsMockHandler(),\n getEndpointsGetLogMockHandler(),\n getEndpointsDeleteLogMockHandler(),\n getEndpointsListBaseModelLogsMockHandler(),\n getEndpointsDeleteBaseModelLogsMockHandler(),\n getEndpointsGetBaseModelLogMockHandler(),\n getEndpointsDeleteBaseModelLogMockHandler(),\n getEvaluationsListSupportedLocalesMockHandler(),\n getEvaluationsListMockHandler(),\n getEvaluationsCreateMockHandler(),\n getEvaluationsListFilesMockHandler(),\n getEvaluationsGetFileMockHandler(),\n getEvaluationsGetMockHandler(),\n getEvaluationsUpdateMockHandler(),\n getEvaluationsDeleteMockHandler(),\n getModelsListSupportedLocalesMockHandler(),\n getModelsListCustomModelsMockHandler(),\n getModelsCreateMockHandler(),\n getModelsListBaseModelsMockHandler(),\n getModelsGetCustomModelMockHandler(),\n getModelsUpdateMockHandler(),\n getModelsDeleteMockHandler(),\n getModelsGetBaseModelMockHandler(),\n getModelsCopyToMockHandler(),\n getModelsGetCustomModelManifestMockHandler(),\n getModelsGetBaseModelManifestMockHandler(),\n getModelsListFilesMockHandler(),\n getModelsGetFileMockHandler(),\n getProjectsListSupportedLocalesMockHandler(),\n getProjectsListMockHandler(),\n getProjectsCreateMockHandler(),\n getProjectsGetMockHandler(),\n getProjectsUpdateMockHandler(),\n getProjectsDeleteMockHandler(),\n getProjectsListEvaluationsMockHandler(),\n getProjectsListDatasetsMockHandler(),\n getProjectsListEndpointsMockHandler(),\n getProjectsListModelsMockHandler(),\n getProjectsListTranscriptionsMockHandler(),\n getTranscriptionsListSupportedLocalesMockHandler(),\n getTranscriptionsListMockHandler(),\n getTranscriptionsCreateMockHandler(),\n getTranscriptionsGetMockHandler(),\n getTranscriptionsUpdateMockHandler(),\n getTranscriptionsDeleteMockHandler(),\n getTranscriptionsListFilesMockHandler(),\n getTranscriptionsGetFileMockHandler(),\n getWebHooksListMockHandler(),\n getWebHooksCreateMockHandler(),\n getWebHooksGetMockHandler(),\n getWebHooksUpdateMockHandler(),\n getWebHooksDeleteMockHandler(),\n getWebHooksPingMockHandler(),\n getWebHooksTestMockHandler(),\n getServiceHealthGetMockHandler()\n]\n","/**\n * OpenAI Whisper transcription provider adapter\n * Documentation: https://platform.openai.com/docs/guides/speech-to-text\n */\n\nimport axios from \"axios\"\nimport type {\n AudioInput,\n ProviderCapabilities,\n TranscribeOptions,\n UnifiedTranscriptResponse\n} from \"../router/types\"\nimport { BaseAdapter, type ProviderConfig } from \"./base-adapter\"\n\n// Import generated API client function - FULL TYPE SAFETY!\nimport { createTranscription } from \"../generated/openai/api/openAIAPI\"\n\n// 
Import OpenAI generated types\nimport type { CreateTranscriptionRequest } from \"../generated/openai/schema/createTranscriptionRequest\"\nimport type { CreateTranscriptionResponseVerboseJson } from \"../generated/openai/schema/createTranscriptionResponseVerboseJson\"\nimport type { CreateTranscriptionResponseDiarizedJson } from \"../generated/openai/schema/createTranscriptionResponseDiarizedJson\"\nimport type { AudioTranscriptionModel } from \"../generated/openai/schema/audioTranscriptionModel\"\n\n/**\n * OpenAI Whisper transcription provider adapter\n *\n * Implements transcription for OpenAI's Whisper and GPT-4o transcription models with support for:\n * - Multiple model options: whisper-1, gpt-4o-transcribe, gpt-4o-mini-transcribe, gpt-4o-transcribe-diarize\n * - Speaker diarization (with gpt-4o-transcribe-diarize model)\n * - Word-level timestamps\n * - Multi-language support\n * - Prompt-based style guidance\n * - Known speaker references for improved diarization\n * - Temperature control for output randomness\n *\n * @see https://platform.openai.com/docs/guides/speech-to-text OpenAI Speech-to-Text Documentation\n * @see https://platform.openai.com/docs/api-reference/audio OpenAI Audio API Reference\n *\n * @example Basic transcription\n * ```typescript\n * import { OpenAIWhisperAdapter } from '@meeting-baas/sdk';\n *\n * const adapter = new OpenAIWhisperAdapter();\n * adapter.initialize({\n * apiKey: process.env.OPENAI_API_KEY\n * });\n *\n * const result = await adapter.transcribe({\n * type: 'url',\n * url: 'https://example.com/audio.mp3'\n * }, {\n * language: 'en'\n * });\n *\n * console.log(result.data.text);\n * ```\n *\n * @example With diarization (speaker identification)\n * ```typescript\n * const result = await adapter.transcribe({\n * type: 'url',\n * url: 'https://example.com/meeting.mp3'\n * }, {\n * language: 'en',\n * diarization: true, // Uses gpt-4o-transcribe-diarize model\n * metadata: {\n * model: 'gpt-4o-transcribe-diarize'\n * }\n * });\n *\n * console.log('Speakers:', result.data.speakers);\n * console.log('Utterances:', result.data.utterances);\n * ```\n *\n * @example With word timestamps and custom model\n * ```typescript\n * const result = await adapter.transcribe(audio, {\n * language: 'en',\n * wordTimestamps: true,\n * metadata: {\n * model: 'gpt-4o-transcribe', // More accurate than whisper-1\n * temperature: 0.2, // Lower temperature for more focused output\n * prompt: 'Expect technical terminology related to AI and machine learning'\n * }\n * });\n *\n * console.log('Words:', result.data.words);\n * ```\n *\n * @example With known speakers for improved diarization\n * ```typescript\n * const result = await adapter.transcribe(audio, {\n * language: 'en',\n * diarization: true,\n * metadata: {\n * model: 'gpt-4o-transcribe-diarize',\n * knownSpeakerNames: ['customer', 'agent'],\n * knownSpeakerReferences: [\n * 'data:audio/wav;base64,...', // Customer voice sample\n * 'data:audio/wav;base64,...' 
// Agent voice sample\n * ]\n * }\n * });\n *\n * // Speakers will be labeled as 'customer' and 'agent' instead of 'A' and 'B'\n * console.log('Speakers:', result.data.speakers);\n * ```\n */\nexport class OpenAIWhisperAdapter extends BaseAdapter {\n readonly name = \"openai-whisper\" as const\n readonly capabilities: ProviderCapabilities = {\n streaming: false, // Synchronous only (no streaming API for transcription)\n diarization: true, // Available with gpt-4o-transcribe-diarize model\n wordTimestamps: true,\n languageDetection: false, // Language should be provided for best accuracy\n customVocabulary: false, // Uses prompt instead\n summarization: false,\n sentimentAnalysis: false,\n entityDetection: false,\n piiRedaction: false\n }\n\n protected baseUrl = \"https://api.openai.com/v1\"\n\n /**\n * Get axios config for generated API client functions\n * Configures headers and base URL using Bearer token authorization\n */\n protected getAxiosConfig() {\n return super.getAxiosConfig(\"Authorization\", (apiKey) => `Bearer ${apiKey}`)\n }\n\n /**\n * Submit audio for transcription\n *\n * OpenAI Whisper API processes audio synchronously and returns results immediately.\n * Supports multiple models with different capabilities:\n * - whisper-1: Open source Whisper V2 model\n * - gpt-4o-transcribe: More accurate GPT-4o based transcription\n * - gpt-4o-mini-transcribe: Faster, cost-effective GPT-4o mini\n * - gpt-4o-transcribe-diarize: GPT-4o with speaker diarization\n *\n * @param audio - Audio input (URL or Buffer)\n * @param options - Transcription options\n * @returns Transcription response with full results\n */\n async transcribe(\n audio: AudioInput,\n options?: TranscribeOptions\n ): Promise<UnifiedTranscriptResponse> {\n this.validateConfig()\n\n try {\n // Fetch audio if URL provided\n let audioData: Buffer | Blob\n let fileName = \"audio.mp3\"\n\n if (audio.type === \"url\") {\n const response = await axios.get(audio.url, {\n responseType: \"arraybuffer\"\n })\n audioData = Buffer.from(response.data)\n\n // Extract filename from URL if possible\n const urlPath = new URL(audio.url).pathname\n const extractedName = urlPath.split(\"/\").pop()\n if (extractedName) {\n fileName = extractedName\n }\n } else if (audio.type === \"file\") {\n audioData = audio.file\n fileName = audio.filename || fileName\n } else {\n return {\n success: false,\n provider: this.name,\n error: {\n code: \"INVALID_INPUT\",\n message: \"OpenAI Whisper only supports URL and File audio input (not stream)\"\n }\n }\n }\n\n // Determine model based on options\n const model = this.selectModel(options)\n\n // Set response format based on requirements\n const isDiarization = model === \"gpt-4o-transcribe-diarize\"\n const needsWords = options?.wordTimestamps === true\n\n // Build typed request using generated types\n const request: CreateTranscriptionRequest = {\n file: audioData as any, // Generated type expects Blob\n model: model as AudioTranscriptionModel\n }\n\n // Add optional parameters\n if (options?.language) {\n request.language = options.language\n }\n\n if (options?.metadata?.prompt) {\n request.prompt = options.metadata.prompt as string\n }\n\n if (options?.metadata?.temperature !== undefined) {\n request.temperature = options.metadata.temperature as number\n }\n\n if (isDiarization) {\n // Diarization model returns diarized_json format\n request.response_format = \"diarized_json\"\n\n // Add known speakers if provided\n if (options?.metadata?.knownSpeakerNames) {\n request.known_speaker_names = 
options.metadata.knownSpeakerNames as string[]\n }\n\n if (options?.metadata?.knownSpeakerReferences) {\n request.known_speaker_references = options.metadata.knownSpeakerReferences as string[]\n }\n } else if (needsWords || options?.diarization) {\n // Use verbose_json for word timestamps\n request.response_format = \"verbose_json\"\n\n // Add timestamp granularities\n if (needsWords) {\n request.timestamp_granularities = [\"word\", \"segment\"]\n }\n } else {\n // Simple json format for basic transcription\n request.response_format = \"json\"\n }\n\n // Use generated API client function - FULLY TYPED!\n const response = await createTranscription(request, this.getAxiosConfig())\n\n return this.normalizeResponse(response.data as any, model, isDiarization)\n } catch (error) {\n return this.createErrorResponse(error)\n }\n }\n\n /**\n * OpenAI Whisper returns results synchronously, so getTranscript is not needed.\n * This method exists for interface compatibility but will return an error.\n */\n async getTranscript(transcriptId: string): Promise<UnifiedTranscriptResponse> {\n return {\n success: false,\n provider: this.name,\n error: {\n code: \"NOT_SUPPORTED\",\n message:\n \"OpenAI Whisper processes transcriptions synchronously. Use transcribe() method directly.\"\n }\n }\n }\n\n /**\n * Select appropriate model based on transcription options\n */\n private selectModel(options?: TranscribeOptions): AudioTranscriptionModel {\n // Use model from metadata if provided\n if (options?.metadata?.model) {\n return options.metadata.model as AudioTranscriptionModel\n }\n\n // Auto-select based on diarization requirement\n if (options?.diarization) {\n return \"gpt-4o-transcribe-diarize\"\n }\n\n // Default to gpt-4o-transcribe (better accuracy than whisper-1)\n return \"gpt-4o-transcribe\"\n }\n\n /**\n * Normalize OpenAI response to unified format\n */\n private normalizeResponse(\n response:\n | CreateTranscriptionResponseVerboseJson\n | CreateTranscriptionResponseDiarizedJson\n | { text: string },\n model: AudioTranscriptionModel,\n isDiarization: boolean\n ): UnifiedTranscriptResponse {\n // Handle simple json format\n if (\"text\" in response && Object.keys(response).length === 1) {\n return {\n success: true,\n provider: this.name,\n data: {\n id: `openai-${Date.now()}`,\n text: response.text,\n status: \"completed\",\n language: undefined,\n confidence: undefined\n },\n raw: response\n }\n }\n\n // Handle diarized format\n if (isDiarization && \"segments\" in response) {\n const diarizedResponse = response as CreateTranscriptionResponseDiarizedJson\n\n // Extract unique speakers\n const speakerSet = new Set(diarizedResponse.segments.map((seg) => seg.speaker))\n const speakers = Array.from(speakerSet).map((speaker) => ({\n id: speaker,\n label: speaker // Already labeled by OpenAI (A, B, C or custom names)\n }))\n\n // Build utterances from segments\n const utterances = diarizedResponse.segments.map((segment) => ({\n speaker: segment.speaker,\n text: segment.text,\n start: segment.start,\n end: segment.end,\n confidence: undefined\n }))\n\n return {\n success: true,\n provider: this.name,\n data: {\n id: `openai-${Date.now()}`,\n text: diarizedResponse.text,\n status: \"completed\",\n language: undefined,\n duration: diarizedResponse.duration,\n speakers,\n utterances\n },\n raw: response\n }\n }\n\n // Handle verbose format\n if (\"duration\" in response && \"language\" in response) {\n const verboseResponse = response as CreateTranscriptionResponseVerboseJson\n\n // Extract words if 
available\n const words = verboseResponse.words?.map((word) => ({\n text: word.word,\n start: word.start,\n end: word.end,\n confidence: undefined\n }))\n\n return {\n success: true,\n provider: this.name,\n data: {\n id: `openai-${Date.now()}`,\n text: verboseResponse.text,\n status: \"completed\",\n language: verboseResponse.language,\n duration: verboseResponse.duration,\n words\n },\n raw: response\n }\n }\n\n // Fallback (shouldn't reach here)\n return {\n success: true,\n provider: this.name,\n data: {\n id: `openai-${Date.now()}`,\n text: \"text\" in response ? response.text : \"\",\n status: \"completed\"\n },\n raw: response\n }\n }\n}\n\n/**\n * Factory function to create an OpenAI Whisper adapter\n */\nexport function createOpenAIWhisperAdapter(config: ProviderConfig): OpenAIWhisperAdapter {\n const adapter = new OpenAIWhisperAdapter()\n adapter.initialize(config)\n return adapter\n}\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * OpenAI API\n * The OpenAI REST API. Please see https://platform.openai.com/docs/api-reference for more details.\n * OpenAPI spec version: 2.3.0\n */\n\nimport type { AxiosRequestConfig, AxiosResponse } from \"axios\"\nimport axios from \"axios\"\n\nimport type {\n CreateFileRequest,\n CreateSpeechRequest,\n CreateSpeechResponseStreamEvent,\n CreateTranscription200One,\n CreateTranscriptionRequest,\n CreateTranscriptionResponseStreamEvent,\n CreateTranslation200,\n CreateTranslationRequest,\n DeleteFileResponse,\n DeleteModelResponse,\n ListFilesParams,\n ListFilesResponse,\n ListModelsResponse,\n Model,\n OpenAIFile\n} from \"../schema\"\n\n/**\n * Generates audio from the input text.\n * @summary Create speech\n */\nexport const createSpeech = <TData = AxiosResponse<Blob | CreateSpeechResponseStreamEvent>>(\n createSpeechRequest: CreateSpeechRequest,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(\"/audio/speech\", createSpeechRequest, options)\n}\n\n/**\n * Transcribes audio into the input language.\n * @summary Create transcription\n */\nexport const createTranscription = <\n TData = AxiosResponse<CreateTranscription200One | CreateTranscriptionResponseStreamEvent>\n>(\n createTranscriptionRequest: CreateTranscriptionRequest,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n const formData = new FormData()\n formData.append(\"file\", createTranscriptionRequest.file)\n formData.append(\"model\", createTranscriptionRequest.model)\n if (createTranscriptionRequest.language !== undefined) {\n formData.append(\"language\", createTranscriptionRequest.language)\n }\n if (createTranscriptionRequest.prompt !== undefined) {\n formData.append(\"prompt\", createTranscriptionRequest.prompt)\n }\n if (createTranscriptionRequest.response_format !== undefined) {\n formData.append(\"response_format\", createTranscriptionRequest.response_format)\n }\n if (createTranscriptionRequest.temperature !== undefined) {\n formData.append(\"temperature\", createTranscriptionRequest.temperature.toString())\n }\n if (createTranscriptionRequest.include !== undefined) {\n createTranscriptionRequest.include.forEach((value) => formData.append(\"include\", value))\n }\n if (createTranscriptionRequest.timestamp_granularities !== undefined) {\n createTranscriptionRequest.timestamp_granularities.forEach((value) =>\n formData.append(\"timestamp_granularities\", value)\n )\n }\n if (\n createTranscriptionRequest.stream !== undefined &&\n createTranscriptionRequest.stream !== null\n ) {\n formData.append(\"stream\", 
createTranscriptionRequest.stream.toString())\n }\n if (\n createTranscriptionRequest.chunking_strategy !== undefined &&\n createTranscriptionRequest.chunking_strategy !== null\n ) {\n formData.append(\n \"chunking_strategy\",\n typeof createTranscriptionRequest.chunking_strategy === \"object\"\n ? JSON.stringify(createTranscriptionRequest.chunking_strategy)\n : createTranscriptionRequest.chunking_strategy\n )\n }\n if (createTranscriptionRequest.known_speaker_names !== undefined) {\n createTranscriptionRequest.known_speaker_names.forEach((value) =>\n formData.append(\"known_speaker_names\", value)\n )\n }\n if (createTranscriptionRequest.known_speaker_references !== undefined) {\n createTranscriptionRequest.known_speaker_references.forEach((value) =>\n formData.append(\"known_speaker_references\", value)\n )\n }\n\n return axios.post(\"/audio/transcriptions\", formData, options)\n}\n\n/**\n * Translates audio into English.\n * @summary Create translation\n */\nexport const createTranslation = <TData = AxiosResponse<CreateTranslation200>>(\n createTranslationRequest: CreateTranslationRequest,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n const formData = new FormData()\n formData.append(\"file\", createTranslationRequest.file)\n formData.append(\"model\", createTranslationRequest.model)\n if (createTranslationRequest.prompt !== undefined) {\n formData.append(\"prompt\", createTranslationRequest.prompt)\n }\n if (createTranslationRequest.response_format !== undefined) {\n formData.append(\"response_format\", createTranslationRequest.response_format)\n }\n if (createTranslationRequest.temperature !== undefined) {\n formData.append(\"temperature\", createTranslationRequest.temperature.toString())\n }\n\n return axios.post(\"/audio/translations\", formData, options)\n}\n\n/**\n * Returns a list of files.\n * @summary List files\n */\nexport const listFiles = <TData = AxiosResponse<ListFilesResponse>>(\n params?: ListFilesParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/files\", {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * Upload a file that can be used across various endpoints. Individual files\ncan be up to 512 MB, and the size of all files uploaded by one organization\ncan be up to 1 TB.\n\n- The Assistants API supports files up to 2 million tokens and of specific\n file types. See the [Assistants Tools guide](https://platform.openai.com/docs/assistants/tools) for\n details.\n- The Fine-tuning API only supports `.jsonl` files. The input also has\n certain required formats for fine-tuning\n [chat](https://platform.openai.com/docs/api-reference/fine-tuning/chat-input) or\n [completions](https://platform.openai.com/docs/api-reference/fine-tuning/completions-input) models.\n- The Batch API only supports `.jsonl` files up to 200 MB in size. 
The input\n also has a specific required\n [format](https://platform.openai.com/docs/api-reference/batch/request-input).\n\nPlease [contact us](https://help.openai.com/) if you need to increase these\nstorage limits.\n\n * @summary Upload file\n */\nexport const createFile = <TData = AxiosResponse<OpenAIFile>>(\n createFileRequest: CreateFileRequest,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n const formData = new FormData()\n formData.append(\"file\", createFileRequest.file)\n formData.append(\"purpose\", createFileRequest.purpose)\n if (createFileRequest.expires_after !== undefined) {\n formData.append(\"expires_after\", JSON.stringify(createFileRequest.expires_after))\n }\n\n return axios.post(\"/files\", formData, options)\n}\n\n/**\n * Delete a file and remove it from all vector stores.\n * @summary Delete file\n */\nexport const deleteFile = <TData = AxiosResponse<DeleteFileResponse>>(\n fileId: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.delete(`/files/${fileId}`, options)\n}\n\n/**\n * Returns information about a specific file.\n * @summary Retrieve file\n */\nexport const retrieveFile = <TData = AxiosResponse<OpenAIFile>>(\n fileId: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/files/${fileId}`, options)\n}\n\n/**\n * Returns the contents of the specified file.\n * @summary Retrieve file content\n */\nexport const downloadFile = <TData = AxiosResponse<string>>(\n fileId: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/files/${fileId}/content`, options)\n}\n\n/**\n * Lists the currently available models, and provides basic information about each one such as the owner and availability.\n * @summary List models\n */\nexport const listModels = <TData = AxiosResponse<ListModelsResponse>>(\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/models\", options)\n}\n\n/**\n * Retrieves a model instance, providing basic information about the model such as the owner and permissioning.\n * @summary Retrieve model\n */\nexport const retrieveModel = <TData = AxiosResponse<Model>>(\n model: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/models/${model}`, options)\n}\n\n/**\n * Delete a fine-tuned model. 
You must have the Owner role in your organization to delete a model.\n * @summary Delete a fine-tuned model\n */\nexport const deleteModel = <TData = AxiosResponse<DeleteModelResponse>>(\n model: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.delete(`/models/${model}`, options)\n}\n\nexport type CreateSpeechResult = AxiosResponse<Blob | CreateSpeechResponseStreamEvent>\nexport type CreateTranscriptionResult = AxiosResponse<\n CreateTranscription200One | CreateTranscriptionResponseStreamEvent\n>\nexport type CreateTranslationResult = AxiosResponse<CreateTranslation200>\nexport type ListFilesResult = AxiosResponse<ListFilesResponse>\nexport type CreateFileResult = AxiosResponse<OpenAIFile>\nexport type DeleteFileResult = AxiosResponse<DeleteFileResponse>\nexport type RetrieveFileResult = AxiosResponse<OpenAIFile>\nexport type DownloadFileResult = AxiosResponse<string>\nexport type ListModelsResult = AxiosResponse<ListModelsResponse>\nexport type RetrieveModelResult = AxiosResponse<Model>\nexport type DeleteModelResult = AxiosResponse<DeleteModelResponse>\n","/**\n * Speechmatics transcription provider adapter\n * Documentation: https://docs.speechmatics.com/\n */\n\nimport axios, { type AxiosInstance } from \"axios\"\nimport type {\n AudioInput,\n ProviderCapabilities,\n TranscribeOptions,\n UnifiedTranscriptResponse\n} from \"../router/types\"\nimport { BaseAdapter, type ProviderConfig } from \"./base-adapter\"\n\n// Import Speechmatics types (manual definitions - OpenAPI spec doesn't match actual API)\nimport type {\n JobConfig,\n JobSubmitResponse,\n JobDetailsResponse,\n TranscriptionResponse\n} from \"../types/speechmatics\"\n\n/**\n * Speechmatics transcription provider adapter\n *\n * Implements transcription for Speechmatics API with support for:\n * - Batch transcription (async processing)\n * - Speaker diarization\n * - Enhanced accuracy models\n * - Multi-language support\n * - Sentiment analysis\n * - Summarization\n * - Custom vocabulary\n *\n * Note: Types are manually defined due to validation errors in the official OpenAPI spec.\n * See src/generated/speechmatics/schema/index.ts for type definitions.\n *\n * @see https://docs.speechmatics.com/ Speechmatics Documentation\n * @see https://docs.speechmatics.com/introduction/batch-guide Batch API Guide\n *\n * @example Basic transcription\n * ```typescript\n * import { SpeechmaticsAdapter } from '@meeting-baas/sdk';\n *\n * const adapter = new SpeechmaticsAdapter();\n * adapter.initialize({\n * apiKey: process.env.SPEECHMATICS_API_KEY\n * });\n *\n * const result = await adapter.transcribe({\n * type: 'url',\n * url: 'https://example.com/audio.mp3'\n * }, {\n * language: 'en'\n * });\n *\n * console.log(result.data.text);\n * ```\n *\n * @example With enhanced accuracy and diarization\n * ```typescript\n * const result = await adapter.transcribe({\n * type: 'url',\n * url: 'https://example.com/meeting.mp3'\n * }, {\n * language: 'en',\n * diarization: true,\n * metadata: {\n * operating_point: 'enhanced' // Higher accuracy model\n * }\n * });\n *\n * console.log('Speakers:', result.data.speakers);\n * console.log('Utterances:', result.data.utterances);\n * ```\n *\n * @example Async with polling\n * ```typescript\n * // Submit transcription\n * const submission = await adapter.transcribe({\n * type: 'url',\n * url: 'https://example.com/audio.mp3'\n * }, {\n * language: 'en',\n * summarization: true\n * });\n *\n * const jobId = submission.data?.id;\n * console.log('Job ID:', jobId);\n *\n * // Poll 
for completion\n * const poll = async () => {\n * const status = await adapter.getTranscript(jobId);\n * if (status.data?.status === 'completed') {\n * console.log('Transcript:', status.data.text);\n * console.log('Summary:', status.data.summary);\n * } else if (status.data?.status === 'processing') {\n * setTimeout(poll, 3000);\n * }\n * };\n * await poll();\n * ```\n */\nexport class SpeechmaticsAdapter extends BaseAdapter {\n readonly name = \"speechmatics\" as const\n readonly capabilities: ProviderCapabilities = {\n streaming: false, // Batch only (streaming available via separate WebSocket API)\n diarization: true,\n wordTimestamps: true,\n languageDetection: false,\n customVocabulary: true,\n summarization: true,\n sentimentAnalysis: true,\n entityDetection: true,\n piiRedaction: false\n }\n\n private client?: AxiosInstance\n protected baseUrl = \"https://asr.api.speechmatics.com/v2\"\n\n initialize(config: ProviderConfig): void {\n super.initialize(config)\n\n this.baseUrl = config.baseUrl || this.baseUrl\n\n this.client = axios.create({\n baseURL: this.baseUrl,\n timeout: config.timeout || 120000,\n headers: {\n Authorization: `Bearer ${config.apiKey}`,\n ...config.headers\n }\n })\n }\n\n /**\n * Submit audio for transcription\n *\n * Speechmatics uses async batch processing. Returns a job ID immediately.\n * Poll getTranscript() to retrieve results.\n *\n * @param audio - Audio input (URL or file)\n * @param options - Transcription options\n * @returns Job submission response with ID for polling\n */\n async transcribe(\n audio: AudioInput,\n options?: TranscribeOptions\n ): Promise<UnifiedTranscriptResponse> {\n this.validateConfig()\n\n try {\n // Build job config\n const jobConfig: JobConfig = {\n type: \"transcription\",\n transcription_config: {\n language: options?.language || \"en\",\n operating_point:\n (options?.metadata?.operating_point as \"standard\" | \"enhanced\") || \"standard\"\n }\n }\n\n // Add diarization if requested\n if (options?.diarization) {\n if (!jobConfig.transcription_config) {\n jobConfig.transcription_config = {}\n }\n jobConfig.transcription_config.diarization = \"speaker\"\n if (options.speakersExpected) {\n jobConfig.transcription_config.speaker_diarization_config = {\n max_speakers: options.speakersExpected\n }\n }\n }\n\n // Add sentiment analysis\n if (options?.sentimentAnalysis) {\n if (!jobConfig.transcription_config) {\n jobConfig.transcription_config = {}\n }\n jobConfig.transcription_config.enable_sentiment_analysis = true\n }\n\n // Add summarization\n if (options?.summarization && options?.metadata?.summary_type) {\n if (!jobConfig.transcription_config) {\n jobConfig.transcription_config = {}\n }\n jobConfig.transcription_config.summarization_config = {\n type: options.metadata.summary_type as \"bullets\" | \"brief\" | \"paragraph\",\n length: (options.metadata.summary_length as \"short\" | \"medium\" | \"long\") || \"medium\"\n }\n }\n\n // Add custom vocabulary\n if (options?.customVocabulary && options.customVocabulary.length > 0) {\n if (!jobConfig.transcription_config) {\n jobConfig.transcription_config = {}\n }\n jobConfig.transcription_config.additional_vocab = options.customVocabulary\n }\n\n // Handle audio input\n let requestBody: FormData | Record<string, any>\n let headers: Record<string, string> = {}\n\n if (audio.type === \"url\") {\n // Use fetch_data for URL input (JSON request)\n jobConfig.fetch_data = {\n url: audio.url\n }\n requestBody = { config: JSON.stringify(jobConfig) }\n headers = { \"Content-Type\": 
\"application/json\" }\n } else if (audio.type === \"file\") {\n // Upload file directly with multipart form\n requestBody = {\n config: JSON.stringify(jobConfig),\n data_file: audio.file\n }\n headers = { \"Content-Type\": \"multipart/form-data\" }\n } else {\n return {\n success: false,\n provider: this.name,\n error: {\n code: \"INVALID_INPUT\",\n message: \"Speechmatics only supports URL and File audio input\"\n }\n }\n }\n\n // Submit job\n const response = await this.client!.post<JobSubmitResponse>(\"/jobs\", requestBody, { headers })\n\n return {\n success: true,\n provider: this.name,\n data: {\n id: response.data.id,\n text: \"\",\n status: \"queued\",\n createdAt: response.data.created_at\n },\n raw: response.data\n }\n } catch (error) {\n return this.createErrorResponse(error)\n }\n }\n\n /**\n * Get transcription result by job ID\n *\n * Poll this method to check job status and retrieve completed transcription.\n *\n * @param transcriptId - Job ID from Speechmatics\n * @returns Transcription response with status and results\n */\n async getTranscript(transcriptId: string): Promise<UnifiedTranscriptResponse> {\n this.validateConfig()\n\n try {\n // Check job status first\n const statusResponse = await this.client!.get<JobDetailsResponse>(`/jobs/${transcriptId}`)\n\n const status = this.normalizeStatus(statusResponse.data.job.status)\n\n if (status !== \"completed\") {\n return {\n success: true,\n provider: this.name,\n data: {\n id: transcriptId,\n text: \"\",\n status,\n createdAt: statusResponse.data.job.created_at\n },\n raw: statusResponse.data\n }\n }\n\n // Get transcript if completed\n const transcriptResponse = await this.client!.get<TranscriptionResponse>(\n `/jobs/${transcriptId}/transcript`\n )\n\n return this.normalizeResponse(transcriptResponse.data)\n } catch (error) {\n return this.createErrorResponse(error)\n }\n }\n\n /**\n * Normalize Speechmatics status to unified status\n */\n private normalizeStatus(status: string): \"queued\" | \"processing\" | \"completed\" | \"error\" {\n switch (status) {\n case \"running\":\n return \"processing\"\n case \"done\":\n return \"completed\"\n case \"rejected\":\n case \"expired\":\n return \"error\"\n default:\n return \"queued\"\n }\n }\n\n /**\n * Normalize Speechmatics response to unified format\n */\n private normalizeResponse(response: TranscriptionResponse): UnifiedTranscriptResponse {\n // Extract full text from results\n const text = response.results\n .filter((r) => r.type === \"word\" && r.alternatives)\n .map((r) => r.alternatives![0]?.content || \"\")\n .join(\" \")\n\n // Extract words with timestamps (filter out items without required timestamps)\n const words = response.results\n .filter((r) => r.type === \"word\" && r.start_time !== undefined && r.end_time !== undefined)\n .map((result) => ({\n text: result.alternatives?.[0]?.content || \"\",\n start: result.start_time!,\n end: result.end_time!,\n confidence: result.alternatives?.[0]?.confidence,\n speaker: result.alternatives?.[0]?.speaker\n }))\n\n // Extract speakers if diarization was enabled\n const speakerSet = new Set<string>()\n response.results.forEach((r) => {\n if (r.alternatives) {\n const speaker = r.alternatives[0]?.speaker\n if (speaker) speakerSet.add(speaker)\n }\n })\n\n const speakers =\n speakerSet.size > 0\n ? 
Array.from(speakerSet).map((id) => ({\n id,\n label: `Speaker ${id}`\n }))\n : undefined\n\n // Build utterances from speaker changes\n const utterances: Array<{\n speaker: string\n text: string\n start: number\n end: number\n }> = []\n\n if (speakers) {\n let currentSpeaker: string | undefined\n let currentUtterance: string[] = []\n let utteranceStart = 0\n\n response.results\n .filter((r) => r.type === \"word\" && r.alternatives)\n .forEach((result, idx) => {\n const speaker = result.alternatives![0]?.speaker\n const word = result.alternatives![0]?.content || \"\"\n\n if (speaker !== currentSpeaker) {\n // Speaker changed - save previous utterance\n if (currentSpeaker && currentUtterance.length > 0) {\n const prevResult = response.results.filter((r) => r.type === \"word\")[idx - 1]\n utterances.push({\n speaker: currentSpeaker,\n text: currentUtterance.join(\" \"),\n start: utteranceStart || 0,\n end: prevResult?.end_time || result.start_time || 0\n })\n }\n\n // Start new utterance\n currentSpeaker = speaker\n currentUtterance = [word]\n utteranceStart = result.start_time || 0\n } else {\n currentUtterance.push(word)\n }\n })\n\n // Add final utterance\n if (currentSpeaker && currentUtterance.length > 0) {\n const lastWord = response.results.filter((r) => r.type === \"word\").pop()\n utterances.push({\n speaker: currentSpeaker,\n text: currentUtterance.join(\" \"),\n start: utteranceStart,\n end: lastWord?.end_time || utteranceStart\n })\n }\n }\n\n return {\n success: true,\n provider: this.name,\n data: {\n id: response.job.id,\n text,\n status: \"completed\",\n language: response.metadata.transcription_config?.language,\n duration: response.job.duration,\n speakers,\n words: words.length > 0 ? words : undefined,\n utterances: utterances.length > 0 ? 
utterances : undefined,\n summary: response.summary?.content,\n createdAt: response.job.created_at\n },\n raw: response\n }\n }\n}\n\n/**\n * Factory function to create a Speechmatics adapter\n */\nexport function createSpeechmaticsAdapter(config: ProviderConfig): SpeechmaticsAdapter {\n const adapter = new SpeechmaticsAdapter()\n adapter.initialize(config)\n return adapter\n}\n","/**\n * Base webhook handler interface\n * All provider-specific webhook handlers must implement this\n */\n\nimport type { UnifiedWebhookEvent, WebhookValidation, WebhookVerificationOptions } from \"./types\"\nimport type { TranscriptionProvider } from \"../router/types\"\n\n/**\n * Abstract base class for webhook handlers\n *\n * Each provider implements this to parse and normalize their webhook payloads\n */\nexport abstract class BaseWebhookHandler {\n /** Provider name */\n abstract readonly provider: TranscriptionProvider\n\n /**\n * Check if this payload matches this provider's webhook format\n *\n * Used for auto-detection of webhook provider\n *\n * @param payload - Raw webhook payload\n * @param options - Optional context (query params, headers, etc.)\n * @returns true if this handler can process the payload\n *\n * @example\n * ```typescript\n * matches(payload, options) {\n * return typeof payload === 'object' &&\n * 'event' in payload &&\n * 'payload' in payload\n * }\n * ```\n */\n abstract matches(\n payload: unknown,\n options?: { queryParams?: Record<string, string>; userAgent?: string }\n ): boolean\n\n /**\n * Parse and normalize webhook payload\n *\n * Converts provider-specific webhook format to UnifiedWebhookEvent\n *\n * @param payload - Raw webhook payload\n * @param options - Optional context (query params, headers, etc.)\n * @returns Normalized webhook event\n * @throws Error if payload cannot be parsed\n *\n * @example\n * ```typescript\n * parse(payload, options) {\n * const typed = payload as ProviderWebhookPayload\n * return {\n * success: true,\n * provider: this.provider,\n * eventType: 'transcription.completed',\n * data: { id: typed.job_id, ... 
},\n * timestamp: new Date().toISOString(),\n * raw: payload\n * }\n * }\n * ```\n */\n abstract parse(\n payload: unknown,\n options?: { queryParams?: Record<string, string> }\n ): UnifiedWebhookEvent\n\n /**\n * Verify webhook signature (if provider supports it)\n *\n * Optional method - implement if provider supports webhook signature verification\n *\n * @param payload - Raw webhook payload\n * @param options - Verification options (signature, secret, etc.)\n * @returns true if signature is valid\n *\n * @example\n * ```typescript\n * verify(payload, options) {\n * if (!options.signature || !options.secret) return false\n *\n * const computed = crypto\n * .createHmac('sha256', options.secret)\n * .update(JSON.stringify(payload))\n * .digest('hex')\n *\n * return computed === options.signature\n * }\n * ```\n */\n verify?(payload: unknown, options: WebhookVerificationOptions): boolean\n\n /**\n * Validate webhook payload structure\n *\n * Checks if payload has required fields and correct types\n *\n * @param payload - Raw webhook payload\n * @param options - Optional context (query params, headers, etc.)\n * @returns Validation result with details\n */\n validate(\n payload: unknown,\n options?: { queryParams?: Record<string, string>; userAgent?: string }\n ): WebhookValidation {\n try {\n // Check if this handler matches the payload\n if (!this.matches(payload, options)) {\n return {\n valid: false,\n error: `Payload does not match ${this.provider} webhook format`\n }\n }\n\n // Try to parse the payload\n const event = this.parse(payload, options)\n\n // Basic validation\n if (!event.provider || !event.eventType) {\n return {\n valid: false,\n error: \"Parsed event missing required fields\"\n }\n }\n\n return {\n valid: true,\n provider: this.provider,\n details: {\n eventType: event.eventType,\n success: event.success\n }\n }\n } catch (error) {\n return {\n valid: false,\n error: error instanceof Error ? 
error.message : \"Unknown error\",\n details: { error }\n }\n }\n }\n\n /**\n * Helper method to create error response\n */\n protected createErrorEvent(payload: unknown, errorMessage: string): UnifiedWebhookEvent {\n return {\n success: false,\n provider: this.provider,\n eventType: \"transcription.failed\",\n data: {\n id: \"\",\n status: \"error\",\n error: errorMessage\n },\n timestamp: new Date().toISOString(),\n raw: payload\n }\n }\n}\n","/**\n * Gladia webhook handler\n * Parses and normalizes Gladia webhook callbacks\n */\n\nimport type { WebhookTranscriptionSuccessPayload } from \"../generated/gladia/schema/webhookTranscriptionSuccessPayload\"\nimport type { WebhookTranscriptionErrorPayload } from \"../generated/gladia/schema/webhookTranscriptionErrorPayload\"\nimport type { WebhookTranscriptionCreatedPayload } from \"../generated/gladia/schema/webhookTranscriptionCreatedPayload\"\nimport { BaseWebhookHandler } from \"./base-webhook\"\nimport type { UnifiedWebhookEvent } from \"./types\"\nimport type { TranscriptionProvider } from \"../router/types\"\n\n/**\n * Gladia webhook handler\n *\n * Handles webhook callbacks from Gladia API:\n * - transcription.created - Job created and queued\n * - transcription.success - Job completed successfully\n * - transcription.error - Job failed with error\n *\n * @example\n * ```typescript\n * import { GladiaWebhookHandler } from '@meeting-baas/sdk';\n *\n * const handler = new GladiaWebhookHandler();\n *\n * // Validate webhook\n * const validation = handler.validate(req.body);\n * if (!validation.valid) {\n * return res.status(400).json({ error: validation.error });\n * }\n *\n * // Parse webhook\n * const event = handler.parse(req.body);\n * console.log('Event type:', event.eventType);\n * console.log('Job ID:', event.data?.id);\n *\n * if (event.eventType === 'transcription.completed') {\n * console.log('Transcript:', event.data?.text);\n * }\n * ```\n */\nexport class GladiaWebhookHandler extends BaseWebhookHandler {\n readonly provider: TranscriptionProvider = \"gladia\"\n\n /**\n * Check if payload matches Gladia webhook format\n */\n matches(\n payload: unknown,\n _options?: { queryParams?: Record<string, string>; userAgent?: string }\n ): boolean {\n if (!payload || typeof payload !== \"object\") {\n return false\n }\n\n const obj = payload as Record<string, unknown>\n\n // Gladia webhooks have \"event\" and \"payload\" fields\n if (!(\"event\" in obj) || !(\"payload\" in obj)) {\n return false\n }\n\n // Event should be a string starting with \"transcription.\"\n if (typeof obj.event !== \"string\") {\n return false\n }\n\n if (!obj.event.startsWith(\"transcription.\")) {\n return false\n }\n\n // Payload should be an object with \"id\" field\n if (!obj.payload || typeof obj.payload !== \"object\") {\n return false\n }\n\n const payloadObj = obj.payload as Record<string, unknown>\n return typeof payloadObj.id === \"string\"\n }\n\n /**\n * Parse Gladia webhook payload to unified format\n */\n parse(\n payload: unknown,\n _options?: { queryParams?: Record<string, string> }\n ): UnifiedWebhookEvent {\n if (!this.matches(payload)) {\n return this.createErrorEvent(payload, \"Invalid Gladia webhook payload\")\n }\n\n const webhookPayload = payload as\n | WebhookTranscriptionSuccessPayload\n | WebhookTranscriptionErrorPayload\n | WebhookTranscriptionCreatedPayload\n\n const jobId = webhookPayload.payload.id\n const event = webhookPayload.event\n\n // Handle different event types\n if (event === \"transcription.created\") {\n return {\n 
success: true,\n provider: this.provider,\n eventType: \"transcription.created\",\n data: {\n id: jobId,\n status: \"queued\"\n },\n timestamp: new Date().toISOString(),\n raw: payload\n }\n }\n\n if (event === \"transcription.success\") {\n // For success events, we need to fetch the full result\n // The webhook only contains the job ID, not the transcript\n return {\n success: true,\n provider: this.provider,\n eventType: \"transcription.completed\",\n data: {\n id: jobId,\n status: \"completed\"\n // Note: Full transcript data needs to be fetched via API\n // using GladiaAdapter.getTranscript(jobId)\n },\n timestamp: new Date().toISOString(),\n raw: payload\n }\n }\n\n if (event === \"transcription.error\") {\n return {\n success: false,\n provider: this.provider,\n eventType: \"transcription.failed\",\n data: {\n id: jobId,\n status: \"error\",\n error: \"Transcription failed\"\n },\n timestamp: new Date().toISOString(),\n raw: payload\n }\n }\n\n // Unknown event type\n return this.createErrorEvent(payload, `Unknown Gladia webhook event: ${event}`)\n }\n\n /**\n * Verify Gladia webhook signature\n *\n * Note: As of the current API version, Gladia does not provide\n * webhook signature verification. This method is a placeholder\n * for future implementation.\n *\n * @param payload - Webhook payload\n * @param options - Verification options\n * @returns Always returns true (no verification available)\n */\n verify(): boolean {\n // Gladia does not currently support webhook signature verification\n // Return true to indicate no verification is required\n return true\n }\n}\n\n/**\n * Factory function to create a Gladia webhook handler\n */\nexport function createGladiaWebhookHandler(): GladiaWebhookHandler {\n return new GladiaWebhookHandler()\n}\n","/**\n * AssemblyAI webhook handler\n * Parses and normalizes AssemblyAI webhook callbacks\n */\n\nimport type { TranscriptReadyNotification } from \"../generated/assemblyai/schema/transcriptReadyNotification\"\nimport { BaseWebhookHandler } from \"./base-webhook\"\nimport type { UnifiedWebhookEvent, WebhookVerificationOptions } from \"./types\"\nimport type { TranscriptionProvider } from \"../router/types\"\nimport crypto from \"node:crypto\"\n\n/**\n * AssemblyAI webhook handler\n *\n * Handles webhook callbacks from AssemblyAI API:\n * - completed - Transcription completed successfully\n * - error - Transcription failed with error\n *\n * AssemblyAI supports webhook signature verification using HMAC-SHA256.\n *\n * @example Basic usage\n * ```typescript\n * import { AssemblyAIWebhookHandler } from '@meeting-baas/sdk';\n *\n * const handler = new AssemblyAIWebhookHandler();\n *\n * // Validate webhook\n * const validation = handler.validate(req.body);\n * if (!validation.valid) {\n * return res.status(400).json({ error: validation.error });\n * }\n *\n * // Parse webhook\n * const event = handler.parse(req.body);\n * if (event.eventType === 'transcription.completed') {\n * console.log('Transcript ID:', event.data?.id);\n * }\n * ```\n *\n * @example With signature verification\n * ```typescript\n * // Verify webhook signature\n * const isValid = handler.verify(req.body, {\n * signature: req.headers['x-assemblyai-signature'],\n * secret: process.env.ASSEMBLYAI_WEBHOOK_SECRET,\n * rawBody: req.rawBody\n * });\n *\n * if (!isValid) {\n * return res.status(401).json({ error: 'Invalid signature' });\n * }\n * ```\n */\nexport class AssemblyAIWebhookHandler extends BaseWebhookHandler {\n readonly provider: TranscriptionProvider = 
\"assemblyai\"\n\n /**\n * Check if payload matches AssemblyAI webhook format\n */\n matches(\n payload: unknown,\n _options?: { queryParams?: Record<string, string>; userAgent?: string }\n ): boolean {\n if (!payload || typeof payload !== \"object\") {\n return false\n }\n\n const obj = payload as Record<string, unknown>\n\n // AssemblyAI webhooks have \"transcript_id\" and \"status\" fields\n if (!(\"transcript_id\" in obj) || !(\"status\" in obj)) {\n return false\n }\n\n // transcript_id should be a string\n if (typeof obj.transcript_id !== \"string\") {\n return false\n }\n\n // status should be \"completed\" or \"error\"\n if (obj.status !== \"completed\" && obj.status !== \"error\") {\n return false\n }\n\n return true\n }\n\n /**\n * Parse AssemblyAI webhook payload to unified format\n */\n parse(\n payload: unknown,\n _options?: { queryParams?: Record<string, string> }\n ): UnifiedWebhookEvent {\n if (!this.matches(payload)) {\n return this.createErrorEvent(payload, \"Invalid AssemblyAI webhook payload\")\n }\n\n const notification = payload as TranscriptReadyNotification\n const transcriptId = notification.transcript_id\n const status = notification.status\n\n if (status === \"completed\") {\n return {\n success: true,\n provider: this.provider,\n eventType: \"transcription.completed\",\n data: {\n id: transcriptId,\n status: \"completed\"\n // Note: Full transcript data needs to be fetched via API\n // using AssemblyAIAdapter.getTranscript(transcriptId)\n },\n timestamp: new Date().toISOString(),\n raw: payload\n }\n }\n\n if (status === \"error\") {\n return {\n success: false,\n provider: this.provider,\n eventType: \"transcription.failed\",\n data: {\n id: transcriptId,\n status: \"error\",\n error: \"Transcription failed\"\n },\n timestamp: new Date().toISOString(),\n raw: payload\n }\n }\n\n // Unknown status\n return this.createErrorEvent(payload, `Unknown AssemblyAI status: ${status}`)\n }\n\n /**\n * Verify AssemblyAI webhook signature\n *\n * AssemblyAI uses HMAC-SHA256 for webhook signature verification.\n * The signature is sent in the X-AssemblyAI-Signature header.\n *\n * @param payload - Webhook payload\n * @param options - Verification options with signature and secret\n * @returns true if signature is valid\n *\n * @example\n * ```typescript\n * const isValid = handler.verify(req.body, {\n * signature: req.headers['x-assemblyai-signature'],\n * secret: process.env.ASSEMBLYAI_WEBHOOK_SECRET,\n * rawBody: req.rawBody // Raw request body as string or Buffer\n * });\n * ```\n */\n verify(payload: unknown, options: WebhookVerificationOptions): boolean {\n // Need signature and secret to verify\n if (!options.signature || !options.secret) {\n return false\n }\n\n try {\n // Use raw body if provided, otherwise stringify payload\n const body =\n options.rawBody || (typeof payload === \"string\" ? payload : JSON.stringify(payload))\n\n // Compute HMAC-SHA256 signature\n const hmac = crypto.createHmac(\"sha256\", options.secret)\n const bodyBuffer = typeof body === \"string\" ? 
Buffer.from(body) : body\n hmac.update(bodyBuffer)\n const computedSignature = hmac.digest(\"hex\")\n\n // Compare signatures (constant-time comparison)\n return crypto.timingSafeEqual(Buffer.from(options.signature), Buffer.from(computedSignature))\n } catch (error) {\n // If any error occurs during verification, treat as invalid\n return false\n }\n }\n}\n\n/**\n * Factory function to create an AssemblyAI webhook handler\n */\nexport function createAssemblyAIWebhookHandler(): AssemblyAIWebhookHandler {\n return new AssemblyAIWebhookHandler()\n}\n","/**\n * Deepgram webhook handler\n * Parses and normalizes Deepgram webhook callbacks\n */\n\nimport type { ListenV1Response } from \"../generated/deepgram/schema/listenV1Response\"\nimport { BaseWebhookHandler } from \"./base-webhook\"\nimport type { UnifiedWebhookEvent } from \"./types\"\nimport type { TranscriptionProvider } from \"../router/types\"\n\n/**\n * Deepgram webhook handler\n *\n * Handles webhook callbacks from Deepgram API.\n * Deepgram sends the full transcription response to the callback URL\n * when transcription is complete.\n *\n * Note: Deepgram does not provide webhook signature verification.\n * For security, use HTTPS and validate the request source.\n *\n * @example Basic usage\n * ```typescript\n * import { DeepgramWebhookHandler } from '@meeting-baas/sdk';\n *\n * const handler = new DeepgramWebhookHandler();\n *\n * // Validate webhook\n * const validation = handler.validate(req.body);\n * if (!validation.valid) {\n * return res.status(400).json({ error: validation.error });\n * }\n *\n * // Parse webhook\n * const event = handler.parse(req.body);\n * console.log('Event type:', event.eventType);\n * console.log('Transcript:', event.data?.text);\n * console.log('Speakers:', event.data?.speakers);\n * ```\n *\n * @example Processing completed transcription\n * ```typescript\n * const event = handler.parse(req.body);\n *\n * if (event.eventType === 'transcription.completed') {\n * console.log('Request ID:', event.data?.id);\n * console.log('Transcript:', event.data?.text);\n * console.log('Duration:', event.data?.duration);\n * console.log('Confidence:', event.data?.confidence);\n *\n * // Access word-level timestamps\n * event.data?.words?.forEach(word => {\n * console.log(`${word.text}: ${word.start}s - ${word.end}s`);\n * });\n *\n * // Access speaker diarization\n * event.data?.speakers?.forEach(speaker => {\n * console.log(`Speaker ${speaker.speaker}: ${speaker.text}`);\n * });\n * }\n * ```\n */\nexport class DeepgramWebhookHandler extends BaseWebhookHandler {\n readonly provider: TranscriptionProvider = \"deepgram\"\n\n /**\n * Check if payload matches Deepgram webhook format\n */\n matches(\n payload: unknown,\n _options?: { queryParams?: Record<string, string>; userAgent?: string }\n ): boolean {\n if (!payload || typeof payload !== \"object\") {\n return false\n }\n\n const obj = payload as Record<string, unknown>\n\n // Deepgram callbacks have \"metadata\" and \"results\" fields\n if (!(\"metadata\" in obj) || !(\"results\" in obj)) {\n return false\n }\n\n // metadata should be an object with \"request_id\"\n if (!obj.metadata || typeof obj.metadata !== \"object\") {\n return false\n }\n\n const metadata = obj.metadata as Record<string, unknown>\n if (!(\"request_id\" in metadata)) {\n return false\n }\n\n // results should be an object with \"channels\"\n if (!obj.results || typeof obj.results !== \"object\") {\n return false\n }\n\n const results = obj.results as Record<string, unknown>\n return 
\"channels\" in results\n }\n\n /**\n * Parse Deepgram webhook payload to unified format\n */\n parse(\n payload: unknown,\n _options?: { queryParams?: Record<string, string> }\n ): UnifiedWebhookEvent {\n if (!this.matches(payload)) {\n return this.createErrorEvent(payload, \"Invalid Deepgram webhook payload\")\n }\n\n const response = payload as ListenV1Response\n\n try {\n // Extract basic info\n const requestId = response.metadata.request_id\n const duration = response.metadata.duration\n const channels = response.results.channels || []\n\n // Deepgram can have multiple channels, we'll use the first one\n if (channels.length === 0) {\n return {\n success: false,\n provider: this.provider,\n eventType: \"transcription.failed\",\n data: {\n id: requestId || \"\",\n status: \"error\",\n error: \"No channels in response\"\n },\n timestamp: new Date().toISOString(),\n raw: payload\n }\n }\n\n const channel = channels[0]\n const alternatives = channel.alternatives || []\n\n if (alternatives.length === 0) {\n return {\n success: false,\n provider: this.provider,\n eventType: \"transcription.failed\",\n data: {\n id: requestId || \"\",\n status: \"error\",\n error: \"No alternatives in response\"\n },\n timestamp: new Date().toISOString(),\n raw: payload\n }\n }\n\n const alternative = alternatives[0]\n const transcript = alternative.transcript\n\n // Check if transcription was successful\n if (!transcript) {\n return {\n success: false,\n provider: this.provider,\n eventType: \"transcription.failed\",\n data: {\n id: requestId || \"\",\n status: \"error\",\n error: \"Empty transcript\"\n },\n timestamp: new Date().toISOString(),\n raw: payload\n }\n }\n\n // Extract words (if available)\n const words =\n alternative.words && alternative.words.length > 0\n ? alternative.words.map((word) => ({\n text: word.word || \"\",\n start: word.start || 0,\n end: word.end || 0,\n confidence: word.confidence\n }))\n : undefined\n\n // Extract speakers from utterances (if available)\n const speakers =\n response.results.utterances && response.results.utterances.length > 0\n ? response.results.utterances.map((utterance) => ({\n id: utterance.speaker?.toString() || \"unknown\",\n speaker: utterance.speaker?.toString() || \"unknown\",\n text: utterance.transcript || \"\",\n confidence: utterance.confidence\n }))\n : undefined\n\n // Extract utterances (if available)\n const utterances =\n response.results.utterances && response.results.utterances.length > 0\n ? response.results.utterances.map((utterance) => ({\n text: utterance.transcript || \"\",\n start: utterance.start || 0,\n end: utterance.end || 0,\n speaker: utterance.speaker?.toString(),\n confidence: utterance.confidence,\n words:\n utterance.words && utterance.words.length > 0\n ? utterance.words.map((word) => ({\n text: word.word || \"\",\n start: word.start || 0,\n end: word.end || 0,\n confidence: word.confidence\n }))\n : undefined\n }))\n : undefined\n\n // Extract summary (if available)\n const summary = alternative.summaries?.[0]?.summary\n\n return {\n success: true,\n provider: this.provider,\n eventType: \"transcription.completed\",\n data: {\n id: requestId || \"\",\n status: \"completed\",\n text: transcript,\n confidence: alternative.confidence,\n duration,\n language: response.metadata.models?.[0] || undefined,\n speakers: speakers && speakers.length > 0 ? speakers : undefined,\n words: words && words.length > 0 ? words : undefined,\n utterances: utterances && utterances.length > 0 ? 
utterances : undefined,\n summary,\n metadata: {\n channels: response.metadata.channels,\n created: response.metadata.created,\n models: response.metadata.models\n }\n },\n timestamp: new Date().toISOString(),\n raw: payload\n }\n } catch (error) {\n return this.createErrorEvent(\n payload,\n `Failed to parse Deepgram webhook: ${error instanceof Error ? error.message : \"Unknown error\"}`\n )\n }\n }\n\n /**\n * Verify Deepgram webhook signature\n *\n * Note: Deepgram does not currently support webhook signature verification.\n * For security, use HTTPS and validate the request source (IP allowlist, etc.).\n *\n * @returns Always returns true (no verification available)\n */\n verify(): boolean {\n // Deepgram does not currently support webhook signature verification\n // Return true to indicate no verification is required\n return true\n }\n}\n\n/**\n * Factory function to create a Deepgram webhook handler\n */\nexport function createDeepgramWebhookHandler(): DeepgramWebhookHandler {\n return new DeepgramWebhookHandler()\n}\n","/**\n * Azure Speech-to-Text webhook handler\n * Parses and normalizes Azure STT webhook callbacks\n */\n\nimport { BaseWebhookHandler } from \"./base-webhook\"\nimport type { UnifiedWebhookEvent, WebhookVerificationOptions } from \"./types\"\nimport type { TranscriptionProvider } from \"../router/types\"\nimport crypto from \"node:crypto\"\n\n/**\n * Azure webhook event payload structure\n * Based on Azure Speech Services v3.1 webhook format\n */\ninterface AzureWebhookPayload {\n /** Event action (e.g., \"TranscriptionCreated\", \"TranscriptionSucceeded\", \"TranscriptionFailed\") */\n action: string\n /** Timestamp of the event */\n timestamp: string\n /** Self-link to the resource */\n self?: string\n /** Additional properties */\n properties?: Record<string, unknown>\n /** Error details (for failed events) */\n error?: {\n code: string\n message: string\n }\n}\n\n/**\n * Azure webhook handler\n *\n * Handles webhook callbacks from Azure Speech Services API:\n * - TranscriptionCreated - Transcription job created\n * - TranscriptionRunning - Transcription is processing\n * - TranscriptionSucceeded - Transcription completed successfully\n * - TranscriptionFailed - Transcription failed with error\n *\n * Azure supports optional webhook signature verification using a shared secret.\n *\n * @example Basic usage\n * ```typescript\n * import { AzureWebhookHandler } from '@meeting-baas/sdk';\n *\n * const handler = new AzureWebhookHandler();\n *\n * // Validate webhook\n * const validation = handler.validate(req.body);\n * if (!validation.valid) {\n * return res.status(400).json({ error: validation.error });\n * }\n *\n * // Parse webhook\n * const event = handler.parse(req.body);\n * console.log('Event type:', event.eventType);\n * console.log('Action:', event.raw.action);\n * ```\n *\n * @example With signature verification\n * ```typescript\n * // Verify webhook signature (if configured in Azure)\n * const isValid = handler.verify(req.body, {\n * signature: req.headers['x-azure-signature'],\n * secret: process.env.AZURE_WEBHOOK_SECRET,\n * rawBody: req.rawBody\n * });\n *\n * if (!isValid) {\n * return res.status(401).json({ error: 'Invalid signature' });\n * }\n * ```\n *\n * @example Processing completed transcription\n * ```typescript\n * const event = handler.parse(req.body);\n *\n * if (event.eventType === 'transcription.completed') {\n * // Extract transcription ID from self link\n * const transcriptionId = event.data?.id;\n *\n * // Fetch full transcript 
using AzureAdapter.getTranscript(transcriptionId)\n * console.log('Transcription completed:', transcriptionId);\n * }\n * ```\n */\nexport class AzureWebhookHandler extends BaseWebhookHandler {\n readonly provider: TranscriptionProvider = \"azure-stt\"\n\n /**\n * Check if payload matches Azure webhook format\n */\n matches(\n payload: unknown,\n _options?: { queryParams?: Record<string, string>; userAgent?: string }\n ): boolean {\n if (!payload || typeof payload !== \"object\") {\n return false\n }\n\n const obj = payload as Record<string, unknown>\n\n // Azure webhooks have \"action\" and \"timestamp\" fields\n if (!(\"action\" in obj) || !(\"timestamp\" in obj)) {\n return false\n }\n\n // action should be a string\n if (typeof obj.action !== \"string\") {\n return false\n }\n\n // Action should start with \"Transcription\"\n if (!obj.action.startsWith(\"Transcription\")) {\n return false\n }\n\n return true\n }\n\n /**\n * Parse Azure webhook payload to unified format\n */\n parse(\n payload: unknown,\n _options?: { queryParams?: Record<string, string> }\n ): UnifiedWebhookEvent {\n if (!this.matches(payload)) {\n return this.createErrorEvent(payload, \"Invalid Azure webhook payload\")\n }\n\n const webhookPayload = payload as AzureWebhookPayload\n const action = webhookPayload.action\n const timestamp = webhookPayload.timestamp\n\n // Extract transcription ID from self link\n // Format: https://{region}.api.cognitive.microsoft.com/speechtotext/v3.1/transcriptions/{id}\n let transcriptionId = \"\"\n if (webhookPayload.self) {\n const match = webhookPayload.self.match(/\\/transcriptions\\/([^/?]+)/)\n if (match) {\n transcriptionId = match[1]\n }\n }\n\n // Map Azure actions to unified event types\n if (action === \"TranscriptionCreated\") {\n return {\n success: true,\n provider: this.provider,\n eventType: \"transcription.created\",\n data: {\n id: transcriptionId,\n status: \"queued\",\n createdAt: timestamp\n },\n timestamp,\n raw: payload\n }\n }\n\n if (action === \"TranscriptionRunning\") {\n return {\n success: true,\n provider: this.provider,\n eventType: \"transcription.processing\",\n data: {\n id: transcriptionId,\n status: \"processing\"\n },\n timestamp,\n raw: payload\n }\n }\n\n if (action === \"TranscriptionSucceeded\") {\n return {\n success: true,\n provider: this.provider,\n eventType: \"transcription.completed\",\n data: {\n id: transcriptionId,\n status: \"completed\",\n completedAt: timestamp\n // Note: Full transcript data needs to be fetched via API\n // using AzureAdapter.getTranscript(transcriptionId)\n },\n timestamp,\n raw: payload\n }\n }\n\n if (action === \"TranscriptionFailed\") {\n return {\n success: false,\n provider: this.provider,\n eventType: \"transcription.failed\",\n data: {\n id: transcriptionId,\n status: \"error\",\n error: webhookPayload.error?.message || \"Transcription failed\",\n metadata: {\n errorCode: webhookPayload.error?.code\n }\n },\n timestamp,\n raw: payload\n }\n }\n\n // Unknown action\n return this.createErrorEvent(payload, `Unknown Azure webhook action: ${action}`)\n }\n\n /**\n * Verify Azure webhook signature\n *\n * Azure can optionally sign webhooks using HMAC-SHA256.\n * The signature is sent in the X-Azure-Signature header.\n *\n * Note: Signature verification is optional in Azure and must be\n * configured when creating the webhook.\n *\n * @param payload - Webhook payload\n * @param options - Verification options with signature and secret\n * @returns true if signature is valid or no signature provided\n *\n * 
@example\n * ```typescript\n * const isValid = handler.verify(req.body, {\n * signature: req.headers['x-azure-signature'],\n * secret: process.env.AZURE_WEBHOOK_SECRET,\n * rawBody: req.rawBody\n * });\n * ```\n */\n verify(payload: unknown, options: WebhookVerificationOptions): boolean {\n // If no signature provided, skip verification\n // (Azure webhooks can be configured without signatures)\n if (!options.signature) {\n return true\n }\n\n // Need secret to verify\n if (!options.secret) {\n return false\n }\n\n try {\n // Use raw body if provided, otherwise stringify payload\n const body =\n options.rawBody || (typeof payload === \"string\" ? payload : JSON.stringify(payload))\n\n // Compute HMAC-SHA256 signature\n const hmac = crypto.createHmac(\"sha256\", options.secret)\n const bodyBuffer = typeof body === \"string\" ? Buffer.from(body) : body\n hmac.update(bodyBuffer)\n const computedSignature = hmac.digest(\"hex\")\n\n // Compare signatures (constant-time comparison)\n return crypto.timingSafeEqual(Buffer.from(options.signature), Buffer.from(computedSignature))\n } catch (error) {\n // If any error occurs during verification, treat as invalid\n return false\n }\n }\n}\n\n/**\n * Factory function to create an Azure webhook handler\n */\nexport function createAzureWebhookHandler(): AzureWebhookHandler {\n return new AzureWebhookHandler()\n}\n","/**\n * Speechmatics webhook handler\n * Parses and normalizes Speechmatics webhook callbacks\n */\n\nimport { BaseWebhookHandler } from \"./base-webhook\"\nimport type { UnifiedWebhookEvent } from \"./types\"\nimport type { TranscriptionProvider } from \"../router/types\"\nimport type { TranscriptionResponse } from \"../types/speechmatics\"\n\n/**\n * Speechmatics webhook handler\n *\n * Handles webhook callbacks from Speechmatics API.\n * Speechmatics sends job completion notifications via POST with:\n * - Query parameters: id (job ID) and status (success/error/fetch_error/trim_error)\n * - User agent: \"Speechmatics-API/2.0\"\n * - Body: transcript JSON or multipart data depending on configuration\n *\n * @see https://docs.speechmatics.com/features-other/notifications\n *\n * @example\n * ```typescript\n * import { SpeechmaticsWebhookHandler } from '@meeting-baas/sdk';\n *\n * const handler = new SpeechmaticsWebhookHandler();\n *\n * // Validate webhook\n * const validation = handler.validate(req.body, {\n * queryParams: req.query, // Include query params for status check\n * userAgent: req.headers['user-agent']\n * });\n *\n * if (!validation.valid) {\n * return res.status(400).json({ error: validation.error });\n * }\n *\n * // Parse webhook\n * const event = handler.parse(req.body, {\n * queryParams: req.query\n * });\n *\n * if (event.eventType === 'transcription.completed') {\n * console.log('Transcript:', event.data?.text);\n * }\n * ```\n */\nexport class SpeechmaticsWebhookHandler extends BaseWebhookHandler {\n readonly provider: TranscriptionProvider = \"speechmatics\"\n\n /**\n * Check if payload matches Speechmatics webhook format\n */\n matches(\n payload: unknown,\n options?: { queryParams?: Record<string, string>; userAgent?: string }\n ): boolean {\n // Check user agent if provided\n if (options?.userAgent) {\n if (!options.userAgent.includes(\"Speechmatics-API\")) {\n return false\n }\n }\n\n // Check for required query params\n if (options?.queryParams) {\n const { id, status } = options.queryParams\n if (!id || !status) {\n return false\n }\n }\n\n // Speechmatics can send either JSON or multipart data\n // For JSON 
transcript, check for expected structure\n if (payload && typeof payload === \"object\") {\n const obj = payload as Record<string, unknown>\n\n // Check for Speechmatics transcript format\n if (\"format\" in obj && \"job\" in obj && \"metadata\" in obj) {\n return true\n }\n\n // Could also be a simple status object\n if (\"job\" in obj || \"id\" in obj) {\n return true\n }\n }\n\n // If we can't determine from payload alone, rely on query params\n return !!options?.queryParams?.id && !!options?.queryParams?.status\n }\n\n /**\n * Validate webhook request\n */\n validate(\n payload: unknown,\n options?: { queryParams?: Record<string, string>; userAgent?: string }\n ): { valid: boolean; error?: string } {\n // Check for required query parameters\n if (!options?.queryParams?.id) {\n return {\n valid: false,\n error: \"Missing required query parameter: id\"\n }\n }\n\n if (!options?.queryParams?.status) {\n return {\n valid: false,\n error: \"Missing required query parameter: status\"\n }\n }\n\n // Validate status value\n const validStatuses = [\"success\", \"error\", \"fetch_error\", \"trim_error\"]\n if (!validStatuses.includes(options.queryParams.status)) {\n return {\n valid: false,\n error: `Invalid status value: ${options.queryParams.status}`\n }\n }\n\n // Optional: Check user agent\n if (options?.userAgent && !options.userAgent.includes(\"Speechmatics-API\")) {\n return {\n valid: false,\n error: \"Invalid user agent (expected Speechmatics-API/2.0)\"\n }\n }\n\n return { valid: true }\n }\n\n /**\n * Parse webhook payload into unified event format\n */\n parse(payload: unknown, options?: { queryParams?: Record<string, string> }): UnifiedWebhookEvent {\n const queryParams = options?.queryParams || {}\n const jobId = queryParams.id\n const status = queryParams.status\n\n // Determine event type based on status\n let eventType: UnifiedWebhookEvent[\"eventType\"]\n if (status === \"success\") {\n eventType = \"transcription.completed\"\n } else if (status === \"error\" || status === \"fetch_error\" || status === \"trim_error\") {\n eventType = \"transcription.failed\"\n } else {\n eventType = \"transcription.created\"\n }\n\n // Parse transcript if available and status is success\n if (status === \"success\" && payload && typeof payload === \"object\") {\n const transcript = payload as TranscriptionResponse\n\n if (transcript.results && transcript.job) {\n // Extract full text\n const text = transcript.results\n .filter((r) => r.type === \"word\" && r.alternatives)\n .map((r) => r.alternatives![0]?.content || \"\")\n .join(\" \")\n\n // Extract speakers if present\n const speakerSet = new Set<string>()\n transcript.results.forEach((r) => {\n if (r.alternatives) {\n const speaker = r.alternatives[0]?.speaker\n if (speaker) speakerSet.add(speaker)\n }\n })\n\n const speakers =\n speakerSet.size > 0\n ? 
Array.from(speakerSet).map((id) => ({\n id,\n label: `Speaker ${id}`\n }))\n : undefined\n\n return {\n success: true,\n provider: this.provider,\n eventType,\n timestamp: new Date().toISOString(),\n data: {\n id: jobId,\n text,\n status: \"completed\",\n language: transcript.metadata.transcription_config?.language,\n duration: transcript.job.duration,\n speakers,\n createdAt: transcript.job.created_at\n },\n raw: payload\n }\n }\n }\n\n // Return minimal event for non-success or incomplete payloads\n return {\n success: status === \"success\",\n provider: this.provider,\n eventType,\n timestamp: new Date().toISOString(),\n data: {\n id: jobId,\n text: \"\",\n status: status === \"success\" ? \"completed\" : \"error\"\n },\n raw: payload\n }\n }\n}\n","/**\n * Webhook router with automatic provider detection\n * Routes webhook payloads to the correct provider handler\n */\n\nimport type { BaseWebhookHandler } from \"./base-webhook\"\nimport { GladiaWebhookHandler } from \"./gladia-webhook\"\nimport { AssemblyAIWebhookHandler } from \"./assemblyai-webhook\"\nimport { DeepgramWebhookHandler } from \"./deepgram-webhook\"\nimport { AzureWebhookHandler } from \"./azure-webhook\"\nimport { SpeechmaticsWebhookHandler } from \"./speechmatics-webhook\"\nimport type { UnifiedWebhookEvent, WebhookValidation, WebhookVerificationOptions } from \"./types\"\nimport type { TranscriptionProvider } from \"../router/types\"\n\n/**\n * Webhook router options\n */\nexport interface WebhookRouterOptions {\n /**\n * Specific provider to use (skips auto-detection)\n */\n provider?: TranscriptionProvider\n\n /**\n * Webhook verification options (signature, secret, etc.)\n */\n verification?: WebhookVerificationOptions\n\n /**\n * Whether to verify webhook signatures\n * @default true\n */\n verifySignature?: boolean\n\n /**\n * Query parameters from the webhook request\n * (e.g., for Speechmatics: ?id=<job_id>&status=success)\n */\n queryParams?: Record<string, string>\n\n /**\n * User agent from the webhook request headers\n * (e.g., for Speechmatics: \"Speechmatics-API/2.0\")\n */\n userAgent?: string\n}\n\n/**\n * Webhook router result\n */\nexport interface WebhookRouterResult {\n /**\n * Whether routing was successful\n */\n success: boolean\n\n /**\n * Detected or specified provider\n */\n provider?: TranscriptionProvider\n\n /**\n * Parsed unified webhook event\n */\n event?: UnifiedWebhookEvent\n\n /**\n * Error message if routing failed\n */\n error?: string\n\n /**\n * Whether signature verification was performed and passed\n */\n verified?: boolean\n}\n\n/**\n * Webhook router with automatic provider detection\n *\n * Automatically detects the webhook provider from the payload structure\n * and routes to the appropriate handler for parsing and normalization.\n *\n * @example Basic usage with auto-detection\n * ```typescript\n * import { WebhookRouter } from '@meeting-baas/sdk';\n *\n * const router = new WebhookRouter();\n *\n * // Auto-detect provider and parse webhook\n * const result = router.route(req.body);\n *\n * if (result.success) {\n * console.log('Provider:', result.provider);\n * console.log('Event type:', result.event?.eventType);\n * console.log('Transcript ID:', result.event?.data?.id);\n * } else {\n * console.error('Error:', result.error);\n * }\n * ```\n *\n * @example With signature verification\n * ```typescript\n * const router = new WebhookRouter();\n *\n * const result = router.route(req.body, {\n * verification: {\n * signature: req.headers['x-signature'],\n * secret: 
process.env.WEBHOOK_SECRET,\n * rawBody: req.rawBody\n * }\n * });\n *\n * if (!result.verified) {\n * return res.status(401).json({ error: 'Invalid signature' });\n * }\n * ```\n *\n * @example Specify provider explicitly\n * ```typescript\n * const router = new WebhookRouter();\n *\n * // Skip auto-detection, use specific provider\n * const result = router.route(req.body, {\n * provider: 'gladia'\n * });\n * ```\n *\n * @example Express.js middleware\n * ```typescript\n * import express from 'express';\n * import { WebhookRouter } from '@meeting-baas/sdk';\n *\n * const app = express();\n * const router = new WebhookRouter();\n *\n * app.post('/webhooks/transcription', express.json(), (req, res) => {\n * const result = router.route(req.body, {\n * verification: {\n * signature: req.headers['x-signature'] as string,\n * secret: process.env.WEBHOOK_SECRET!\n * }\n * });\n *\n * if (!result.success) {\n * return res.status(400).json({ error: result.error });\n * }\n *\n * if (!result.verified) {\n * return res.status(401).json({ error: 'Invalid signature' });\n * }\n *\n * // Process webhook event\n * console.log('Received webhook from:', result.provider);\n * console.log('Event:', result.event);\n *\n * res.status(200).json({ received: true });\n * });\n * ```\n */\nexport class WebhookRouter {\n private handlers: Map<TranscriptionProvider, BaseWebhookHandler>\n\n constructor() {\n // Initialize all provider handlers\n this.handlers = new Map([\n [\"gladia\", new GladiaWebhookHandler()],\n [\"assemblyai\", new AssemblyAIWebhookHandler()],\n [\"deepgram\", new DeepgramWebhookHandler()],\n [\"azure-stt\", new AzureWebhookHandler()],\n [\"speechmatics\", new SpeechmaticsWebhookHandler()]\n ])\n }\n\n /**\n * Route webhook payload to the correct handler\n *\n * @param payload - Raw webhook payload\n * @param options - Routing options (provider, verification, etc.)\n * @returns Routing result with parsed event\n */\n route(payload: unknown, options?: WebhookRouterOptions): WebhookRouterResult {\n // If provider is specified, use that handler directly\n if (options?.provider) {\n return this.routeToProvider(payload, options.provider, options)\n }\n\n // Auto-detect provider\n const detectedProvider = this.detectProvider(payload, {\n queryParams: options?.queryParams,\n userAgent: options?.userAgent\n })\n\n if (!detectedProvider) {\n return {\n success: false,\n error: \"Could not detect webhook provider from payload structure\"\n }\n }\n\n return this.routeToProvider(payload, detectedProvider, options)\n }\n\n /**\n * Detect provider from webhook payload structure\n *\n * @param payload - Raw webhook payload\n * @param options - Detection options (query params, user agent, etc.)\n * @returns Detected provider or undefined\n */\n detectProvider(\n payload: unknown,\n options?: { queryParams?: Record<string, string>; userAgent?: string }\n ): TranscriptionProvider | undefined {\n // Try each handler's matches() method\n for (const [provider, handler] of this.handlers) {\n if (handler.matches(payload, options)) {\n return provider\n }\n }\n\n return undefined\n }\n\n /**\n * Validate webhook payload\n *\n * @param payload - Raw webhook payload\n * @param options - Routing options\n * @returns Validation result\n */\n validate(payload: unknown, options?: WebhookRouterOptions): WebhookValidation {\n // If provider is specified, use that handler directly\n if (options?.provider) {\n const handler = this.handlers.get(options.provider)\n if (!handler) {\n return {\n valid: false,\n error: `Unknown 
provider: ${options.provider}`\n }\n }\n return handler.validate(payload, {\n queryParams: options.queryParams,\n userAgent: options.userAgent\n })\n }\n\n // Auto-detect provider\n const detectedProvider = this.detectProvider(payload, {\n queryParams: options?.queryParams,\n userAgent: options?.userAgent\n })\n\n if (!detectedProvider) {\n return {\n valid: false,\n error: \"Could not detect webhook provider from payload structure\"\n }\n }\n\n const handler = this.handlers.get(detectedProvider)\n if (!handler) {\n return {\n valid: false,\n error: `Handler not found for provider: ${detectedProvider}`\n }\n }\n\n return handler.validate(payload, {\n queryParams: options?.queryParams,\n userAgent: options?.userAgent\n })\n }\n\n /**\n * Verify webhook signature\n *\n * @param payload - Raw webhook payload\n * @param provider - Provider name\n * @param options - Verification options\n * @returns true if signature is valid\n */\n verify(\n payload: unknown,\n provider: TranscriptionProvider,\n options: WebhookVerificationOptions\n ): boolean {\n const handler = this.handlers.get(provider)\n if (!handler || !handler.verify) {\n // No verification available for this provider\n return true\n }\n\n return handler.verify(payload, options)\n }\n\n /**\n * Route to a specific provider handler\n */\n private routeToProvider(\n payload: unknown,\n provider: TranscriptionProvider,\n options?: WebhookRouterOptions\n ): WebhookRouterResult {\n const handler = this.handlers.get(provider)\n\n if (!handler) {\n return {\n success: false,\n error: `Handler not found for provider: ${provider}`\n }\n }\n\n // Verify signature if requested\n let verified = true\n if (options?.verifySignature !== false && options?.verification && handler.verify) {\n verified = handler.verify(payload, options.verification)\n if (!verified) {\n return {\n success: false,\n provider,\n error: \"Webhook signature verification failed\",\n verified: false\n }\n }\n }\n\n // Validate payload\n const validation = handler.validate(payload, {\n queryParams: options?.queryParams,\n userAgent: options?.userAgent\n })\n if (!validation.valid) {\n return {\n success: false,\n provider,\n error: validation.error,\n verified\n }\n }\n\n // Parse payload\n try {\n const event = handler.parse(payload, {\n queryParams: options?.queryParams\n })\n\n return {\n success: true,\n provider,\n event,\n verified\n }\n } catch (error) {\n return {\n success: false,\n provider,\n error: `Failed to parse webhook: ${error instanceof Error ? 
error.message : \"Unknown error\"}`,\n verified\n }\n }\n }\n\n /**\n * Get handler for a specific provider\n *\n * @param provider - Provider name\n * @returns Handler instance or undefined\n */\n getHandler(provider: TranscriptionProvider): BaseWebhookHandler | undefined {\n return this.handlers.get(provider)\n }\n\n /**\n * Get all registered providers\n *\n * @returns Array of provider names\n */\n getProviders(): TranscriptionProvider[] {\n return Array.from(this.handlers.keys())\n }\n}\n\n/**\n * Factory function to create a webhook router\n */\nexport function createWebhookRouter(): WebhookRouter {\n return new WebhookRouter()\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA,yBAAAA;AAAA,EAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;AC8FO,IAAM,cAAN,MAAkB;AAAA,EAKvB,YAAY,QAA2B;AAJvC,SAAQ,WAA6D,oBAAI,IAAI;AAE7E,SAAQ,kBAAkB;AAGxB,SAAK,SAAS;AAAA,MACZ,mBAAmB;AAAA,MACnB,GAAG;AAAA,IACL;AAGA,QAAI,OAAO,KAAK,OAAO,SAAS,EAAE,WAAW,GAAG;AAC9C,YAAM,IAAI,MAAM,0DAA0D;AAAA,IAC5E;AAGA,QAAI,KAAK,OAAO,sBAAsB,aAAa,CAAC,KAAK,OAAO,iBAAiB;AAE/E,WAAK,OAAO,kBAAkB,OAAO,KAAK,OAAO,SAAS,EAAE,CAAC;AAAA,IAC/D;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAsBA,gBAAgB,SAAqC;AAEnD,UAAM,iBAAiB,KAAK,OAAO,UAAU,QAAQ,IAAI;AACzD,QAAI,CAAC,gBAAgB;AACnB,YAAM,IAAI,MAAM,wCAAwC,QAAQ,IAAI,EAAE;AAAA,IACxE;AAEA,YAAQ,WAAW,cAAc;AACjC,SAAK,SAAS,IAAI,QAAQ,MAAM,OAAO;AAAA,EACzC;AAAA;AAAA;AAAA;AAAA,EAKA,WAAW,UAAuD;AAChE,UAAM,UAAU,KAAK,SAAS,IAAI,QAAQ;AAC1C,QAAI,CAAC,SAAS;AACZ,YAAM,IAAI;AAAA,QACR,aAAa,QAAQ,6CAA6C,MAAM,KAAK,KAAK,SAAS,KAAK,CAAC,EAAE,KAAK,IAAI,CAAC;AAAA,MAC/G;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,eAAe,mBAAkE;AAEvF,QAAI,mBAAmB;AACrB,UAAI,CAAC,KAAK,SAAS,IAAI,iBAAiB,GAAG;AACzC,cAAM,IAAI;AAAA,UACR,aAAa,iBAAiB,6CAA6C,MAAM,KAAK,KAAK,SAAS,KAAK,CAAC,EAAE,KAAK,IAAI,CAAC;AAAA,QACxH;AAAA,MACF;AACA,aAAO;AAAA,IACT;AAGA,YAAQ,KAAK,OAAO,mBAAmB;AAAA,MACrC,KAAK;AACH,cAAM,IAAI;AAAA,UACR;AAAA,QACF;AAAA,MAEF,KAAK,eAAe;AAClB,cAAM,YAAY,MAAM,KAAK,KAAK,SAAS,KAAK,CAAC;AACjD,cAAM,WAAW,UAAU,KAAK,kBAAkB,UAAU,MAAM;AAClE,aAAK;AACL,eAAO;AAAA,MACT;AAAA,MAEA,KAAK;AAAA,MACL;AACE,YAAI,CAAC,KAAK,OAAO,iBAAiB;AAChC,gBAAM,IAAI,MAAM,gCAAgC;AAAA,QAClD;AACA,eAAO,KAAK,OAAO;AAAA,IACvB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAwCA,MAAM,WACJ,OACA,SACoC;AACpC,UAAM,WAAW,KAAK,eAAe,SAAS,QAAQ;AACtD,UAAM,UAAU,KAAK,WAAW,QAAQ;AAGxC,UAAM,EAAE,UAAU,GAAG,GAAG,eAAe,IAAI,WAAW,CAAC;AAEvD,WAAO,QAAQ,WAAW,OAAO,cAAc;AAAA,EACjD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,cACJ,cACA,UACoC;AACpC,UAAM,UAAU,KAAK,WAAW,QAAQ;AACxC,WAAO,QAAQ,cAAc,YAAY;AAAA,EAC3C;AAAA;AAAA,EAuFA,MAAM,iBACJ,SAKA,WAC2B;AAC3B,UAAM,WAAW,KAAK,eAAe,SAAS,QAAQ;AACtD,UAAM,UAAU,KAAK,WAAW,QAAQ;AAGxC,QAAI,CAAC,QAAQ,aAAa,aAAa,CAAC,QAAQ,kBAAkB;AAChE,YAAM,IAAI,MAAM,aAAa,QAAQ,4CAA4C;AAAA,IACnF;AAIA,UAAM,EAAE,UAAU,GAAG,GAAG,eAAe,IAAI,WAAW,CAAC;AAEvD,WAAO,QAAQ,iBAAiB,gBAAoC,SAAS;AAAA,EAC/E;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,iBACJ,cACA,UAC+B;AAC/B,UAAM,UAAU,KAAK,WAAW,QAAQ;AAExC,QAAI,CAAC,QAAQ,kBAAkB;AAC7B,YAAM,IAAI,MAAM,aAAa,QAAQ,yCAAyC;AAAA,IAChF;AAEA,WAAO,QAAQ,iBAAiB,YAAY;AAAA,EAC9C;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,gBACJ,UACA,SASC;AACD,UAAM,UAAU,KAAK,WAAW,QAAQ;AAExC,QAAI,CAAC,QAAQ,iBAAiB;AAC5B,YAAM,IAAI,MAAM,aAAa,QAAQ,wCAAwC;AAAA,IAC/E;AAEA,W
AAO,QAAQ,gBAAgB,OAAO;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA,EAKA,wBAAwB,UAAiC;AACvD,UAAM,UAAU,KAAK,WAAW,QAAQ;AACxC,WAAO,QAAQ;AAAA,EACjB;AAAA;AAAA;AAAA;AAAA,EAKA,yBAAkD;AAChD,WAAO,MAAM,KAAK,KAAK,SAAS,KAAK,CAAC;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA,EAKA,qBAAqB,UAA0C;AAC7D,UAAM,UAAU,KAAK,WAAW,QAAQ;AAExC,QAAI,CAAC,QAAQ,cAAc;AACzB,YAAM,IAAI,MAAM,aAAa,QAAQ,gCAAgC;AAAA,IACvE;AAEA,WAAO,QAAQ,aAAa;AAAA,EAC9B;AACF;AAKO,SAAS,kBACd,QACA,UACa;AACb,QAAM,SAAS,IAAI,YAAY,MAAM;AAGrC,MAAI,YAAY,SAAS,SAAS,GAAG;AACnC,eAAW,WAAW,UAAU;AAC9B,aAAO,gBAAgB,OAAO;AAAA,IAChC;AAAA,EACF;AAEA,SAAO;AACT;;;ACvaO,IAAM,4BAA4B;AAAA,EACvC,UAAU;AAAA,EACV,MAAM;AAAA,EACN,OAAO;AAAA,EACP,MAAM;AAAA,EACN,OAAO;AAAA,EACP,MAAM;AACR;;;ACXO,IAAM,2BAA2B;AAAA,EACtC,UAAU;AAAA,EACV,KAAK;AAAA,EACL,MAAM;AAAA,EACN,KAAK;AAAA,EACL,MAAM;AAAA,EACN,OAAO;AAAA,EACP,MAAM;AACR;;;ACRO,IAAM,4BAA4B;AAAA,EACvC,MAAM;AAAA,EACN,KAAK;AAAA,EACL,KAAK;AACP;;;ACAO,IAAM,6BAA6B;AAAA,EACxC,cAAc;AAAA,EACd,cAAc;AAAA,EACd,cAAc;AAAA,EACd,cAAc;AAAA,EACd,MAAM;AAAA,EACN,aAAa;AAAA,EACb,cAAc;AAChB;;;ACvBO,IAAM,iCAAiC;AAAA,EAC5C,WAAW;AAAA,EACX,YAAY;AAAA,EACZ,YAAY;AACd;;;ACTO,IAAM,mCAAmC;AAAA,EAC9C,aAAa;AAAA,EACb,cAAc;AAAA,EACd,cAAc;AAAA,EACd,cAAc;AAAA,EACd,cAAc;AAChB;;;ACNO,IAAM,iCAAiC;AAAA,EAC5C,UAAU;AAAA,EACV,WAAW;AAAA,EACX,WAAW;AAAA,EACX,WAAW;AACb;;;ACTO,IAAM,mBAAmB;AAAA;AAAA,EAE9B,cAAc;AAAA;AAAA,EAGd,kBAAkB;AAAA;AAAA,EAGlB,eAAe;AAAA;AAAA,EAGf,UAAU;AACZ;AAKO,IAAM,kBAAkB;AAAA;AAAA,EAE7B,cAAc;AAAA;AAAA,EAGd,aAAa;AAAA;AAAA,EAGb,kBAAkB;AACpB;;;ACxBO,IAAM,cAAc;AAAA;AAAA,EAEzB,aAAa;AAAA;AAAA,EAGb,iBAAiB;AAAA;AAAA,EAGjB,iBAAiB;AAAA;AAAA,EAGjB,qBAAqB;AAAA;AAAA,EAGrB,oBAAoB;AAAA;AAAA,EAGpB,eAAe;AAAA;AAAA,EAGf,eAAe;AAAA;AAAA,EAGf,YAAY;AAAA;AAAA,EAGZ,eAAe;AACjB;AASO,IAAM,iBAA4C;AAAA,EACvD,aAAa;AAAA,EACb,iBAAiB;AAAA,EACjB,iBAAiB;AAAA,EACjB,qBAAqB;AAAA,EACrB,oBAAoB;AAAA,EACpB,eAAe;AAAA,EACf,eAAe;AAAA,EACf,YAAY;AAAA,EACZ,eAAe;AACjB;AAmCO,SAAS,YACd,MACA,eACA,SACe;AACf,SAAO;AAAA,IACL;AAAA,IACA,SAAS,iBAAiB,eAAe,IAAI;AAAA,IAC7C;AAAA,EACF;AACF;;;ACoCO,IAAe,cAAf,MAA2D;AAAA,EAWhE,WAAW,QAA8B;AACvC,SAAK,SAAS;AAAA,EAChB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAgBU,oBACR,OACA,YACA,MAC2B;AAC3B,UAAM,MAAM;AAOZ,UAAM,aAAa,cAAc,IAAI,cAAc,IAAI,UAAU;AACjE,UAAM,iBAAiB,IAAI,UAAU;AACrC,UAAM,eAAe,IAAI,UAAU;AAEnC,WAAO;AAAA,MACL,SAAS;AAAA,MACT,UAAU,KAAK;AAAA,MACf,OAAO;AAAA,QACL,MAAM,QAAQ,IAAI,QAAQ,YAAY;AAAA,QACtC,SAAS,IAAI,WAAW;AAAA,QACxB,YAAY;AAAA,QACZ,SAAS;AAAA;AAAA,UAEP;AAAA;AAAA,UAEA,OAAO,IAAI;AAAA;AAAA,UAEX;AAAA,UACA;AAAA,UACA;AAAA;AAAA,UAEA,UAAU,KAAK;AAAA,QACjB;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKU,iBAAuB;AAC/B,QAAI,CAAC,KAAK,QAAQ;AAChB,YAAM,IAAI,MAAM,WAAW,KAAK,IAAI,+CAA+C;AAAA,IACrF;AACA,QAAI,CAAC,KAAK,OAAO,QAAQ;AACvB,YAAM,IAAI,MAAM,2BAA2B,KAAK,IAAI,WAAW;AAAA,IACjE;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASU,eACR,iBAAyB,iBACzB,iBAKA;AACA,SAAK,eAAe;AAEpB,UAAM,YAAY,kBAAkB,gBAAgB,KAAK,OAAQ,MAAM,IAAI,KAAK,OAAQ;AAExF,WAAO;AAAA,MACL,SAAS,KAAK,OAAQ,WAAW,KAAK;AAAA,MACtC,SAAS,KAAK,OAAQ,WAAW,iBAAiB;AAAA,MAClD,SAAS;AAAA,QACP,CAAC,cAAc,GAAG;AAAA,QAClB,gBAAgB;AAAA,QAChB,GAAG,KAAK,OAAQ;AAAA,MAClB;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAgB,kBACd,cACA,SAIoC;AACpC,UAAM,EAAE,cAAc,gBAAgB,cAAc,aAAa,gBAAgB,YAAY,IAC3F,WAAW,CAAC;AAEd,aAAS,UAAU,GAAG,UAAU,aAAa,WAAW;AACtD,YAAM,SAAS,MAAM,KAAK,cAAc,YAAY;AAEpD,UAAI,CAAC,OAAO,SAAS;AACnB,eAAO;AAAA,MACT;AAEA,YAAM,SAAS,OAAO,MAAM;AAC5B,UAAI,WAAW,aAAa;AAC1B,eAAO;AAAA,MACT;AAEA,UAAI,WAAW,SAAS;AACtB,eAAO,KAAK;AAAA,UACV,IAAI,MAAM,sBAAsB;AAAA,UAChC;AAAA,UACA,YAAY;AAAA,QACd;AAAA,MACF;AAEA,YAAM,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,UAAU,CAAC;AAAA,IAChE;AAEA,W
AAO;AAAA,MACL,SAAS;AAAA,MACT,UAAU,KAAK;AAAA,MACf,OAAO;AAAA,QACL,MAAM,YAAY;AAAA,QAClB,SAAS,wCAAwC,WAAW;AAAA,MAC9D;AAAA,IACF;AAAA,EACF;AACF;;;ACtSA,gBAAsB;;;AC4Df,IAAM,sBAA8C;AAAA,EACzD,UAAU;AAAA,EACV,OAAO;AAAA,EACP,MAAM;AACR;AAMO,IAAM,wBAAgD;AAAA,EAC3D,UAAU;AAAA,EACV,OAAO;AAAA,EACP,MAAM;AAAA,EACN,MAAM;AAAA,EACN,OAAO;AAAA,EACP,UAAU;AAAA,EACV,UAAU;AAAA,EACV,MAAM;AACR;AAMO,IAAM,0BAAkD;AAAA,EAC7D,UAAU;AACZ;AAmBO,SAAS,sBACd,iBACA,UACQ;AACR,MAAI;AAEJ,UAAQ,UAAU;AAAA,IAChB,KAAK;AACH,gBAAU;AACV;AAAA,IACF,KAAK;AACH,gBAAU;AACV;AAAA,IACF,KAAK;AACH,gBAAU;AACV;AAAA,EACJ;AAEA,QAAM,mBAAmB,QAAQ,eAAe;AAEhD,MAAI,CAAC,kBAAkB;AACrB,UAAM,IAAI;AAAA,MACR,aAAa,eAAe,yBAAyB,QAAQ,0BACnC,OAAO,KAAK,OAAO,EAAE,KAAK,IAAI,CAAC;AAAA,IAC3D;AAAA,EACF;AAEA,SAAO;AACT;;;ACjHO,SAAS,qBACd,IACA,YAAoB,iBAAiB,eACtB;AACf,SAAO,IAAI,QAAc,CAAC,SAAS,WAAW;AAC5C,UAAM,UAAU,WAAW,MAAM;AAC/B,aAAO,IAAI,MAAM,8BAA8B,CAAC;AAAA,IAClD,GAAG,SAAS;AAEZ,OAAG,KAAK,QAAQ,MAAM;AACpB,mBAAa,OAAO;AACpB,cAAQ;AAAA,IACV,CAAC;AAED,OAAG,KAAK,SAAS,CAAC,UAAU;AAC1B,mBAAa,OAAO;AACpB,aAAO,KAAK;AAAA,IACd,CAAC;AAAA,EACH,CAAC;AACH;AAiBO,SAAS,eACd,IACA,YAAoB,iBAAiB,UACtB;AACf,SAAO,IAAI,QAAc,CAAC,YAAY;AACpC,UAAM,UAAU,WAAW,MAAM;AAC/B,SAAG,UAAU;AACb,cAAQ;AAAA,IACV,GAAG,SAAS;AAEZ,OAAG,MAAM;AAET,OAAG,KAAK,SAAS,MAAM;AACrB,mBAAa,OAAO;AACpB,cAAQ;AAAA,IACV,CAAC;AAAA,EACH,CAAC;AACH;AAuBO,SAAS,uBACd,IACA,WACA,kBACM;AACN,KAAG,GAAG,QAAQ,MAAM;AAClB,qBAAiB,MAAM;AACvB,eAAW,SAAS;AAAA,EACtB,CAAC;AAED,KAAG,GAAG,SAAS,CAAC,UAAiB;AAC/B,eAAW,UAAU,YAAY,YAAY,iBAAiB,MAAM,SAAS,KAAK,CAAC;AAAA,EACrF,CAAC;AAED,KAAG,GAAG,SAAS,CAAC,MAAc,WAAmB;AAC/C,qBAAiB,QAAQ;AACzB,eAAW,UAAU,MAAM,OAAO,SAAS,CAAC;AAAA,EAC9C,CAAC;AACH;AAmBO,SAAS,wBACd,eACA,cACA,eACM;AACN,MAAI,kBAAkB,QAAQ;AAC5B,UAAM,IAAI,MAAM,iCAAiC,aAAa,EAAE;AAAA,EAClE;AAEA,MAAI,iBAAiB,eAAe;AAClC,UAAM,IAAI,MAAM,uBAAuB;AAAA,EACzC;AACF;;;ACnGO,SAAS,kBACd,OACA,UACA,WACA,UACY;AACZ,QAAM,cAAc,OAAO,OAAO,QAAQ;AAC1C,QAAM,UAAU,YAAY,KAAK,CAAC,MAAM,MAAM,KAAK;AAEnD,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI;AAAA,MACR,GAAG,QAAQ,qBAAqB,SAAS,KAAK,KAAK,4CACR,YAAY,KAAK,IAAI,CAAC;AAAA,IACnE;AAAA,EACF;AAEA,SAAO;AACT;;;AChCO,SAAS,8BACd,YACA,cACA,aACuB;AACvB,MAAI,CAAC,cAAc,WAAW,WAAW,GAAG;AAC1C,WAAO;AAAA,EACT;AAEA,QAAM,aAAa,oBAAI,IAAY;AAEnC,aAAW,QAAQ,CAAC,cAAc;AAChC,UAAM,YAAY,aAAa,SAAS;AACxC,QAAI,cAAc,QAAW;AAC3B,iBAAW,IAAI,OAAO,SAAS,CAAC;AAAA,IAClC;AAAA,EACF,CAAC;AAED,MAAI,WAAW,SAAS,GAAG;AACzB,WAAO;AAAA,EACT;AAEA,SAAO,MAAM,KAAK,UAAU,EAAE,IAAI,CAAC,eAAe;AAAA,IAChD,IAAI;AAAA,IACJ,OAAO,cAAc,YAAY,SAAS,IAAI,WAAW,SAAS;AAAA,EACpE,EAAE;AACJ;AAwBO,SAAS,aACd,OACA,QACoB;AACpB,MAAI,CAAC,SAAS,MAAM,WAAW,GAAG;AAChC,WAAO;AAAA,EACT;AAEA,QAAM,kBAAkB,MAAM,IAAI,MAAM;AACxC,SAAO,gBAAgB,SAAS,IAAI,kBAAkB;AACxD;AAQO,IAAM,kBAAkB;AAAA,EAC7B,QAAQ;AAAA,IACN,QAAQ;AAAA,IACR,YAAY;AAAA,IACZ,MAAM;AAAA,IACN,OAAO;AAAA,EACT;AAAA,EACA,YAAY;AAAA,IACV,QAAQ;AAAA,IACR,YAAY;AAAA,IACZ,WAAW;AAAA,IACX,OAAO;AAAA,EACT;AAAA,EACA,UAAU;AAAA,IACR,QAAQ;AAAA,IACR,YAAY;AAAA,IACZ,WAAW;AAAA,IACX,OAAO;AAAA,EACT;AAAA,EACA,OAAO;AAAA,IACL,WAAW;AAAA,IACX,SAAS;AAAA,IACT,YAAY;AAAA,IACZ,QAAQ;AAAA,EACV;AAAA,EACA,cAAc;AAAA,IACZ,SAAS;AAAA,IACT,MAAM;AAAA,IACN,UAAU;AAAA,IACV,SAAS;AAAA,EACX;AACF;AA0BO,SAAS,gBACd,gBACA,UACA,gBAAqC,UAChB;AACrB,MAAI,CAAC,eAAgB,QAAO;AAE5B,QAAM,UAAU,gBAAgB,QAAQ;AACxC,QAAM,YAAY,eAAe,SAAS,EAAE,YAAY;AAGxD,MAAI,aAAa,SAAS;AACxB,WAAO,QAAQ,SAAiC;AAAA,EAClD;AAGA,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,OAAO,GAAG;AAClD,QAAI,UAAU,SAAS,GAAG,GAAG;AAC3B,aAAO;AAAA,IACT;AAAA,EACF;AAEA,SAAO;AACT;;;ACjLA,mBAAkB;;;ACTlB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA
AA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACWO,IAAM,2BAA2B;AAAA,EACtC,aAAa;AACf;;;ACHO,IAAM,uBAAuB;AAAA,EAClC,aAAa;AACf;;;ACDO,IAAM,sDAAsD;AAAA,EACjE,WAAW;AAAA,EACX,UAAU;AAAA,EACV,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,UAAU;AAAA,EACV,UAAU;AAAA,EACV,aAAa;AAAA,EACb,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,WAAW;AAAA,EACX,SAAS;AAAA,EACT,SAAS;AAAA,EACT,UAAU;AAAA,EACV,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,SAAS;AAAA,EACT,UAAU;AAAA,EACV,SAAS;AAAA,EACT,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,UAAU;AAAA,EACV,UAAU;AAAA,EACV,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,UAAU;AAAA,EACV,gBAAgB;AAAA,EAChB,OAAO;AAAA,EACP,UAAU;AAAA,EACV,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,WAAW;AAAA,EACX,WAAW;AAAA,EACX,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,UAAU;AAAA,EACV,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,KAAK;AAAA,EACL,OAAO;AAAA,EACP,SAAS;AAAA,EACT,SAAS;AAAA,EACT,YAAY;AAAA,EACZ,eAAe;AAAA,EACf,YAAY;AAAA,EACZ,UAAU;AAAA,EACV,OAAO;AAAA,EACP,WAAW;AAAA,EACX,SAAS;AAAA,EACT,OAAO;AAAA,EACP,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,WAAW;AAAA,EACX,SAAS;AAAA,EACT,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,SAAS;AAAA,EACT,UAAU;AAAA,EACV,SAAS;AAAA,EACT,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,WAAW;AAAA,EACX,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AAAA,EACT,SAAS;AAAA,EACT,SAAS;AAAA,EACT,OAAO;AAAA,EACP,OAAO;AAAA,EACP,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,MAAM;AAAA,EACN,SAAS;AAAA,EACT,SAAS;AAAA,EACT,SAAS;AAAA,EACT,WAAW;AAAA,EACX,MAAM;AAAA,EACN,OAAO;AAAA,EACP,YAAY;AAAA,EACZ,OAAO;AAAA,EACP,SAAS;AAAA,EACT,QAAQ;AACV;;;ACpGO,IAAM,+DAA+D;AAAA,EAC1E,2BAA2B;AAAA,EAC3B,8BAA8B;AAAA,EAC9B,QAAQ;AACV;;;ACJO,IAAM,0DAA0D;AAAA,EACrE,MAAM;AAAA,EACN,KAAK;AAAA,EACL,KAAK;AAAA,EACL,OAAO;AAAA,EACP,KAAK;AACP;;;ACNO,IAAM,uEAAuE;AAAA,EAClF,WAAW;AAAA,EACX,UAAU;AAAA,EACV,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,UAAU;AAAA,EACV,UAAU;AAAA,EACV,aAAa;AAAA,EACb,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,WAAW;AAAA,EACX,SAAS;AAAA,EACT,SAAS;AAAA,EACT,UAAU;AAAA,EACV,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,SAAS;AAAA,EACT,UAAU;AAAA,EACV,SAAS;AAAA,EACT,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,UAAU;AAAA,EACV,UAAU;AAAA,EACV,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,UAAU;AAAA,EACV,gBAAgB;AAAA,EAChB,OAAO;AAAA,EACP,UAAU;AAAA,EACV,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,WAAW;AAAA,EACX,WAAW;AAAA,EACX,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,UAAU;AAAA,EACV,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,KAAK;AAAA,EACL,OAAO;AAAA,EACP,SAAS;AAAA,EACT,SAAS;AAAA,EACT,YAAY;AAAA,EACZ,eAAe;AAAA,EACf,YAAY;AAAA,EACZ,UAAU;AAAA,EACV,OAAO;AAAA,EACP,WAAW;AAAA,EACX,SAAS;AAAA,EACT,OAAO;AAAA,EACP,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,WAAW;AAAA,EACX,SAAS;AAAA,EACT,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,SAAS;AAAA,EACT,UAAU;AAAA,EACV,SAAS;AAAA,EACT,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,WAAW;AAAA,EACX,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AAAA,EACT,SAAS;AAAA,EACT,SAAS;AAAA,EACT,OAAO;AAAA,EACP,OAAO;AAAA,EACP,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,MAAM;AAAA,EACN,SAAS;AAAA,EACT,SAAS;AAAA,EACT,SAAS;AAAA,EACT,WAAW;AAAA,EACX,MAAM;AAAA,EACN,OAAO;AAAA,EACP,YAAY;AAAA,EACZ,OAAO;AAAA,EACP,OAAO;AAAA,EACP,SAAS;AAAA,EACT,QAAQ;AACV;;;ACrGO,IAAM,wCAAwC;AAAA
,EACnD,iBAAiB;AACnB;;;ACFO,IAAM,uCAAuC;AAAA,EAClD,mBAAmB;AACrB;;;ACFO,IAAM,qCAAqC;AAAA,EAChD,iBAAiB;AACnB;;;ACFO,IAAM,iDAAiD;AAAA,EAC5D,8BAA8B;AAChC;;;ACFO,IAAM,6CAA6C;AAAA,EACxD,yBAAyB;AAC3B;;;ACFO,IAAM,8CAA8C;AAAA,EACzD,2BAA2B;AAC7B;;;ACFO,IAAM,4CAA4C;AAAA,EACvD,wBAAwB;AAC1B;;;ACFO,IAAM,yCAAyC;AAAA,EACpD,qBAAqB;AACvB;;;ACFO,IAAM,4CAA4C;AAAA,EACvD,wBAAwB;AAC1B;;;ACFO,IAAM,oCAAoC;AAAA,EAC/C,gBAAgB;AAClB;;;ACFO,IAAM,sCAAsC;AAAA,EACjD,kBAAkB;AACpB;;;ACFO,IAAM,yCAAyC;AAAA,EACpD,qBAAqB;AACvB;;;ACFO,IAAM,uCAAuC;AAAA,EAClD,mBAAmB;AACrB;;;ACFO,IAAM,2CAA2C;AAAA,EACtD,oBAAoB;AACtB;;;ACFO,IAAM,qCAAqC;AAAA,EAChD,gBAAgB;AAClB;;;ACFO,IAAM,sCAAsC;AAAA,EACjD,iBAAiB;AACnB;;;ACAO,IAAM,qBAAqB;AAAA,EAChC,MAAM;AAAA,EACN,KAAK;AACP;;;ACFO,IAAM,yCAAyC;AAAA,EACpD,oBAAoB;AACtB;;;ACFO,IAAM,2CAA2C;AAAA,EACtD,sBAAsB;AACxB;;;ACLO,IAAM,0BAA0B;AAAA,EACrC,eAAe;AACjB;;;ACFO,IAAM,wBAAwB;AAAA,EACnC,aAAa;AACf;;;ACFO,IAAM,qCAAqC;AAAA,EAChD,gBAAgB;AAAA,EAChB,MAAM;AACR;;;ACHO,IAAM,uCAAuC;AAAA,EAClD,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,MAAM;AAAA,EACN,OAAO;AACT;;;ACLO,IAAM,oCAAoC;AAAA,EAC/C,0BAA0B;AAC5B;;;ACFO,IAAM,gCAAgC;AAAA,EAC3C,qBAAqB;AACvB;;;ACFO,IAAM,iCAAiC;AAAA,EAC5C,uBAAuB;AACzB;;;ACFO,IAAM,+BAA+B;AAAA,EAC1C,oBAAoB;AACtB;;;ACFO,IAAM,4BAA4B;AAAA,EACvC,iBAAiB;AACnB;;;ACFO,IAAM,sDAAsD;AAAA,EACjE,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,MAAM;AAAA,EACN,OAAO;AACT;;;ACLO,IAAM,0BAA0B;AAAA,EACrC,gBAAgB;AAClB;;;ACCO,IAAM,4BAA4B;AAAA,EACvC,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,MAAM;AAAA,EACN,OAAO;AACT;;;ACRO,IAAM,+BAA+B;AAAA,EAC1C,oBAAoB;AACtB;;;ACHO,IAAM,uBAAuB;AAAA,EAClC,YAAY;AACd;;;ACDO,IAAM,yBAAyB;AAAA,EACpC,cAAc;AAChB;;;ACFO,IAAM,4BAA4B;AAAA,EACvC,iBAAiB;AACnB;;;ACFO,IAAM,0BAA0B;AAAA,EACrC,eAAe;AACjB;;;ACFO,IAAM,8BAA8B;AAAA,EACzC,gBAAgB;AAClB;;;ACFO,IAAM,0BAA0B;AAAA,EACrC,gBAAgB;AAClB;;;ACFO,IAAM,kDAAkD;AAAA,EAC7D,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,MAAM;AAAA,EACN,OAAO;AACT;;;ACLO,IAAM,wBAAwB;AAAA,EACnC,MAAM;AACR;;;ACCO,IAAM,0BAA0B;AAAA,EACrC,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,MAAM;AAAA,EACN,OAAO;AACT;;;ACLO,IAAM,2BAA2B;AAAA,EACtC,aAAa;AACf;;;ACLO,IAAM,4BAA4B;AAAA,EACvC,WAAW;AAAA,EACX,WAAW;AACb;;;ACDO,IAAM,sBAAsB;AAAA,EACjC,KAAK;AAAA,EACL,KAAK;AACP;;;ACHO,IAAM,qBAAqB;AAAA,EAChC,SAAS;AAAA,EACT,YAAY;AACd;;;ACHO,IAAM,mBAAmB;AAAA,EAC9B,SAAS;AAAA,EACT,eAAe;AAAA,EACf,SAAS;AACX;;;ACNO,IAAM,wCAAwC;AAAA,EACnD,gBAAgB;AAAA,EAChB,MAAM;AACR;;;ACHO,IAAM,0CAA0C;AAAA,EACrD,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,MAAM;AAAA,EACN,OAAO;AACT;;;ACFO,IAAM,gCAAgC;AAAA,EAC3C,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,KAAK;AAAA,EACL,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;
AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AACN;;;ACvGO,IAAM,wBAAwB;AAAA,EACnC,YAAY;AACd;;;ACCO,IAAM,8BAA8B;AAAA,EACzC,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,KAAK;AAAA,EACL,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AACN;;;ACxGO,IAAM,yBAAyB;AAAA,EACpC,aAAa;AACf;;;ACAO,IAAM,uBAAuB;AAAA,EAClC,MAAM;AAAA,EACN,UAAU;AACZ;;;ACLO,IAAM,sDAAsD;AAAA,EACjE,WAAW;AAAA,EACX,UAAU;AAAA,EACV,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,UAAU;AAAA,EACV,UAAU;AAAA,EACV,aAAa;AAAA,EACb,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,WAAW;AAAA,EACX,SAAS;AAAA,EACT,SAAS;AAAA,EACT,UAAU;AAAA,EACV,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,SAAS;AAAA,EACT,UAAU;AAAA,EACV,SAAS;AAAA,EACT,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,UAAU;AAAA,EACV,UAAU;AAAA,EACV,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,UAAU;AAAA,EACV,gBAAgB;AAAA,EAChB,OAAO;AAAA,EACP,UAAU;AAAA,EACV,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,WAAW;AAAA,EACX,WAAW;AAAA,EACX,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,UAAU;AAAA,EACV,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,KAAK;AAAA,EACL,OAAO;AAAA,EACP,SAAS;AAAA,EACT,SAAS;AAAA,EACT,YAAY;AAAA,EACZ,eAAe;AAAA,EACf,YAAY;AAAA,EACZ,UAAU;AAAA,EACV,OAAO;AAAA,EACP,WAAW;AAAA,EACX,SAAS;AAAA,EACT,OAAO;AAAA,EACP,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,WAAW;AAAA,EACX,SAAS;AAAA,EACT,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,SAAS;AAAA,EACT,UAAU;AAAA,EACV,SAAS;AAAA,EACT,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,WAAW;AAAA,EACX,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AAAA,EACT,SAAS;AAAA,EACT,SAAS;AAAA,EACT,OAAO;AAAA,EACP,OAAO;AAAA,EACP,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,MAAM;AAAA,EACN,SAAS;AAAA,EACT,SAAS;AAAA,EACT,SAAS;AAAA,EACT,WAAW;AAAA,EACX,MAAM;AAAA,EACN,OAAO;AAAA,EACP,YAAY;AAAA,EACZ,OAAO;AAAA,EACP,SAAS;AAAA,EACT,QAAQ;AACV;;;ACpGO,IAAM,+DAA+D;AAAA,EAC1E,2BAA2B;AAAA,EA
C3B,8BAA8B;AAAA,EAC9B,QAAQ;AACV;;;ACJO,IAAM,0DAA0D;AAAA,EACrE,MAAM;AAAA,EACN,KAAK;AAAA,EACL,KAAK;AAAA,EACL,OAAO;AAAA,EACP,KAAK;AACP;;;ACNO,IAAM,uEAAuE;AAAA,EAClF,WAAW;AAAA,EACX,UAAU;AAAA,EACV,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,UAAU;AAAA,EACV,UAAU;AAAA,EACV,aAAa;AAAA,EACb,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,WAAW;AAAA,EACX,SAAS;AAAA,EACT,SAAS;AAAA,EACT,UAAU;AAAA,EACV,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,SAAS;AAAA,EACT,UAAU;AAAA,EACV,SAAS;AAAA,EACT,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,UAAU;AAAA,EACV,UAAU;AAAA,EACV,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,UAAU;AAAA,EACV,gBAAgB;AAAA,EAChB,OAAO;AAAA,EACP,UAAU;AAAA,EACV,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,WAAW;AAAA,EACX,WAAW;AAAA,EACX,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,UAAU;AAAA,EACV,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,KAAK;AAAA,EACL,OAAO;AAAA,EACP,SAAS;AAAA,EACT,SAAS;AAAA,EACT,YAAY;AAAA,EACZ,eAAe;AAAA,EACf,YAAY;AAAA,EACZ,UAAU;AAAA,EACV,OAAO;AAAA,EACP,WAAW;AAAA,EACX,SAAS;AAAA,EACT,OAAO;AAAA,EACP,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,WAAW;AAAA,EACX,SAAS;AAAA,EACT,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,SAAS;AAAA,EACT,UAAU;AAAA,EACV,SAAS;AAAA,EACT,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,WAAW;AAAA,EACX,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AAAA,EACT,SAAS;AAAA,EACT,SAAS;AAAA,EACT,OAAO;AAAA,EACP,OAAO;AAAA,EACP,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,MAAM;AAAA,EACN,SAAS;AAAA,EACT,SAAS;AAAA,EACT,SAAS;AAAA,EACT,WAAW;AAAA,EACX,MAAM;AAAA,EACN,OAAO;AAAA,EACP,YAAY;AAAA,EACZ,OAAO;AAAA,EACP,OAAO;AAAA,EACP,SAAS;AAAA,EACT,QAAQ;AACV;;;ACrGO,IAAM,sCAAsC;AAAA,EACjD,mBAAmB;AACrB;;;ACFO,IAAM,oCAAoC;AAAA,EAC/C,iBAAiB;AACnB;;;ACFO,IAAM,wCAAwC;AAAA,EACnD,qBAAqB;AACvB;;;ACFO,IAAM,sCAAsC;AAAA,EACjD,mBAAmB;AACrB;;;ACFO,IAAM,0CAA0C;AAAA,EACrD,sBAAsB;AACxB;;;ACFO,IAAM,wCAAwC;AAAA,EACnD,oBAAoB;AACtB;;;ACFO,IAAM,0CAA0C;AAAA,EACrD,sBAAsB;AACxB;;;AvEgDO,IAAM,4CAA4C,CAGvD,0BACA,YACmB;AACnB,SAAO,aAAAC,QAAM,KAAK,oBAAoB,0BAA0B,OAAO;AACzE;AAoBO,IAAM,2CAA2C,CAGtD,IACA,YACmB;AACnB,SAAO,aAAAC,QAAM,IAAI,oBAAoB,EAAE,IAAI,OAAO;AACpD;AA6QO,IAAM,4CAA4C,CAGvD,kBACA,QACA,YACmB;AACnB,SAAO,aAAAC,QAAM,KAAK,YAAY,kBAAkB;AAAA,IAC9C,GAAG;AAAA,IACH,QAAQ,EAAE,GAAG,QAAQ,GAAG,SAAS,OAAO;AAAA,EAC1C,CAAC;AACH;;;ALjRO,IAAM,gBAAN,cAA4B,YAAY;AAAA,EAAxC;AAAA;AACL,SAAS,OAAO;AAChB,SAAS,eAAqC;AAAA,MAC5C,WAAW;AAAA,MACX,aAAa;AAAA,MACb,gBAAgB;AAAA,MAChB,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,MAClB,eAAe;AAAA,MACf,mBAAmB;AAAA,MACnB,iBAAiB;AAAA,MACjB,cAAc;AAAA;AAAA,IAChB;AAEA,SAAU,UAAU;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMV,iBAAiB;AACzB,WAAO,MAAM,eAAe,cAAc;AAAA,EAC5C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAiEA,MAAM,WACJ,OACA,SACoC;AACpC,SAAK,eAAe;AAEpB,QAAI;AAEF,YAAM,UAAU,KAAK,0BAA0B,OAAO,OAAO;AAG7D,YAAM,WAAW,MAAM;AAAA,QACrB;AAAA,QACA,KAAK,eAAe;AAAA,MACtB;AAEA,YAAM,QAAQ,SAAS,KAAK;AAG5B,UAAI,SAAS,YAAY;AACvB,eAAO;AAAA,UACL,SAAS;AAAA,UACT,UAAU,KAAK;AAAA,UACf,MAAM;AAAA,YACJ,IAAI;AAAA,YACJ,MAAM;AAAA,YACN,QAAQ;AAAA,UACV;AAAA,UACA,KAAK,SAAS;AAAA,QAChB;AAAA,MACF;AAGA,aAAO,MAAM,KAAK,kBAAkB,KAAK;AAAA,IAC3C,SAAS,OAAO;AACd,aAAO,KAAK,oBAAoB,KAAK;AAAA,IACvC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,cAA0D;AAC5E,SAAK,eAAe;AAEpB,QAAI;AAEF,YAAM,WAAW,MAAM;AAAA,QACrB;AAAA,QACA,KAAK,eAAe;AAAA,MACtB;AAE
A,aAAO,KAAK,kBAAkB,SAAS,IAAI;AAAA,IAC7C,SAAS,OAAO;AACd,aAAO,KAAK,oBAAoB,KAAK;AAAA,IACvC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,0BACN,OACA,SAC0B;AAE1B,QAAI;AACJ,QAAI,MAAM,SAAS,OAAO;AACxB,iBAAW,MAAM;AAAA,IACnB,OAAO;AACL,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,UAAM,UAAoC;AAAA,MACxC,WAAW;AAAA,IACb;AAGA,QAAI,SAAS;AAEX,UAAI,QAAQ,YAAY,QAAQ,mBAAmB;AACjD,gBAAQ,kBAAkB;AAAA,UACxB,WAAW,QAAQ,WACf,CAAC,QAAQ,QAAyC,IAClD;AAAA,UACJ,gBAAgB,QAAQ;AAAA,QAC1B;AAAA,MACF;AAGA,UAAI,QAAQ,aAAa;AACvB,gBAAQ,cAAc;AACtB,YAAI,QAAQ,kBAAkB;AAC5B,kBAAQ,qBAAqB;AAAA,YAC3B,oBAAoB,QAAQ;AAAA,UAC9B;AAAA,QACF;AAAA,MACF;AAGA,UAAI,QAAQ,oBAAoB,QAAQ,iBAAiB,SAAS,GAAG;AACnE,gBAAQ,oBAAoB;AAC5B,gBAAQ,2BAA2B;AAAA,UACjC,YAAY,QAAQ;AAAA,QACtB;AAAA,MACF;AAGA,UAAI,QAAQ,eAAe;AACzB,gBAAQ,gBAAgB;AAAA,MAC1B;AAGA,UAAI,QAAQ,mBAAmB;AAC7B,gBAAQ,qBAAqB;AAAA,MAC/B;AAGA,UAAI,QAAQ,iBAAiB;AAC3B,gBAAQ,2BAA2B;AAAA,MACrC;AAGA,UAAI,QAAQ,YAAY;AACtB,gBAAQ,WAAW;AACnB,gBAAQ,kBAAkB;AAAA,UACxB,KAAK,QAAQ;AAAA,QACf;AAAA,MACF;AAGA,UAAI,QAAQ,UAAU;AACpB,gBAAQ,kBAAkB,QAAQ;AAAA,MACpC;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,kBAAkB,UAA0D;AAElF,UAAM,SAAS,gBAAgB,SAAS,QAAQ,QAAQ;AAGxD,QAAI,SAAS,WAAW,SAAS;AAC/B,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,KAAK;AAAA,QACf,OAAO;AAAA,UACL,MAAM,SAAS,YAAY,SAAS,KAAK,YAAY;AAAA,UACrD,SAAS;AAAA,UACT,YAAY,SAAS,cAAc;AAAA,QACrC;AAAA,QACA,KAAK;AAAA,MACP;AAAA,IACF;AAGA,UAAM,SAAS,SAAS;AACxB,UAAM,gBAAgB,QAAQ;AAE9B,WAAO;AAAA,MACL,SAAS;AAAA,MACT,UAAU,KAAK;AAAA,MACf,MAAM;AAAA,QACJ,IAAI,SAAS;AAAA,QACb,MAAM,eAAe,mBAAmB;AAAA,QACxC,YAAY;AAAA;AAAA,QACZ;AAAA,QACA,UAAU,eAAe,YAAY,CAAC;AAAA;AAAA,QACtC,UAAU;AAAA;AAAA,QACV,UAAU,KAAK,gBAAgB,aAAa;AAAA,QAC5C,OAAO,KAAK,aAAa,aAAa;AAAA,QACtC,YAAY,KAAK,kBAAkB,aAAa;AAAA,QAChD,SAAS,QAAQ,eAAe,WAAW;AAAA,QAC3C,UAAU;AAAA,UACR,eAAe,SAAS;AAAA,UACxB,gBAAgB,SAAS;AAAA,QAC3B;AAAA,QACA,WAAW,SAAS;AAAA,QACpB,aAAa,SAAS,gBAAgB;AAAA,MACxC;AAAA,MACA,KAAK;AAAA,IACP;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,gBAAgB,eAA6C;AACnE,WAAO;AAAA,MACL,eAAe;AAAA,MACf,CAAC,cAA4B,UAAU;AAAA,MACvC,CAAC,OAAO,WAAW,EAAE;AAAA,IACvB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,eAA6C;AAChE,QAAI,CAAC,eAAe,YAAY;AAC9B,aAAO;AAAA,IACT;AAGA,UAAM,WAAW,cAAc,WAAW;AAAA,MAAQ,CAAC,cACjD,UAAU,MAAM,IAAI,CAAC,UAAmB;AAAA,QACtC;AAAA,QACA,SAAS,UAAU;AAAA,MACrB,EAAE;AAAA,IACJ;AAEA,WAAO,aAAiB,UAAU,CAAC,UAAU;AAAA,MAC3C,MAAM,KAAK,KAAK;AAAA,MAChB,OAAO,KAAK,KAAK;AAAA,MACjB,KAAK,KAAK,KAAK;AAAA,MACf,YAAY,KAAK,KAAK;AAAA,MACtB,SAAS,KAAK,SAAS,SAAS;AAAA,IAClC,EAAE;AAAA,EACJ;AAAA;AAAA;AAAA;AAAA,EAKQ,kBAAkB,eAA6C;AACrE,QAAI,CAAC,eAAe,YAAY;AAC9B,aAAO;AAAA,IACT;AAEA,WAAO,cAAc,WAAW,IAAI,CAAC,eAA6B;AAAA,MAChE,MAAM,UAAU;AAAA,MAChB,OAAO,UAAU;AAAA,MACjB,KAAK,UAAU;AAAA,MACf,SAAS,UAAU,SAAS,SAAS;AAAA,MACrC,YAAY,UAAU;AAAA,MACtB,OAAO,UAAU,MAAM,IAAI,CAAC,UAAmB;AAAA,QAC7C,MAAM,KAAK;AAAA,QACX,OAAO,KAAK;AAAA,QACZ,KAAK,KAAK;AAAA,QACV,YAAY,KAAK;AAAA,MACnB,EAAE;AAAA,IACJ,EAAE;AAAA,EACJ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA6CA,MAAM,iBACJ,SACA,WAC2B;AAC3B,SAAK,eAAe;AAGpB,QAAI;AACJ,QAAI,SAAS,YAAY;AACvB,4BAAsB;AAAA,QACpB,QAAQ;AAAA,QACR;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAGA,UAAM,mBAA8C;AAAA,MAClD,UAAU,SAAS,WACd,sBAAsB,QAAQ,UAAU,QAAQ,IACjD;AAAA,MACJ,aAAa;AAAA,MACb,UAAU,SAAS;AAAA,MACnB,aAAa,SAAS;AAAA,IACxB;AAEA,QAAI,SAAS,UAAU;AACrB,uBAAiB,kBAAkB;AAAA,QACjC,WAAW,CAAC,QAAQ,QAAyC;AAAA,MAC/D;AAAA,IACF;AAGA,UAAM,eAAe,MAAM;AAAA,MACzB;AAAA,MACA;AAAA;AAAA,MACA,KAAK,eAAe;AAAA,IACtB;AAEA,UAAM,EAAE,IAAI,KAAK,MAAM,IAAI,aAAa;AAGxC,UAAM,KAAK,IAAI,UAAAC,QA
AU,KAAK;AAE9B,QAAI,gBAA+B;AAGnC,2BAAuB,IAAI,WAAW,CAAC,WAAW;AAChD,sBAAgB;AAAA,IAClB,CAAC;AAED,OAAG,GAAG,WAAW,CAAC,SAAiB;AACjC,UAAI;AACF,cAAM,UAAU,KAAK,MAAM,KAAK,SAAS,CAAC;AAG1C,YAAI,QAAQ,SAAS,cAAc;AAEjC,gBAAM,oBAAoB;AAC1B,gBAAM,cAAc,kBAAkB;AACtC,gBAAM,YAAY,YAAY;AAE9B,qBAAW,eAAe;AAAA,YACxB,MAAM;AAAA,YACN,MAAM,UAAU;AAAA,YAChB,SAAS,YAAY;AAAA,YACrB,YAAY,UAAU;AAAA,YACtB,OAAO,UAAU,MAAM,IAAI,CAAC,UAAU;AAAA,cACpC,MAAM,KAAK;AAAA,cACX,OAAO,KAAK;AAAA,cACZ,KAAK,KAAK;AAAA,cACV,YAAY,KAAK;AAAA,YACnB,EAAE;AAAA,YACF,MAAM;AAAA,UACR,CAAC;AAAA,QACH,WAAW,QAAQ,SAAS,aAAa;AAEvC,gBAAM,oBAAoB;AAC1B,gBAAM,cAAc,kBAAkB;AACtC,gBAAM,YAAY,YAAY;AAE9B,gBAAM,gBAAgB;AAAA,YACpB,MAAM,UAAU;AAAA,YAChB,OAAO,UAAU;AAAA,YACjB,KAAK,UAAU;AAAA,YACf,SAAS,UAAU,SAAS,SAAS;AAAA,YACrC,YAAY,UAAU;AAAA,YACtB,OAAO,UAAU,MAAM,IAAI,CAAC,UAAU;AAAA,cACpC,MAAM,KAAK;AAAA,cACX,OAAO,KAAK;AAAA,cACZ,KAAK,KAAK;AAAA,cACV,YAAY,KAAK;AAAA,YACnB,EAAE;AAAA,UACJ;AACA,qBAAW,cAAc,aAAa;AAAA,QACxC,WAAW,QAAQ,SAAS,YAAY;AACtC,qBAAW,aAAa,OAAO;AAAA,QACjC;AAAA,MACF,SAAS,OAAO;AACd,mBAAW,UAAU;AAAA,UACnB,MAAM,YAAY;AAAA,UAClB,SAAS;AAAA,UACT,SAAS;AAAA,QACX,CAAC;AAAA,MACH;AAAA,IACF,CAAC;AAGD,UAAM,qBAAqB,EAAE;AAG7B,WAAO;AAAA,MACL;AAAA,MACA,UAAU,KAAK;AAAA,MACf,WAAW,oBAAI,KAAK;AAAA,MACpB,WAAW,MAAM;AAAA,MACjB,WAAW,OAAO,UAAsB;AAEtC,gCAAwB,eAAe,GAAG,YAAY,UAAAA,QAAU,IAAI;AAGpE,WAAG,KAAK,MAAM,IAAI;AAGlB,YAAI,MAAM,QAAQ;AAChB,aAAG;AAAA,YACD,KAAK,UAAU;AAAA,cACb,MAAM;AAAA,YACR,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF;AAAA,MACA,OAAO,YAAY;AACjB,YAAI,kBAAkB,YAAY,kBAAkB,WAAW;AAC7D;AAAA,QACF;AAEA,wBAAgB;AAGhB,YAAI,GAAG,eAAe,UAAAA,QAAU,MAAM;AACpC,aAAG;AAAA,YACD,KAAK,UAAU;AAAA,cACb,MAAM;AAAA,YACR,CAAC;AAAA,UACH;AAAA,QACF;AAGA,cAAM,eAAe,EAAE;AACvB,wBAAgB;AAAA,MAClB;AAAA,IACF;AAAA,EACF;AACF;AAKO,SAAS,oBAAoB,QAAuC;AACzE,QAAM,UAAU,IAAI,cAAc;AAClC,UAAQ,WAAW,MAAM;AACzB,SAAO;AACT;;;A6EhoBA,IAAAC,aAAsB;;;ACItB,IAAAC,gBAAkB;;;ACVlB,IAAAC,kBAAA;AAAA,SAAAA,iBAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACeO,IAAM,+BAA+B;AAAA,EAC1C,SAAS;AAAA,EACT,aAAa;AACf;;;ACJO,IAAM,aAAa;AAAA,EACxB,gBAAgB;AAAA,EAChB,qBAAqB;AAAA,EACrB,YAAY;AAAA,EACZ,iBAAiB;AAAA,EACjB,wBAAwB;AAAA,EACxB,oBAAoB;AAAA,EACpB,MAAM;AAAA,EACN,eAAe;AAAA,EACf,eAAe;AAAA,EACf,iBAAiB;AAAA,EACjB,MAAM;AAAA,EACN,UAAU;AAAA,EACV,eAAe;AAAA,EACf,OAAO;AAAA,EACP,UAAU;AAAA,EACV,kBAAkB;AAAA,EAClB,mBAAmB;AAAA,EACnB,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,UAAU;AAAA,EACV,UAAU;AAAA,EACV,gBAAgB;AAAA,EAChB,mBAAmB;AAAA,EACnB,iBAAiB;AAAA,EACjB,cAAc;AAAA,EACd,aAAa;AAAA,EACb,iBAAiB;AAAA,EACjB,YAAY;AAAA,EACZ,cAAc;AAAA,EACd,iBAAiB;AAAA,EACjB,UAAU;AAAA,EACV,YAAY;AAAA,EACZ,aAAa;AAAA,EACb,cAAc;AAAA,EACd,oBAAoB;AAAA,EACpB,uBAAuB;AAAA,EACvB,UAAU;AAAA,EACV,YAAY;AAAA,EACZ,MAAM;AAAA,EACN,KAAK;AAAA,EACL,2BAA2B;AAAA,EAC3B,UAAU;AAAA,EACV,YAAY;AAAA,EACZ,aAAa;AACf;;;AC5CO,IAAM,aAAa;AAAA,EACxB,+BAA+B;AAAA,EAC/B,2BAA2B;AAAA,EAC3B,4BAA4B;AAC9B;;;ACLO,IAAM,YAAY;AAAA,EACvB,gBAAgB;AAAA,EAChB,qBAAqB;AAAA,EACrB,YAAY;AAAA,EACZ,iBAAiB;AAAA,EACjB,wBAAwB;AAAA,EACxB,oBAAoB;AAAA,EACpB,MAAM;AAAA,EACN,eAAe;AAAA,EACf,eAAe;AAAA,EACf,iBAAiB;AAAA,EACjB,MAAM;AAAA,EACN,UAAU;AAAA,EACV,eAAe;AAAA,EACf,OAAO;AAAA,EACP,UAAU;AAAA,EACV,kBAAkB;AAAA,EAClB,mBAAmB;AAAA,EACnB,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,UAAU;AAAA,EACV,UAAU;AAAA,EACV,gBAAgB;AAAA,EAChB,mBAAmB;AAAA,EACnB,iBAAiB;AAAA,EACjB,cAAc;AAAA,EACd,aAAa;AAAA,EACb,iBAAiB;AAAA,EACjB,YAAY;AAAA,EACZ,cAAc;AAAA,EACd,iBAAiB;AAAA,EACjB,UAAU;AAAA,EACV,YAAY;AAAA,EACZ,aAAa;AAAA,EACb,cAAc;AAAA,EACd,oBAAoB;AAAA,EACpB,uBAAuB;AAAA,EACvB,UAAU;AAAA,EACV,YAAY;AAAA,EACZ,MAAM;AAAA,EACN,KAAK;AAAA,EACL,2BAA2B;AAAA,EAC3B,UAAU;AAAA,EACV,YAAY;AAAA,EACZ,aAAa;AACf;;;AC7CO
,IAAM,sBAAsB;AAAA,EACjC,sBAAsB;AACxB;;;ACDO,IAAM,wBAAwB;AAAA,EACnC,KAAK;AAAA,EACL,KAAK;AACP;;;ACPO,IAAM,YAAY;AAAA,EACvB,UAAU;AAAA,EACV,SAAS;AAAA,EACT,UAAU;AACZ;;;ACDO,IAAM,cAAc;AAAA,EACzB,MAAM;AAAA,EACN,UAAU;AAAA,EACV,WAAW;AACb;;;ACJO,IAAM,qBAAqB;AAAA,EAChC,aAAa;AAAA,EACb,MAAM;AACR;;;ACHO,IAAM,iBAAiB;AAAA,EAC5B,KAAK;AAAA,EACL,KAAK;AACP;;;ACHO,IAAM,eAAe;AAAA,EAC1B,aAAa;AAAA,EACb,gBAAgB;AAAA,EAChB,QAAQ;AACV;;;ACJO,IAAM,cAAc;AAAA,EACzB,SAAS;AAAA,EACT,iBAAiB;AAAA,EACjB,MAAM;AAAA,EACN,UAAU;AAAA,EACV,WAAW;AACb;;;ACNO,IAAM,uBAAuB;AAAA,EAClC,KAAK;AAAA,EACL,SAAS;AAAA,EACT,MAAM;AACR;;;ACDO,IAAM,yBAAyB;AAAA,EACpC,IAAI;AAAA,EACJ,OAAO;AAAA,EACP,OAAO;AAAA,EACP,OAAO;AAAA,EACP,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,KAAK;AAAA,EACL,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AACN;;;ACzGO,IAAM,wBAAwB;AAAA,EACnC,WAAW;AAAA,EACX,OAAO;AACT;;;ACJO,IAAM,mBAAmB;AAAA,EAC9B,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,WAAW;AAAA,EACX,OAAO;AACT;;;AjBkDO,IAAM,mBAAmB,CAC9B,kBACA,YACmB;AACnB,SAAO,cAAAC,QAAM,KAAK,kBAAkB,kBAAkB,OAAO;AAC/D;AA2BO,IAAM,gBAAgB,CAC3B,cACA,YACmB;AACnB,SAAO,cAAAC,QAAM,IAAI,kBAAkB,YAAY,IAAI,OAAO;AAC5D;;;ADZO,IAAM,oBAAN,cAAgC,YAAY;AAAA,EAA5C;AAAA;AACL,SAAS,OAAO;AAChB,SAAS,eAAqC;AAAA,MAC5C,WAAW;AAAA,MACX,aAAa;AAAA,MACb,gBAAgB;AAAA,MAChB,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,MAClB,eAAe;AAAA,MACf,mBAAmB;AAAA,MACnB,iBAAiB;AAAA,MACjB,cAAc;AAAA,IAChB;AAEA,SAAU,UAAU;AACpB;AAAA,SAAQ,YAAY;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMV,iBAAiB;AACzB,WAAO,MAAM,eAAe,eAAe;AAAA,EAC7C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAqEA,MAAM,WACJ,OACA,SACoC;AACpC,SAAK,eAAe;AAEpB,QAAI;AAEF,YAAM,UAAU,KAAK,0BAA0B,OAAO,OAAO;AAG7D,YAAM,WAAW,MAAM,iBAAiB,SAAS,KAAK,eAAe,CAAC;AAEtE,YAAM,eAAe,SAAS,KAAK;AAGnC,UAAI,SAAS,YAAY;AACvB,eAAO;AAAA,UACL,SAAS;AAAA,UACT,UAAU,KAAK;AAAA,UACf,MAAM;AAAA,YACJ,IAAI;AAAA,YACJ,MAAM;AAAA,YACN,QAAQ;AAAA,UACV;AAAA,UACA,KAAK,SAAS;AAAA,QAChB;AAAA,MACF;AAGA,aAAO,MAAM,KAAK,kBAAkB,YAAY;AAAA,IAClD,SAAS,OAAO;AAC
d,aAAO,KAAK,oBAAoB,KAAK;AAAA,IACvC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,cAA0D;AAC5E,SAAK,eAAe;AAEpB,QAAI;AAEF,YAAM,WAAW,MAAM,cAAiB,cAAc,KAAK,eAAe,CAAC;AAE3E,aAAO,KAAK,kBAAkB,SAAS,IAAI;AAAA,IAC7C,SAAS,OAAO;AACd,aAAO,KAAK,oBAAoB,KAAK;AAAA,IACvC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,0BACN,OACA,SACkB;AAElB,QAAI;AACJ,QAAI,MAAM,SAAS,OAAO;AACxB,iBAAW,MAAM;AAAA,IACnB,OAAO;AACL,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,UAAM,UAA4B;AAAA,MAChC,WAAW;AAAA,IACb;AAGA,QAAI,SAAS;AAEX,UAAI,QAAQ,UAAU;AAEpB,cAAM,eAAe,QAAQ,SAAS,SAAS,GAAG,IAC9C,QAAQ,WACR,GAAG,QAAQ,QAAQ;AACvB,gBAAQ,gBAAgB;AAAA,MAC1B;AAEA,UAAI,QAAQ,mBAAmB;AAC7B,gBAAQ,qBAAqB;AAAA,MAC/B;AAGA,UAAI,QAAQ,aAAa;AACvB,gBAAQ,iBAAiB;AACzB,YAAI,QAAQ,kBAAkB;AAC5B,kBAAQ,oBAAoB,QAAQ;AAAA,QACtC;AAAA,MACF;AAGA,UAAI,QAAQ,oBAAoB,QAAQ,iBAAiB,SAAS,GAAG;AACnE,gBAAQ,aAAa,QAAQ;AAC7B,gBAAQ,cAAc;AAAA,MACxB;AAGA,UAAI,QAAQ,eAAe;AACzB,gBAAQ,gBAAgB;AACxB,gBAAQ,gBAAgB;AACxB,gBAAQ,eAAe;AAAA,MACzB;AAGA,UAAI,QAAQ,mBAAmB;AAC7B,gBAAQ,qBAAqB;AAAA,MAC/B;AAGA,UAAI,QAAQ,iBAAiB;AAC3B,gBAAQ,mBAAmB;AAAA,MAC7B;AAGA,UAAI,QAAQ,cAAc;AACxB,gBAAQ,aAAa;AAAA,MACvB;AAGA,UAAI,QAAQ,YAAY;AACtB,gBAAQ,cAAc,QAAQ;AAAA,MAChC;AAIA,cAAQ,YAAY;AACpB,cAAQ,cAAc;AAAA,IACxB;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,kBAAkB,UAAiD;AAEzE,QAAI;AACJ,YAAQ,SAAS,QAAQ;AAAA,MACvB,KAAK;AACH,iBAAS;AACT;AAAA,MACF,KAAK;AACH,iBAAS;AACT;AAAA,MACF,KAAK;AACH,iBAAS;AACT;AAAA,MACF,KAAK;AACH,iBAAS;AACT;AAAA,MACF;AACE,iBAAS;AAAA,IACb;AAGA,QAAI,SAAS,WAAW,SAAS;AAC/B,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,KAAK;AAAA,QACf,OAAO;AAAA,UACL,MAAM;AAAA,UACN,SAAS,SAAS,SAAS;AAAA,QAC7B;AAAA,QACA,KAAK;AAAA,MACP;AAAA,IACF;AAEA,WAAO;AAAA,MACL,SAAS;AAAA,MACT,UAAU,KAAK;AAAA,MACf,MAAM;AAAA,QACJ,IAAI,SAAS;AAAA,QACb,MAAM,SAAS,QAAQ;AAAA,QACvB,YAAY,SAAS,eAAe,OAAO,SAAS,aAAa;AAAA,QACjE;AAAA,QACA,UAAU,SAAS;AAAA,QACnB,UAAU,SAAS,iBAAiB,SAAS,iBAAiB,MAAO;AAAA;AAAA,QACrE,UAAU,KAAK,gBAAgB,QAAQ;AAAA,QACvC,OAAO,KAAK,aAAa,QAAQ;AAAA,QACjC,YAAY,KAAK,kBAAkB,QAAQ;AAAA,QAC3C,SAAS,SAAS,WAAW;AAAA,QAC7B,UAAU;AAAA,UACR,UAAU,SAAS;AAAA,UACnB,UAAU,SAAS;AAAA,UACnB,mBAAmB,SAAS;AAAA,UAC5B,mBAAmB,SAAS;AAAA,QAC9B;AAAA,MACF;AAAA,MACA,KAAK;AAAA,IACP;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,gBAAgB,YAAwB;AAC9C,QAAI,CAAC,WAAW,cAAc,WAAW,WAAW,WAAW,GAAG;AAChE,aAAO;AAAA,IACT;AAGA,UAAM,aAAa,oBAAI,IAAY;AACnC,eAAW,WAAW,QAAQ,CAAC,cAAmC;AAChE,UAAI,UAAU,SAAS;AACrB,mBAAW,IAAI,UAAU,OAAO;AAAA,MAClC;AAAA,IACF,CAAC;AAED,QAAI,WAAW,SAAS,GAAG;AACzB,aAAO;AAAA,IACT;AAEA,WAAO,MAAM,KAAK,UAAU,EAAE,IAAI,CAAC,eAAe;AAAA,MAChD,IAAI;AAAA,MACJ,OAAO;AAAA;AAAA,IACT,EAAE;AAAA,EACJ;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,YAAwB;AAC3C,QAAI,CAAC,WAAW,SAAS,WAAW,MAAM,WAAW,GAAG;AACtD,aAAO;AAAA,IACT;AAEA,WAAO,WAAW,MAAM,IAAI,CAAC,UAA0B;AAAA,MACrD,MAAM,KAAK;AAAA,MACX,OAAO,KAAK,QAAQ;AAAA;AAAA,MACpB,KAAK,KAAK,MAAM;AAAA;AAAA,MAChB,YAAY,KAAK;AAAA,MACjB,SAAS,KAAK,WAAW;AAAA,IAC3B,EAAE;AAAA,EACJ;AAAA;AAAA;AAAA;AAAA,EAKQ,kBAAkB,YAAwB;AAChD,QAAI,CAAC,WAAW,cAAc,WAAW,WAAW,WAAW,GAAG;AAChE,aAAO;AAAA,IACT;AAEA,WAAO,WAAW,WAAW,IAAI,CAAC,eAAoC;AAAA,MACpE,MAAM,UAAU;AAAA,MAChB,OAAO,UAAU,QAAQ;AAAA;AAAA,MACzB,KAAK,UAAU,MAAM;AAAA;AAAA,MACrB,SAAS,UAAU,WAAW;AAAA,MAC9B,YAAY,UAAU;AAAA,MACtB,OAAO,UAAU,MAAM,IAAI,CAAC,UAA0B;AAAA,QACpD,MAAM,KAAK;AAAA,QACX,OAAO,KAAK,QAAQ;AAAA,QACpB,KAAK,KAAK,MAAM;AAAA,QAChB,YAAY,KAAK;AAAA,MACnB,EAAE;AAAA,IACJ,EAAE;AAAA,EACJ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAwCA,MAAM,iBACJ,SACA,WAC2B;AAC3B,SAAK,eAAe;AAEpB,QAAI,CAAC,KAAK,QAAQ,QAAQ;AACxB,YAAM,IAAI,MAAM,mCAAmC;AAAA,IACrD;AAIA,UAAM,aAA
a,SAAS,cAAc;AAC1C,UAAM,WAAW,SAAS,YAAY;AACtC,UAAM,QAAQ,GAAG,KAAK,SAAS,gBAAgB,UAAU,aAAa,QAAQ;AAG9E,UAAM,KAAK,IAAI,WAAAC,QAAU,OAAO;AAAA,MAC9B,SAAS;AAAA,QACP,eAAe,KAAK,OAAO;AAAA,MAC7B;AAAA,IACF,CAAC;AAED,QAAI,gBAA8D;AAClE,UAAM,YAAY,cAAc,KAAK,IAAI,CAAC,IAAI,KAAK,OAAO,EAAE,SAAS,EAAE,EAAE,UAAU,CAAC,CAAC;AAGrF,OAAG,GAAG,QAAQ,MAAM;AAClB,sBAAgB;AAChB,iBAAW,SAAS;AAAA,IACtB,CAAC;AAED,OAAG,GAAG,WAAW,CAAC,SAAiB;AACjC,UAAI;AACF,cAAM,UAAU,KAAK,MAAM,KAAK,SAAS,CAAC;AAI1C,YAAI,WAAW,SAAS;AAEtB,qBAAW,UAAU;AAAA,YACnB,MAAM;AAAA,YACN,SAAU,QAAuB;AAAA,UACnC,CAAC;AACD;AAAA,QACF;AAGA,YAAK,QAAsD,SAAS,SAAS;AAE3E,gBAAM,WAAW;AACjB,qBAAW,aAAa;AAAA,YACtB,WAAW,SAAS;AAAA,YACpB,WAAW,IAAI,KAAK,SAAS,UAAU,EAAE,YAAY;AAAA,UACvD,CAAC;AAAA,QACH,WAAY,QAAsD,SAAS,QAAQ;AAEjF,gBAAM,UAAU;AAEhB,qBAAW,eAAe;AAAA,YACxB,MAAM;AAAA,YACN,MAAM,QAAQ;AAAA,YACd,SAAS,QAAQ;AAAA,YACjB,YAAY,QAAQ;AAAA,YACpB,OAAO,QAAQ,MAAM,IAAI,CAAC,UAAyB;AAAA,cACjD,MAAM,KAAK;AAAA,cACX,OAAO,KAAK,QAAQ;AAAA;AAAA,cACpB,KAAK,KAAK,MAAM;AAAA,cAChB,YAAY,KAAK;AAAA,YACnB,EAAE;AAAA,YACF,MAAM;AAAA,UACR,CAAC;AAAA,QACH,WAAY,QAAsD,SAAS,eAAe;AAExF,gBAAM,UAAU;AAChB,qBAAW,aAAa;AAAA,YACtB,YAAY;AAAA,YACZ,sBAAsB,QAAQ;AAAA,YAC9B,wBAAwB,QAAQ;AAAA,UAClC,CAAC;AAAA,QACH;AAAA,MACF,SAAS,OAAO;AACd,mBAAW,UAAU;AAAA,UACnB,MAAM;AAAA,UACN,SAAS;AAAA,UACT,SAAS;AAAA,QACX,CAAC;AAAA,MACH;AAAA,IACF,CAAC;AAED,OAAG,GAAG,SAAS,CAAC,UAAiB;AAC/B,iBAAW,UAAU;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,MAAM;AAAA,QACf,SAAS;AAAA,MACX,CAAC;AAAA,IACH,CAAC;AAED,OAAG,GAAG,SAAS,CAAC,MAAc,WAAmB;AAC/C,sBAAgB;AAChB,iBAAW,UAAU,MAAM,OAAO,SAAS,CAAC;AAAA,IAC9C,CAAC;AAGD,UAAM,IAAI,QAAc,CAAC,SAAS,WAAW;AAC3C,YAAM,UAAU,WAAW,MAAM;AAC/B,eAAO,IAAI,MAAM,8BAA8B,CAAC;AAAA,MAClD,GAAG,GAAK;AAER,SAAG,KAAK,QAAQ,MAAM;AACpB,qBAAa,OAAO;AACpB,gBAAQ;AAAA,MACV,CAAC;AAED,SAAG,KAAK,SAAS,CAAC,UAAU;AAC1B,qBAAa,OAAO;AACpB,eAAO,KAAK;AAAA,MACd,CAAC;AAAA,IACH,CAAC;AAGD,WAAO;AAAA,MACL,IAAI;AAAA,MACJ,UAAU,KAAK;AAAA,MACf,WAAW,oBAAI,KAAK;AAAA,MACpB,WAAW,MAAM;AAAA,MACjB,WAAW,OAAO,UAAsB;AACtC,YAAI,kBAAkB,QAAQ;AAC5B,gBAAM,IAAI,MAAM,iCAAiC,aAAa,EAAE;AAAA,QAClE;AAEA,YAAI,GAAG,eAAe,WAAAA,QAAU,MAAM;AACpC,gBAAM,IAAI,MAAM,uBAAuB;AAAA,QACzC;AAGA,cAAM,cAAc,MAAM,KAAK,SAAS,QAAQ;AAGhD,WAAG;AAAA,UACD,KAAK,UAAU;AAAA,YACb,YAAY;AAAA,UACd,CAAC;AAAA,QACH;AAGA,YAAI,MAAM,QAAQ;AAChB,aAAG;AAAA,YACD,KAAK,UAAU;AAAA,cACb,mBAAmB;AAAA,YACrB,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF;AAAA,MACA,OAAO,YAAY;AACjB,YAAI,kBAAkB,YAAY,kBAAkB,WAAW;AAC7D;AAAA,QACF;AAEA,wBAAgB;AAGhB,YAAI,GAAG,eAAe,WAAAA,QAAU,MAAM;AACpC,aAAG;AAAA,YACD,KAAK,UAAU;AAAA,cACb,mBAAmB;AAAA,YACrB,CAAC;AAAA,UACH;AAAA,QACF;AAGA,eAAO,IAAI,QAAc,CAAC,YAAY;AACpC,gBAAM,UAAU,WAAW,MAAM;AAC/B,eAAG,UAAU;AACb,oBAAQ;AAAA,UACV,GAAG,GAAI;AAEP,aAAG,MAAM;AAET,aAAG,KAAK,SAAS,MAAM;AACrB,yBAAa,OAAO;AACpB,4BAAgB;AAChB,oBAAQ;AAAA,UACV,CAAC;AAAA,QACH,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACF;AAKO,SAAS,wBAAwB,QAA2C;AACjF,QAAM,UAAU,IAAI,kBAAkB;AACtC,UAAQ,WAAW,MAAM;AACzB,SAAO;AACT;;;AmBzqBA,IAAAC,gBAA0C;AAC1C,IAAAC,aAAsB;AAyGf,IAAM,kBAAN,cAA8B,YAAY;AAAA,EAA1C;AAAA;AACL,SAAS,OAAO;AAChB,SAAS,eAAqC;AAAA,MAC5C,WAAW;AAAA,MACX,aAAa;AAAA,MACb,gBAAgB;AAAA,MAChB,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,MAClB,eAAe;AAAA,MACf,mBAAmB;AAAA,MACnB,iBAAiB;AAAA,MACjB,cAAc;AAAA,IAChB;AAGA,SAAU,UAAU;AACpB,SAAQ,YAAY;AAAA;AAAA,EAEpB,WAAW,QAA8B;AACvC,UAAM,WAAW,MAAM;AAEvB,SAAK,SAAS,cAAAC,QAAM,OAAO;AAAA,MACzB,SAAS,OAAO,WAAW,KAAK;AAAA,MAChC,SAAS,OAAO,WAAW;AAAA,MAC3B,SAAS;AAAA,QACP,eAAe,SAAS,OAAO,MAAM;AAAA,QACrC,gBAAgB;AAAA,QAChB,GAAG,OAAO;AAAA,MACZ;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;A
AAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA6CA,MAAM,WACJ,OACA,SACoC;AACpC,SAAK,eAAe;AAEpB,QAAI;AAEF,YAAM,SAAS,KAAK,yBAAyB,OAAO;AAEpD,UAAI;AAEJ,UAAI,MAAM,SAAS,OAAO;AAExB,mBAAW,MAAM,KAAK,OAAQ;AAAA,UAC5B;AAAA,UACA,EAAE,KAAK,MAAM,IAAI;AAAA,UACjB,EAAE,OAAO;AAAA,QACX,EAAE,KAAK,CAAC,QAAQ,IAAI,IAAI;AAAA,MAC1B,WAAW,MAAM,SAAS,QAAQ;AAEhC,mBAAW,MAAM,KAAK,OAAQ,KAAuB,WAAW,MAAM,MAAM;AAAA,UAC1E;AAAA,UACA,SAAS;AAAA,YACP,gBAAgB;AAAA,UAClB;AAAA,QACF,CAAC,EAAE,KAAK,CAAC,QAAQ,IAAI,IAAI;AAAA,MAC3B,OAAO;AACL,cAAM,IAAI;AAAA,UACR;AAAA,QACF;AAAA,MACF;AAGA,aAAO,KAAK,kBAAkB,QAAQ;AAAA,IACxC,SAAS,OAAO;AACd,aAAO,KAAK,oBAAoB,KAAK;AAAA,IACvC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAM,cAAc,cAA0D;AAC5E,SAAK,eAAe;AAIpB,WAAO;AAAA,MACL,SAAS;AAAA,MACT,UAAU,KAAK;AAAA,MACf,OAAO;AAAA,QACL,MAAM;AAAA,QACN,SACE;AAAA,MACJ;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,yBAAyB,SAA4D;AAC3F,UAAM,SAAwC,CAAC;AAE/C,QAAI,CAAC,SAAS;AACZ,aAAO;AAAA,IACT;AAGA,QAAI,QAAQ,UAAU;AACpB,aAAO,WAAW,QAAQ;AAAA,IAC5B;AAEA,QAAI,QAAQ,mBAAmB;AAC7B,aAAO,kBAAkB;AAAA,IAC3B;AAGA,QAAI,QAAQ,aAAa;AACvB,aAAO,UAAU;AAAA,IACnB;AAGA,QAAI,QAAQ,oBAAoB,QAAQ,iBAAiB,SAAS,GAAG;AACnE,aAAO,WAAW,QAAQ;AAAA,IAC5B;AAGA,QAAI,QAAQ,eAAe;AACzB,aAAO,YAAY;AAAA,IACrB;AAGA,QAAI,QAAQ,mBAAmB;AAC7B,aAAO,YAAY;AAAA,IACrB;AAGA,QAAI,QAAQ,iBAAiB;AAC3B,aAAO,kBAAkB;AAAA,IAC3B;AAGA,QAAI,QAAQ,cAAc;AACxB,aAAO,SAAS,CAAC,OAAO,KAAK;AAAA,IAC/B;AAGA,QAAI,QAAQ,YAAY;AACtB,aAAO,WAAW,QAAQ;AAAA,IAC5B;AAGA,WAAO,YAAY;AACnB,WAAO,aAAa;AACpB,WAAO,eAAe;AAEtB,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,kBAAkB,UAAuD;AAE/E,UAAM,UAAU,SAAS,QAAQ,WAAW,CAAC;AAC7C,UAAM,cAAc,SAAS,eAAe,CAAC;AAE7C,QAAI,CAAC,aAAa;AAChB,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,KAAK;AAAA,QACf,OAAO;AAAA,UACL,MAAM;AAAA,UACN,SAAS;AAAA,QACX;AAAA,QACA,KAAK;AAAA,MACP;AAAA,IACF;AAEA,WAAO;AAAA,MACL,SAAS;AAAA,MACT,UAAU,KAAK;AAAA,MACf,MAAM;AAAA,QACJ,IAAI,SAAS,UAAU,cAAc;AAAA,QACrC,MAAM,YAAY,cAAc;AAAA,QAChC,YAAY,YAAY;AAAA,QACxB,QAAQ;AAAA;AAAA,QACR,UAAU,SAAS,qBAAqB;AAAA,QACxC,UAAU,SAAS,UAAU;AAAA,QAC7B,UAAU,KAAK,gBAAgB,QAAQ;AAAA,QACvC,OAAO,KAAK,aAAa,WAAW;AAAA,QACpC,YAAY,KAAK,kBAAkB,QAAQ;AAAA,QAC3C,SAAS,KAAK,eAAe,WAAW;AAAA,QACxC,UAAU;AAAA,UACR,WAAW,SAAS,UAAU;AAAA,UAC9B,UAAU,SAAS,UAAU;AAAA,UAC7B,WAAW,SAAS,QAAQ;AAAA,UAC5B,SAAS,SAAS,QAAQ;AAAA,UAC1B,QAAQ,SAAS,QAAQ;AAAA,QAC3B;AAAA,MACF;AAAA,MACA,KAAK;AAAA,IACP;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,gBAAgB,UAA4B;AAClD,UAAM,aAAa,SAAS,QAAQ;AAEpC,QAAI,CAAC,cAAc,WAAW,WAAW,GAAG;AAC1C,aAAO;AAAA,IACT;AAGA,UAAM,aAAa,oBAAI,IAAY;AACnC,eAAW,QAAQ,CAAC,cAAqD;AACvE,UAAI,UAAU,YAAY,QAAW;AACnC,mBAAW,IAAI,UAAU,OAAO;AAAA,MAClC;AAAA,IACF,CAAC;AAED,QAAI,WAAW,SAAS,GAAG;AACzB,aAAO;AAAA,IACT;AAEA,WAAO,MAAM,KAAK,UAAU,EAAE,IAAI,CAAC,eAAe;AAAA,MAChD,IAAI,UAAU,SAAS;AAAA,MACvB,OAAO,WAAW,SAAS;AAAA,IAC7B,EAAE;AAAA,EACJ;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,aAAkE;AACrF,QAAI,CAAC,YAAY,SAAS,YAAY,MAAM,WAAW,GAAG;AACxD,aAAO;AAAA,IACT;AAEA,WAAO,YAAY,MAAM;AAAA,MACvB,CAAC,UAAwE;AAAA,QACvE,MAAM,KAAK,QAAQ;AAAA,QACnB,OAAO,KAAK,SAAS;AAAA,QACrB,KAAK,KAAK,OAAO;AAAA,QACjB,YAAY,KAAK;AAAA,QACjB,SAAS;AAAA;AAAA,MACX;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,kBAAkB,UAA4B;AACpD,UAAM,aAAa,SAAS,QAAQ;AAEpC,QAAI,CAAC,cAAc,WAAW,WAAW,GAAG;AAC1C,aAAO;AAAA,IACT;AAEA,WAAO,WAAW,IAAI,CAAC,eAAsD;AAAA,MAC3E,MAAM,UAAU,cAAc;AAAA,MAC9B,OAAO,UAAU,SAAS;AAAA,MAC1B,KAAK,UAAU,OAAO;AAAA,MACtB,SAAS,UAAU,SAAS,SAAS;AAAA,MACrC,YAAY,UAAU;AAAA,MACtB,OAAO,UAAU,OAAO,IAAI,CAAC,UAAU;AAAA,QACrC,MAAM,KAAK,QAAQ;AAAA,QACnB,OAAO,KAAK,SAAS;AAAA,QACrB,KAAK,KAAK,OAAO;AAAA,QACjB,YAAY,KAAK;AAAA,MACnB,EAAE;AAAA,IACJ,EAAE;AAAA,EACJ;AAAA;AAAA;AAAA;AAAA,EAKQ,eACN,aACoB;AACpB,QA
AI,CAAC,YAAY,aAAa,YAAY,UAAU,WAAW,GAAG;AAChE,aAAO;AAAA,IACT;AAGA,WAAO,YAAY,UAChB,IAAI,CAAC,YAAY,QAAQ,OAAO,EAChC,OAAO,OAAO,EACd,KAAK,GAAG;AAAA,EACb;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA0CA,MAAM,iBACJ,SACA,WAC2B;AAC3B,SAAK,eAAe;AAGpB,UAAM,SAAS,IAAI,gBAAgB;AAEnC,QAAI,SAAS,SAAU,QAAO,OAAO,YAAY,QAAQ,QAAQ;AACjE,QAAI,SAAS,WAAY,QAAO,OAAO,eAAe,QAAQ,WAAW,SAAS,CAAC;AACnF,QAAI,SAAS,SAAU,QAAO,OAAO,YAAY,QAAQ,SAAS,SAAS,CAAC;AAC5E,QAAI,SAAS,SAAU,QAAO,OAAO,YAAY,QAAQ,QAAQ;AACjE,QAAI,SAAS,kBAAmB,QAAO,OAAO,mBAAmB,MAAM;AACvE,QAAI,SAAS,YAAa,QAAO,OAAO,WAAW,MAAM;AACzD,QAAI,SAAS,eAAgB,QAAO,OAAO,mBAAmB,MAAM;AACpE,QAAI,SAAS,cAAe,QAAO,OAAO,aAAa,MAAM;AAC7D,QAAI,SAAS,kBAAmB,QAAO,OAAO,aAAa,MAAM;AACjE,QAAI,SAAS,gBAAiB,QAAO,OAAO,mBAAmB,MAAM;AACrE,QAAI,SAAS,aAAc,QAAO,OAAO,UAAU,KAAK;AACxD,QAAI,SAAS,oBAAoB,QAAQ,iBAAiB,SAAS,GAAG;AACpE,aAAO,OAAO,YAAY,QAAQ,iBAAiB,KAAK,GAAG,CAAC;AAAA,IAC9D;AAEA,UAAM,QAAQ,GAAG,KAAK,SAAS,IAAI,OAAO,SAAS,CAAC;AAGpD,UAAM,KAAK,IAAI,WAAAC,QAAU,OAAO;AAAA,MAC9B,SAAS;AAAA,QACP,eAAe,SAAS,KAAK,OAAQ,MAAM;AAAA,MAC7C;AAAA,IACF,CAAC;AAED,QAAI,gBAA8D;AAClE,UAAM,YAAY,YAAY,KAAK,IAAI,CAAC,IAAI,KAAK,OAAO,EAAE,SAAS,EAAE,EAAE,UAAU,CAAC,CAAC;AAGnF,OAAG,GAAG,QAAQ,MAAM;AAClB,sBAAgB;AAChB,iBAAW,SAAS;AAAA,IACtB,CAAC;AAED,OAAG,GAAG,WAAW,CAAC,SAAiB;AACjC,UAAI;AACF,cAAM,UAAU,KAAK,MAAM,KAAK,SAAS,CAAC;AAG1C,YAAI,QAAQ,SAAS,WAAW;AAE9B,gBAAM,UAAU,QAAQ,QAAQ,aAAa,CAAC;AAE9C,cAAI,SAAS;AACX,kBAAM,aAAa,QAAQ;AAC3B,kBAAM,UAAU,QAAQ;AACxB,kBAAM,QAAQ,QAAQ,OAAO,IAAI,CAAC,UAAU;AAAA,cAC1C,MAAM,KAAK;AAAA,cACX,OAAO,KAAK;AAAA,cACZ,KAAK,KAAK;AAAA,cACV,YAAY,KAAK;AAAA,YACnB,EAAE;AAEF,uBAAW,eAAe;AAAA,cACxB,MAAM;AAAA,cACN,MAAM;AAAA,cACN;AAAA,cACA;AAAA,cACA,YAAY,QAAQ;AAAA,cACpB,MAAM;AAAA,YACR,CAAC;AAAA,UACH;AAAA,QACF,WAAW,QAAQ,SAAS,gBAAgB;AAE1C,qBAAW,aAAa,OAAO;AAAA,QACjC,WAAW,QAAQ,SAAS,YAAY;AAEtC,qBAAW,aAAa,OAAO;AAAA,QACjC;AAAA,MACF,SAAS,OAAO;AACd,mBAAW,UAAU;AAAA,UACnB,MAAM;AAAA,UACN,SAAS;AAAA,UACT,SAAS;AAAA,QACX,CAAC;AAAA,MACH;AAAA,IACF,CAAC;AAED,OAAG,GAAG,SAAS,CAAC,UAAiB;AAC/B,iBAAW,UAAU;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,MAAM;AAAA,QACf,SAAS;AAAA,MACX,CAAC;AAAA,IACH,CAAC;AAED,OAAG,GAAG,SAAS,CAAC,MAAc,WAAmB;AAC/C,sBAAgB;AAChB,iBAAW,UAAU,MAAM,OAAO,SAAS,CAAC;AAAA,IAC9C,CAAC;AAGD,UAAM,IAAI,QAAc,CAAC,SAAS,WAAW;AAC3C,YAAM,UAAU,WAAW,MAAM;AAC/B,eAAO,IAAI,MAAM,8BAA8B,CAAC;AAAA,MAClD,GAAG,GAAK;AAER,SAAG,KAAK,QAAQ,MAAM;AACpB,qBAAa,OAAO;AACpB,gBAAQ;AAAA,MACV,CAAC;AAED,SAAG,KAAK,SAAS,CAAC,UAAU;AAC1B,qBAAa,OAAO;AACpB,eAAO,KAAK;AAAA,MACd,CAAC;AAAA,IACH,CAAC;AAGD,WAAO;AAAA,MACL,IAAI;AAAA,MACJ,UAAU,KAAK;AAAA,MACf,WAAW,oBAAI,KAAK;AAAA,MACpB,WAAW,MAAM;AAAA,MACjB,WAAW,OAAO,UAAsB;AACtC,YAAI,kBAAkB,QAAQ;AAC5B,gBAAM,IAAI,MAAM,iCAAiC,aAAa,EAAE;AAAA,QAClE;AAEA,YAAI,GAAG,eAAe,WAAAA,QAAU,MAAM;AACpC,gBAAM,IAAI,MAAM,uBAAuB;AAAA,QACzC;AAGA,WAAG,KAAK,MAAM,IAAI;AAGlB,YAAI,MAAM,QAAQ;AAChB,aAAG,KAAK,KAAK,UAAU,EAAE,MAAM,cAAc,CAAC,CAAC;AAAA,QACjD;AAAA,MACF;AAAA,MACA,OAAO,YAAY;AACjB,YAAI,kBAAkB,YAAY,kBAAkB,WAAW;AAC7D;AAAA,QACF;AAEA,wBAAgB;AAGhB,YAAI,GAAG,eAAe,WAAAA,QAAU,MAAM;AACpC,aAAG,KAAK,KAAK,UAAU,EAAE,MAAM,cAAc,CAAC,CAAC;AAAA,QACjD;AAGA,eAAO,IAAI,QAAc,CAAC,YAAY;AACpC,gBAAM,UAAU,WAAW,MAAM;AAC/B,eAAG,UAAU;AACb,oBAAQ;AAAA,UACV,GAAG,GAAI;AAEP,aAAG,MAAM;AAET,aAAG,KAAK,SAAS,MAAM;AACrB,yBAAa,OAAO;AACpB,4BAAgB;AAChB,oBAAQ;AAAA,UACV,CAAC;AAAA,QACH,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACF;AAKO,SAAS,sBAAsB,QAAyC;AAC7E,QAAM,UAAU,IAAI,gBAAgB;AACpC,UAAQ,WAAW,MAAM;AACzB,SAAO;AACT;;;ACtpBA,IAAAC,gBAAkB;;;ACKlB,IAAAC,gBAAkB;AAq0BX,IAAM,uBAA
uB,CAClC,eACA,YACmB;AACnB,SAAO,cAAAC,QAAM,KAAK,mBAAmB,eAAe,OAAO;AAC7D;AAKO,IAAM,oBAAoB,CAC/B,IACA,YACmB;AACnB,SAAO,cAAAA,QAAM,IAAI,mBAAmB,EAAE,IAAI,OAAO;AACnD;AA0BO,IAAM,0BAA0B,CACrC,IACA,QACA,YACmB;AACnB,SAAO,cAAAC,QAAM,IAAI,mBAAmB,EAAE,UAAU;AAAA,IAC9C,GAAG;AAAA,IACH,QAAQ,EAAE,GAAG,QAAQ,GAAG,SAAS,OAAO;AAAA,EAC1C,CAAC;AACH;;;AD/xBO,IAAM,kBAAN,cAA8B,YAAY;AAAA,EAA1C;AAAA;AACL,SAAS,OAAO;AAChB,SAAS,eAAqC;AAAA,MAC5C,WAAW;AAAA;AAAA,MACX,aAAa;AAAA,MACb,gBAAgB;AAAA,MAChB,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,MAClB,eAAe;AAAA,MACf,mBAAmB;AAAA,MACnB,iBAAiB;AAAA,MACjB,cAAc;AAAA,IAChB;AAGA,SAAU,UAAU;AAAA;AAAA;AAAA,EAEpB,WAAW,QAAoD;AAC7D,UAAM,WAAW,MAAM;AAEvB,SAAK,SAAS,OAAO,UAAU;AAC/B,SAAK,UACH,OAAO,WAAW,WAAW,KAAK,MAAM;AAAA,EAC5C;AAAA;AAAA;AAAA;AAAA;AAAA,EAMU,iBAAiB;AACzB,WAAO,MAAM,eAAe,2BAA2B;AAAA,EACzD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAM,WACJ,OACA,SACoC;AACpC,SAAK,eAAe;AAEpB,QAAI,MAAM,SAAS,OAAO;AACxB,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,KAAK;AAAA,QACf,OAAO;AAAA,UACL,MAAM;AAAA,UACN,SAAS;AAAA,QACX;AAAA,MACF;AAAA,IACF;AAEA,QAAI;AACF,YAAM,uBAA+C;AAAA,QACnD,aAAc,SAAS,UAAU,eAA0B;AAAA,QAC3D,aAAc,SAAS,UAAU,eAA0B;AAAA,QAC3D,QAAQ,SAAS,YAAY;AAAA,QAC7B,aAAa,CAAC,MAAM,GAAG;AAAA,QACvB,YAAY,KAAK,6BAA6B,OAAO;AAAA,MACvD;AAGA,YAAM,WAAW,MAAM;AAAA,QACrB;AAAA,QACA,KAAK,eAAe;AAAA,MACtB;AAEA,YAAM,gBAAgB,SAAS;AAE/B,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,KAAK;AAAA,QACf,MAAM;AAAA,UACJ,IAAI,cAAc,MAAM,MAAM,GAAG,EAAE,IAAI,KAAK;AAAA,UAC5C,MAAM;AAAA;AAAA,UACN,QAAQ,KAAK,gBAAgB,cAAc,MAAM;AAAA,UACjD,UAAU,cAAc;AAAA,UACxB,WAAW,cAAc;AAAA,QAC3B;AAAA,QACA,KAAK;AAAA,MACP;AAAA,IACF,SAAS,OAAO;AACd,aAAO,KAAK,oBAAoB,KAAK;AAAA,IACvC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,cAAc,cAA0D;AAC5E,SAAK,eAAe;AAEpB,QAAI;AAEF,YAAM,iBAAiB,MAAM,kBAAkB,cAAc,KAAK,eAAe,CAAC;AAElF,YAAM,gBAAgB,eAAe;AACrC,YAAM,SAAS,KAAK,gBAAgB,cAAc,MAAM;AAExD,UAAI,WAAW,aAAa;AAC1B,eAAO;AAAA,UACL,SAAS;AAAA,UACT,UAAU,KAAK;AAAA,UACf,MAAM;AAAA,YACJ,IAAI;AAAA,YACJ,MAAM;AAAA,YACN;AAAA,YACA,UAAU,cAAc;AAAA,YACxB,WAAW,cAAc;AAAA,UAC3B;AAAA,UACA,KAAK;AAAA,QACP;AAAA,MACF;AAGA,UAAI,CAAC,cAAc,OAAO,OAAO;AAC/B,eAAO;AAAA,UACL,SAAS;AAAA,UACT,UAAU,KAAK;AAAA,UACf,OAAO;AAAA,YACL,MAAM;AAAA,YACN,SAAS;AAAA,UACX;AAAA,UACA,KAAK;AAAA,QACP;AAAA,MACF;AAEA,YAAM,gBAAgB,MAAM;AAAA,QAC1B;AAAA,QACA;AAAA,QACA,KAAK,eAAe;AAAA,MACtB;AACA,YAAM,QAAQ,cAAc,MAAM,UAAU,CAAC;AAG7C,YAAM,aAAa,MAAM,KAAK,CAAC,SAAc,KAAK,SAAS,eAAe;AAE1E,UAAI,CAAC,YAAY,OAAO,YAAY;AAClC,eAAO;AAAA,UACL,SAAS;AAAA,UACT,UAAU,KAAK;AAAA,UACf,OAAO;AAAA,YACL,MAAM;AAAA,YACN,SAAS;AAAA,UACX;AAAA,UACA,KAAK;AAAA,QACP;AAAA,MACF;AAGA,YAAM,kBAAkB,MAAM,cAAAC,QAAM,IAAI,WAAW,MAAM,UAAU;AACnE,YAAM,oBAAoB,gBAAgB;AAE1C,aAAO,KAAK,kBAAkB,eAAe,iBAAiB;AAAA,IAChE,SAAS,OAAO;AACd,aAAO,KAAK,oBAAoB,KAAK;AAAA,IACvC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,6BAA6B,SAAsD;AACzF,UAAM,aAAkB;AAAA,MACtB,4BAA4B,SAAS,kBAAkB;AAAA,MACvD,iBAAiB;AAAA,MACjB,qBAAqB;AAAA,IACvB;AAEA,QAAI,SAAS,aAAa;AACxB,iBAAW,qBAAqB;AAChC,UAAI,QAAQ,kBAAkB;AAC5B,mBAAW,cAAc;AAAA,UACvB,UAAU;AAAA,YACR,UAAU;AAAA,YACV,UAAU,QAAQ;AAAA,UACpB;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,QAAI,SAAS,oBAAoB,QAAQ,iBAAiB,SAAS,GAAG;AACpE,iBAAW,mBAAmB;AAAA,QAC5B,SAAS,QAAQ,iBAAiB,KAAK,GAAG;AAAA,MAC5C;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,gBAAgB,QAA8D;AACpF,UAAM,YAAY,QAAQ,SAAS,EAAE,YAAY,KAAK;AAEtD,QAAI,UAAU,SAAS,WAAW,EAAG,QAAO;AAC5C,QAAI,UAAU,SAAS,SAAS,EAAG,QAAO;AAC1C,QAAI,UAAU,SAAS,YAAY,EAAG,QAAO;AAC7C,QAAI,UAAU,SAAS,QAAQ,EAAG,QAAO;AAEzC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,kBACN,eACA,mBAC2B;AAC3B,UAAM,kBAAkB,kBAAkB,6BAA6B,CAAC;AACxE,UAAM,oBAAoB,kBAAkB,qBAAqB,CAAC;AAGlE,UAAM,WACJ,gBAAgB,IAAI,CAAC,W
AAgB,OAAO,WAAW,OAAO,OAAO,EAAE,KAAK,GAAG,KAAK;AAGtF,UAAM,QAAQ,kBAAkB;AAAA,MAAQ,CAAC,YACtC,OAAO,QAAQ,CAAC,GAAG,SAAS,CAAC,GAAG,IAAI,CAAC,UAAe;AAAA,QACnD,MAAM,KAAK;AAAA,QACX,OAAO,KAAK,gBAAgB;AAAA;AAAA,QAC5B,MAAM,KAAK,gBAAgB,KAAK,mBAAmB;AAAA,QACnD,YAAY,KAAK;AAAA,QACjB,SAAS,OAAO,YAAY,SAAY,OAAO,QAAQ,SAAS,IAAI;AAAA,MACtE,EAAE;AAAA,IACJ;AAGA,UAAM,WACJ,kBAAkB,SAAS,KAAK,kBAAkB,CAAC,EAAE,YAAY,SAC7D,MAAM;AAAA,MACJ,IAAI;AAAA,QACF,kBAAkB,IAAI,CAAC,MAAW,EAAE,OAAO,EAAE,OAAO,CAAC,MAAW,MAAM,MAAS;AAAA,MACjF;AAAA,IACF,EAAE,IAAI,CAAC,eAAwB;AAAA,MAC7B,IAAI,OAAO,SAAS;AAAA,MACpB,OAAO,WAAW,SAAS;AAAA,IAC7B,EAAE,IACF;AAEN,WAAO;AAAA,MACL,SAAS;AAAA,MACT,UAAU,KAAK;AAAA,MACf,MAAM;AAAA,QACJ,IAAI,cAAc,MAAM,MAAM,GAAG,EAAE,IAAI,KAAK;AAAA,QAC5C,MAAM;AAAA,QACN,YAAY,kBAAkB,CAAC,GAAG,QAAQ,CAAC,GAAG;AAAA,QAC9C,QAAQ;AAAA,QACR,UAAU,cAAc;AAAA,QACxB,UAAU,kBAAkB,WAAW,kBAAkB,WAAW,MAAW;AAAA,QAC/E;AAAA,QACA,OAAO,MAAM,SAAS,IAAI,QAAQ;AAAA,QAClC,WAAW,cAAc;AAAA,QACzB,aAAa,cAAc;AAAA,MAC7B;AAAA,MACA,KAAK;AAAA,QACH;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;AAKO,SAAS,sBACd,QACiB;AACjB,QAAM,UAAU,IAAI,gBAAgB;AACpC,UAAQ,WAAW,MAAM;AACzB,SAAO;AACT;;;AE5XA,IAAAC,gBAAkB;;;ACIlB,IAAAC,gBAAkB;AAmCX,IAAM,sBAAsB,CAGjC,4BACA,YACmB;AACnB,QAAM,WAAW,IAAI,SAAS;AAC9B,WAAS,OAAO,QAAQ,2BAA2B,IAAI;AACvD,WAAS,OAAO,SAAS,2BAA2B,KAAK;AACzD,MAAI,2BAA2B,aAAa,QAAW;AACrD,aAAS,OAAO,YAAY,2BAA2B,QAAQ;AAAA,EACjE;AACA,MAAI,2BAA2B,WAAW,QAAW;AACnD,aAAS,OAAO,UAAU,2BAA2B,MAAM;AAAA,EAC7D;AACA,MAAI,2BAA2B,oBAAoB,QAAW;AAC5D,aAAS,OAAO,mBAAmB,2BAA2B,eAAe;AAAA,EAC/E;AACA,MAAI,2BAA2B,gBAAgB,QAAW;AACxD,aAAS,OAAO,eAAe,2BAA2B,YAAY,SAAS,CAAC;AAAA,EAClF;AACA,MAAI,2BAA2B,YAAY,QAAW;AACpD,+BAA2B,QAAQ,QAAQ,CAAC,UAAU,SAAS,OAAO,WAAW,KAAK,CAAC;AAAA,EACzF;AACA,MAAI,2BAA2B,4BAA4B,QAAW;AACpE,+BAA2B,wBAAwB;AAAA,MAAQ,CAAC,UAC1D,SAAS,OAAO,2BAA2B,KAAK;AAAA,IAClD;AAAA,EACF;AACA,MACE,2BAA2B,WAAW,UACtC,2BAA2B,WAAW,MACtC;AACA,aAAS,OAAO,UAAU,2BAA2B,OAAO,SAAS,CAAC;AAAA,EACxE;AACA,MACE,2BAA2B,sBAAsB,UACjD,2BAA2B,sBAAsB,MACjD;AACA,aAAS;AAAA,MACP;AAAA,MACA,OAAO,2BAA2B,sBAAsB,WACpD,KAAK,UAAU,2BAA2B,iBAAiB,IAC3D,2BAA2B;AAAA,IACjC;AAAA,EACF;AACA,MAAI,2BAA2B,wBAAwB,QAAW;AAChE,+BAA2B,oBAAoB;AAAA,MAAQ,CAAC,UACtD,SAAS,OAAO,uBAAuB,KAAK;AAAA,IAC9C;AAAA,EACF;AACA,MAAI,2BAA2B,6BAA6B,QAAW;AACrE,+BAA2B,yBAAyB;AAAA,MAAQ,CAAC,UAC3D,SAAS,OAAO,4BAA4B,KAAK;AAAA,IACnD;AAAA,EACF;AAEA,SAAO,cAAAC,QAAM,KAAK,yBAAyB,UAAU,OAAO;AAC9D;;;ADMO,IAAM,uBAAN,cAAmC,YAAY;AAAA,EAA/C;AAAA;AACL,SAAS,OAAO;AAChB,SAAS,eAAqC;AAAA,MAC5C,WAAW;AAAA;AAAA,MACX,aAAa;AAAA;AAAA,MACb,gBAAgB;AAAA,MAChB,mBAAmB;AAAA;AAAA,MACnB,kBAAkB;AAAA;AAAA,MAClB,eAAe;AAAA,MACf,mBAAmB;AAAA,MACnB,iBAAiB;AAAA,MACjB,cAAc;AAAA,IAChB;AAEA,SAAU,UAAU;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMV,iBAAiB;AACzB,WAAO,MAAM,eAAe,iBAAiB,CAAC,WAAW,UAAU,MAAM,EAAE;AAAA,EAC7E;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAgBA,MAAM,WACJ,OACA,SACoC;AACpC,SAAK,eAAe;AAEpB,QAAI;AAEF,UAAI;AACJ,UAAI,WAAW;AAEf,UAAI,MAAM,SAAS,OAAO;AACxB,cAAMC,YAAW,MAAM,cAAAC,QAAM,IAAI,MAAM,KAAK;AAAA,UAC1C,cAAc;AAAA,QAChB,CAAC;AACD,oBAAY,OAAO,KAAKD,UAAS,IAAI;AAGrC,cAAM,UAAU,IAAI,IAAI,MAAM,GAAG,EAAE;AACnC,cAAM,gBAAgB,QAAQ,MAAM,GAAG,EAAE,IAAI;AAC7C,YAAI,eAAe;AACjB,qBAAW;AAAA,QACb;AAAA,MACF,WAAW,MAAM,SAAS,QAAQ;AAChC,oBAAY,MAAM;AAClB,mBAAW,MAAM,YAAY;AAAA,MAC/B,OAAO;AACL,eAAO;AAAA,UACL,SAAS;AAAA,UACT,UAAU,KAAK;AAAA,UACf,OAAO;AAAA,YACL,MAAM;AAAA,YACN,SAAS;AAAA,UACX;AAAA,QACF;AAAA,MACF;AAGA,YAAM,QAAQ,KAAK,YAAY,OAAO;AAGtC,YAAM,gBAAgB,UAAU;AAChC,YAAM,aAAa,SAAS,mBAAmB;AAG/C,YAAM,UAAsC;AAAA,QAC1C,MAAM;AAAA;AAAA,QACN;AAAA,MACF;AAGA,UAAI,SAAS,UAAU;AACrB,gBAAQ,WAAW,QAAQ;AAAA,MAC7B;AAEA,UAAI,SAAS,UAAU,QAAQ;AAC7B,gBAAQ,SAAS
,QAAQ,SAAS;AAAA,MACpC;AAEA,UAAI,SAAS,UAAU,gBAAgB,QAAW;AAChD,gBAAQ,cAAc,QAAQ,SAAS;AAAA,MACzC;AAEA,UAAI,eAAe;AAEjB,gBAAQ,kBAAkB;AAG1B,YAAI,SAAS,UAAU,mBAAmB;AACxC,kBAAQ,sBAAsB,QAAQ,SAAS;AAAA,QACjD;AAEA,YAAI,SAAS,UAAU,wBAAwB;AAC7C,kBAAQ,2BAA2B,QAAQ,SAAS;AAAA,QACtD;AAAA,MACF,WAAW,cAAc,SAAS,aAAa;AAE7C,gBAAQ,kBAAkB;AAG1B,YAAI,YAAY;AACd,kBAAQ,0BAA0B,CAAC,QAAQ,SAAS;AAAA,QACtD;AAAA,MACF,OAAO;AAEL,gBAAQ,kBAAkB;AAAA,MAC5B;AAGA,YAAM,WAAW,MAAM,oBAAoB,SAAS,KAAK,eAAe,CAAC;AAEzE,aAAO,KAAK,kBAAkB,SAAS,MAAa,OAAO,aAAa;AAAA,IAC1E,SAAS,OAAO;AACd,aAAO,KAAK,oBAAoB,KAAK;AAAA,IACvC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,cAAc,cAA0D;AAC5E,WAAO;AAAA,MACL,SAAS;AAAA,MACT,UAAU,KAAK;AAAA,MACf,OAAO;AAAA,QACL,MAAM;AAAA,QACN,SACE;AAAA,MACJ;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,YAAY,SAAsD;AAExE,QAAI,SAAS,UAAU,OAAO;AAC5B,aAAO,QAAQ,SAAS;AAAA,IAC1B;AAGA,QAAI,SAAS,aAAa;AACxB,aAAO;AAAA,IACT;AAGA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,kBACN,UAIA,OACA,eAC2B;AAE3B,QAAI,UAAU,YAAY,OAAO,KAAK,QAAQ,EAAE,WAAW,GAAG;AAC5D,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,KAAK;AAAA,QACf,MAAM;AAAA,UACJ,IAAI,UAAU,KAAK,IAAI,CAAC;AAAA,UACxB,MAAM,SAAS;AAAA,UACf,QAAQ;AAAA,UACR,UAAU;AAAA,UACV,YAAY;AAAA,QACd;AAAA,QACA,KAAK;AAAA,MACP;AAAA,IACF;AAGA,QAAI,iBAAiB,cAAc,UAAU;AAC3C,YAAM,mBAAmB;AAGzB,YAAM,aAAa,IAAI,IAAI,iBAAiB,SAAS,IAAI,CAAC,QAAQ,IAAI,OAAO,CAAC;AAC9E,YAAM,WAAW,MAAM,KAAK,UAAU,EAAE,IAAI,CAAC,aAAa;AAAA,QACxD,IAAI;AAAA,QACJ,OAAO;AAAA;AAAA,MACT,EAAE;AAGF,YAAM,aAAa,iBAAiB,SAAS,IAAI,CAAC,aAAa;AAAA,QAC7D,SAAS,QAAQ;AAAA,QACjB,MAAM,QAAQ;AAAA,QACd,OAAO,QAAQ;AAAA,QACf,KAAK,QAAQ;AAAA,QACb,YAAY;AAAA,MACd,EAAE;AAEF,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,KAAK;AAAA,QACf,MAAM;AAAA,UACJ,IAAI,UAAU,KAAK,IAAI,CAAC;AAAA,UACxB,MAAM,iBAAiB;AAAA,UACvB,QAAQ;AAAA,UACR,UAAU;AAAA,UACV,UAAU,iBAAiB;AAAA,UAC3B;AAAA,UACA;AAAA,QACF;AAAA,QACA,KAAK;AAAA,MACP;AAAA,IACF;AAGA,QAAI,cAAc,YAAY,cAAc,UAAU;AACpD,YAAM,kBAAkB;AAGxB,YAAM,QAAQ,gBAAgB,OAAO,IAAI,CAAC,UAAU;AAAA,QAClD,MAAM,KAAK;AAAA,QACX,OAAO,KAAK;AAAA,QACZ,KAAK,KAAK;AAAA,QACV,YAAY;AAAA,MACd,EAAE;AAEF,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,KAAK;AAAA,QACf,MAAM;AAAA,UACJ,IAAI,UAAU,KAAK,IAAI,CAAC;AAAA,UACxB,MAAM,gBAAgB;AAAA,UACtB,QAAQ;AAAA,UACR,UAAU,gBAAgB;AAAA,UAC1B,UAAU,gBAAgB;AAAA,UAC1B;AAAA,QACF;AAAA,QACA,KAAK;AAAA,MACP;AAAA,IACF;AAGA,WAAO;AAAA,MACL,SAAS;AAAA,MACT,UAAU,KAAK;AAAA,MACf,MAAM;AAAA,QACJ,IAAI,UAAU,KAAK,IAAI,CAAC;AAAA,QACxB,MAAM,UAAU,WAAW,SAAS,OAAO;AAAA,QAC3C,QAAQ;AAAA,MACV;AAAA,MACA,KAAK;AAAA,IACP;AAAA,EACF;AACF;AAKO,SAAS,2BAA2B,QAA8C;AACvF,QAAM,UAAU,IAAI,qBAAqB;AACzC,UAAQ,WAAW,MAAM;AACzB,SAAO;AACT;;;AE/XA,IAAAE,gBAA0C;AAkGnC,IAAM,sBAAN,cAAkC,YAAY;AAAA,EAA9C;AAAA;AACL,SAAS,OAAO;AAChB,SAAS,eAAqC;AAAA,MAC5C,WAAW;AAAA;AAAA,MACX,aAAa;AAAA,MACb,gBAAgB;AAAA,MAChB,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,MAClB,eAAe;AAAA,MACf,mBAAmB;AAAA,MACnB,iBAAiB;AAAA,MACjB,cAAc;AAAA,IAChB;AAGA,SAAU,UAAU;AAAA;AAAA,EAEpB,WAAW,QAA8B;AACvC,UAAM,WAAW,MAAM;AAEvB,SAAK,UAAU,OAAO,WAAW,KAAK;AAEtC,SAAK,SAAS,cAAAC,QAAM,OAAO;AAAA,MACzB,SAAS,KAAK;AAAA,MACd,SAAS,OAAO,WAAW;AAAA,MAC3B,SAAS;AAAA,QACP,eAAe,UAAU,OAAO,MAAM;AAAA,QACtC,GAAG,OAAO;AAAA,MACZ;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAM,WACJ,OACA,SACoC;AACpC,SAAK,eAAe;AAEpB,QAAI;AAEF,YAAM,YAAuB;AAAA,QAC3B,MAAM;AAAA,QACN,sBAAsB;AAAA,UACpB,UAAU,SAAS,YAAY;AAAA,UAC/B,iBACG,SAAS,UAAU,mBAA+C;AAAA,QACvE;AAAA,MACF;AAGA,UAAI,SAAS,aAAa;AACxB,YAAI,CAAC,UAAU,sBAAsB;AACnC,oBAAU,uBAAuB,CAAC;AAAA,QACpC;AACA,kBAAU,qBAAqB,cAAc;AAC7C,YAAI,QAAQ,kBAAkB;AAC5B,oBAAU,qBAAqB,6BAA6B;AAAA,YAC1D,cAAc,QAAQ;AAAA,UACxB;AAAA,QACF;AAAA,MACF;AAGA,UAAI,SAAS,mBAAmB;AAC9B,YAAI,CAAC,UAAU,sBAAsB;AACnC,oBAAU,u
BAAuB,CAAC;AAAA,QACpC;AACA,kBAAU,qBAAqB,4BAA4B;AAAA,MAC7D;AAGA,UAAI,SAAS,iBAAiB,SAAS,UAAU,cAAc;AAC7D,YAAI,CAAC,UAAU,sBAAsB;AACnC,oBAAU,uBAAuB,CAAC;AAAA,QACpC;AACA,kBAAU,qBAAqB,uBAAuB;AAAA,UACpD,MAAM,QAAQ,SAAS;AAAA,UACvB,QAAS,QAAQ,SAAS,kBAAkD;AAAA,QAC9E;AAAA,MACF;AAGA,UAAI,SAAS,oBAAoB,QAAQ,iBAAiB,SAAS,GAAG;AACpE,YAAI,CAAC,UAAU,sBAAsB;AACnC,oBAAU,uBAAuB,CAAC;AAAA,QACpC;AACA,kBAAU,qBAAqB,mBAAmB,QAAQ;AAAA,MAC5D;AAGA,UAAI;AACJ,UAAI,UAAkC,CAAC;AAEvC,UAAI,MAAM,SAAS,OAAO;AAExB,kBAAU,aAAa;AAAA,UACrB,KAAK,MAAM;AAAA,QACb;AACA,sBAAc,EAAE,QAAQ,KAAK,UAAU,SAAS,EAAE;AAClD,kBAAU,EAAE,gBAAgB,mBAAmB;AAAA,MACjD,WAAW,MAAM,SAAS,QAAQ;AAEhC,sBAAc;AAAA,UACZ,QAAQ,KAAK,UAAU,SAAS;AAAA,UAChC,WAAW,MAAM;AAAA,QACnB;AACA,kBAAU,EAAE,gBAAgB,sBAAsB;AAAA,MACpD,OAAO;AACL,eAAO;AAAA,UACL,SAAS;AAAA,UACT,UAAU,KAAK;AAAA,UACf,OAAO;AAAA,YACL,MAAM;AAAA,YACN,SAAS;AAAA,UACX;AAAA,QACF;AAAA,MACF;AAGA,YAAM,WAAW,MAAM,KAAK,OAAQ,KAAwB,SAAS,aAAa,EAAE,QAAQ,CAAC;AAE7F,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,KAAK;AAAA,QACf,MAAM;AAAA,UACJ,IAAI,SAAS,KAAK;AAAA,UAClB,MAAM;AAAA,UACN,QAAQ;AAAA,UACR,WAAW,SAAS,KAAK;AAAA,QAC3B;AAAA,QACA,KAAK,SAAS;AAAA,MAChB;AAAA,IACF,SAAS,OAAO;AACd,aAAO,KAAK,oBAAoB,KAAK;AAAA,IACvC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,cAAc,cAA0D;AAC5E,SAAK,eAAe;AAEpB,QAAI;AAEF,YAAM,iBAAiB,MAAM,KAAK,OAAQ,IAAwB,SAAS,YAAY,EAAE;AAEzF,YAAM,SAAS,KAAK,gBAAgB,eAAe,KAAK,IAAI,MAAM;AAElE,UAAI,WAAW,aAAa;AAC1B,eAAO;AAAA,UACL,SAAS;AAAA,UACT,UAAU,KAAK;AAAA,UACf,MAAM;AAAA,YACJ,IAAI;AAAA,YACJ,MAAM;AAAA,YACN;AAAA,YACA,WAAW,eAAe,KAAK,IAAI;AAAA,UACrC;AAAA,UACA,KAAK,eAAe;AAAA,QACtB;AAAA,MACF;AAGA,YAAM,qBAAqB,MAAM,KAAK,OAAQ;AAAA,QAC5C,SAAS,YAAY;AAAA,MACvB;AAEA,aAAO,KAAK,kBAAkB,mBAAmB,IAAI;AAAA,IACvD,SAAS,OAAO;AACd,aAAO,KAAK,oBAAoB,KAAK;AAAA,IACvC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,gBAAgB,QAAiE;AACvF,YAAQ,QAAQ;AAAA,MACd,KAAK;AACH,eAAO;AAAA,MACT,KAAK;AACH,eAAO;AAAA,MACT,KAAK;AAAA,MACL,KAAK;AACH,eAAO;AAAA,MACT;AACE,eAAO;AAAA,IACX;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,kBAAkB,UAA4D;AAEpF,UAAM,OAAO,SAAS,QACnB,OAAO,CAAC,MAAM,EAAE,SAAS,UAAU,EAAE,YAAY,EACjD,IAAI,CAAC,MAAM,EAAE,aAAc,CAAC,GAAG,WAAW,EAAE,EAC5C,KAAK,GAAG;AAGX,UAAM,QAAQ,SAAS,QACpB,OAAO,CAAC,MAAM,EAAE,SAAS,UAAU,EAAE,eAAe,UAAa,EAAE,aAAa,MAAS,EACzF,IAAI,CAAC,YAAY;AAAA,MAChB,MAAM,OAAO,eAAe,CAAC,GAAG,WAAW;AAAA,MAC3C,OAAO,OAAO;AAAA,MACd,KAAK,OAAO;AAAA,MACZ,YAAY,OAAO,eAAe,CAAC,GAAG;AAAA,MACtC,SAAS,OAAO,eAAe,CAAC,GAAG;AAAA,IACrC,EAAE;AAGJ,UAAM,aAAa,oBAAI,IAAY;AACnC,aAAS,QAAQ,QAAQ,CAAC,MAAM;AAC9B,UAAI,EAAE,cAAc;AAClB,cAAM,UAAU,EAAE,aAAa,CAAC,GAAG;AACnC,YAAI,QAAS,YAAW,IAAI,OAAO;AAAA,MACrC;AAAA,IACF,CAAC;AAED,UAAM,WACJ,WAAW,OAAO,IACd,MAAM,KAAK,UAAU,EAAE,IAAI,CAAC,QAAQ;AAAA,MAClC;AAAA,MACA,OAAO,WAAW,EAAE;AAAA,IACtB,EAAE,IACF;AAGN,UAAM,aAKD,CAAC;AAEN,QAAI,UAAU;AACZ,UAAI;AACJ,UAAI,mBAA6B,CAAC;AAClC,UAAI,iBAAiB;AAErB,eAAS,QACN,OAAO,CAAC,MAAM,EAAE,SAAS,UAAU,EAAE,YAAY,EACjD,QAAQ,CAAC,QAAQ,QAAQ;AACxB,cAAM,UAAU,OAAO,aAAc,CAAC,GAAG;AACzC,cAAM,OAAO,OAAO,aAAc,CAAC,GAAG,WAAW;AAEjD,YAAI,YAAY,gBAAgB;AAE9B,cAAI,kBAAkB,iBAAiB,SAAS,GAAG;AACjD,kBAAM,aAAa,SAAS,QAAQ,OAAO,CAAC,MAAM,EAAE,SAAS,MAAM,EAAE,MAAM,CAAC;AAC5E,uBAAW,KAAK;AAAA,cACd,SAAS;AAAA,cACT,MAAM,iBAAiB,KAAK,GAAG;AAAA,cAC/B,OAAO,kBAAkB;AAAA,cACzB,KAAK,YAAY,YAAY,OAAO,cAAc;AAAA,YACpD,CAAC;AAAA,UACH;AAGA,2BAAiB;AACjB,6BAAmB,CAAC,IAAI;AACxB,2BAAiB,OAAO,cAAc;AAAA,QACxC,OAAO;AACL,2BAAiB,KAAK,IAAI;AAAA,QAC5B;AAAA,MACF,CAAC;AAGH,UAAI,kBAAkB,iBAAiB,SAAS,GAAG;AACjD,cAAM,WAAW,SAAS,QAAQ,OAAO,CAAC,MAAM,EAAE,SAAS,MAAM,EAAE,IAAI;AACvE,mBAAW,KAAK;AAAA,UACd,SAAS;AAAA,UACT,MAAM,iBAAiB,KAAK,GAAG;AAAA,UAC/B,OAAO;AAAA,UACP,KAAK,UAAU,YAAY;AAAA,QAC7B,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WA
AO;AAAA,MACL,SAAS;AAAA,MACT,UAAU,KAAK;AAAA,MACf,MAAM;AAAA,QACJ,IAAI,SAAS,IAAI;AAAA,QACjB;AAAA,QACA,QAAQ;AAAA,QACR,UAAU,SAAS,SAAS,sBAAsB;AAAA,QAClD,UAAU,SAAS,IAAI;AAAA,QACvB;AAAA,QACA,OAAO,MAAM,SAAS,IAAI,QAAQ;AAAA,QAClC,YAAY,WAAW,SAAS,IAAI,aAAa;AAAA,QACjD,SAAS,SAAS,SAAS;AAAA,QAC3B,WAAW,SAAS,IAAI;AAAA,MAC1B;AAAA,MACA,KAAK;AAAA,IACP;AAAA,EACF;AACF;AAKO,SAAS,0BAA0B,QAA6C;AACrF,QAAM,UAAU,IAAI,oBAAoB;AACxC,UAAQ,WAAW,MAAM;AACzB,SAAO;AACT;;;AC7ZO,IAAe,qBAAf,MAAkC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA2FvC,SACE,SACA,SACmB;AACnB,QAAI;AAEF,UAAI,CAAC,KAAK,QAAQ,SAAS,OAAO,GAAG;AACnC,eAAO;AAAA,UACL,OAAO;AAAA,UACP,OAAO,0BAA0B,KAAK,QAAQ;AAAA,QAChD;AAAA,MACF;AAGA,YAAM,QAAQ,KAAK,MAAM,SAAS,OAAO;AAGzC,UAAI,CAAC,MAAM,YAAY,CAAC,MAAM,WAAW;AACvC,eAAO;AAAA,UACL,OAAO;AAAA,UACP,OAAO;AAAA,QACT;AAAA,MACF;AAEA,aAAO;AAAA,QACL,OAAO;AAAA,QACP,UAAU,KAAK;AAAA,QACf,SAAS;AAAA,UACP,WAAW,MAAM;AAAA,UACjB,SAAS,MAAM;AAAA,QACjB;AAAA,MACF;AAAA,IACF,SAAS,OAAO;AACd,aAAO;AAAA,QACL,OAAO;AAAA,QACP,OAAO,iBAAiB,QAAQ,MAAM,UAAU;AAAA,QAChD,SAAS,EAAE,MAAM;AAAA,MACnB;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKU,iBAAiB,SAAkB,cAA2C;AACtF,WAAO;AAAA,MACL,SAAS;AAAA,MACT,UAAU,KAAK;AAAA,MACf,WAAW;AAAA,MACX,MAAM;AAAA,QACJ,IAAI;AAAA,QACJ,QAAQ;AAAA,QACR,OAAO;AAAA,MACT;AAAA,MACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,MAClC,KAAK;AAAA,IACP;AAAA,EACF;AACF;;;ACxHO,IAAM,uBAAN,cAAmC,mBAAmB;AAAA,EAAtD;AAAA;AACL,SAAS,WAAkC;AAAA;AAAA;AAAA;AAAA;AAAA,EAK3C,QACE,SACA,UACS;AACT,QAAI,CAAC,WAAW,OAAO,YAAY,UAAU;AAC3C,aAAO;AAAA,IACT;AAEA,UAAM,MAAM;AAGZ,QAAI,EAAE,WAAW,QAAQ,EAAE,aAAa,MAAM;AAC5C,aAAO;AAAA,IACT;AAGA,QAAI,OAAO,IAAI,UAAU,UAAU;AACjC,aAAO;AAAA,IACT;AAEA,QAAI,CAAC,IAAI,MAAM,WAAW,gBAAgB,GAAG;AAC3C,aAAO;AAAA,IACT;AAGA,QAAI,CAAC,IAAI,WAAW,OAAO,IAAI,YAAY,UAAU;AACnD,aAAO;AAAA,IACT;AAEA,UAAM,aAAa,IAAI;AACvB,WAAO,OAAO,WAAW,OAAO;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA,EAKA,MACE,SACA,UACqB;AACrB,QAAI,CAAC,KAAK,QAAQ,OAAO,GAAG;AAC1B,aAAO,KAAK,iBAAiB,SAAS,gCAAgC;AAAA,IACxE;AAEA,UAAM,iBAAiB;AAKvB,UAAM,QAAQ,eAAe,QAAQ;AACrC,UAAM,QAAQ,eAAe;AAG7B,QAAI,UAAU,yBAAyB;AACrC,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,KAAK;AAAA,QACf,WAAW;AAAA,QACX,MAAM;AAAA,UACJ,IAAI;AAAA,UACJ,QAAQ;AAAA,QACV;AAAA,QACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,QAClC,KAAK;AAAA,MACP;AAAA,IACF;AAEA,QAAI,UAAU,yBAAyB;AAGrC,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,KAAK;AAAA,QACf,WAAW;AAAA,QACX,MAAM;AAAA,UACJ,IAAI;AAAA,UACJ,QAAQ;AAAA;AAAA;AAAA,QAGV;AAAA,QACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,QAClC,KAAK;AAAA,MACP;AAAA,IACF;AAEA,QAAI,UAAU,uBAAuB;AACnC,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,KAAK;AAAA,QACf,WAAW;AAAA,QACX,MAAM;AAAA,UACJ,IAAI;AAAA,UACJ,QAAQ;AAAA,UACR,OAAO;AAAA,QACT;AAAA,QACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,QAClC,KAAK;AAAA,MACP;AAAA,IACF;AAGA,WAAO,KAAK,iBAAiB,SAAS,iCAAiC,KAAK,EAAE;AAAA,EAChF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,SAAkB;AAGhB,WAAO;AAAA,EACT;AACF;AAKO,SAAS,6BAAmD;AACjE,SAAO,IAAI,qBAAqB;AAClC;;;ACtKA,yBAAmB;AA4CZ,IAAM,2BAAN,cAAuC,mBAAmB;AAAA,EAA1D;AAAA;AACL,SAAS,WAAkC;AAAA;AAAA;AAAA;AAAA;AAAA,EAK3C,QACE,SACA,UACS;AACT,QAAI,CAAC,WAAW,OAAO,YAAY,UAAU;AAC3C,aAAO;AAAA,IACT;AAEA,UAAM,MAAM;AAGZ,QAAI,EAAE,mBAAmB,QAAQ,EAAE,YAAY,MAAM;AACnD,aAAO;AAAA,IACT;AAGA,QAAI,OAAO,IAAI,kBAAkB,UAAU;AACzC,aAAO;AAAA,IACT;AAGA,QAAI,IAAI,WAAW,eAAe,IAAI,WAAW,SAAS;AACxD,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MACE,SACA,UACqB;AACrB,QAAI,CAAC,KAAK,QAAQ,OAAO,GAAG;AAC1B,aAAO,KAAK,iBAAiB,SAAS,oCAAoC;AAAA,IAC5E;AAEA,UAAM,eAAe;AACrB,UAAM,eAAe,aAAa;AAClC,UAAM,SAAS,aAAa;AAE5B,QAAI,WAAW,aAAa;AAC1B,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,KAAK;AAAA,QACf,WAAW;AAAA,QACX,MAAM;AAAA,UACJ,IAAI;AAAA,UACJ,QAAQ;AAAA;AAAA;AAAA,QAGV;AAAA
,QACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,QAClC,KAAK;AAAA,MACP;AAAA,IACF;AAEA,QAAI,WAAW,SAAS;AACtB,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,KAAK;AAAA,QACf,WAAW;AAAA,QACX,MAAM;AAAA,UACJ,IAAI;AAAA,UACJ,QAAQ;AAAA,UACR,OAAO;AAAA,QACT;AAAA,QACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,QAClC,KAAK;AAAA,MACP;AAAA,IACF;AAGA,WAAO,KAAK,iBAAiB,SAAS,8BAA8B,MAAM,EAAE;AAAA,EAC9E;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAqBA,OAAO,SAAkB,SAA8C;AAErE,QAAI,CAAC,QAAQ,aAAa,CAAC,QAAQ,QAAQ;AACzC,aAAO;AAAA,IACT;AAEA,QAAI;AAEF,YAAM,OACJ,QAAQ,YAAY,OAAO,YAAY,WAAW,UAAU,KAAK,UAAU,OAAO;AAGpF,YAAM,OAAO,mBAAAC,QAAO,WAAW,UAAU,QAAQ,MAAM;AACvD,YAAM,aAAa,OAAO,SAAS,WAAW,OAAO,KAAK,IAAI,IAAI;AAClE,WAAK,OAAO,UAAU;AACtB,YAAM,oBAAoB,KAAK,OAAO,KAAK;AAG3C,aAAO,mBAAAA,QAAO,gBAAgB,OAAO,KAAK,QAAQ,SAAS,GAAG,OAAO,KAAK,iBAAiB,CAAC;AAAA,IAC9F,SAAS,OAAO;AAEd,aAAO;AAAA,IACT;AAAA,EACF;AACF;AAKO,SAAS,iCAA2D;AACzE,SAAO,IAAI,yBAAyB;AACtC;;;AC9HO,IAAM,yBAAN,cAAqC,mBAAmB;AAAA,EAAxD;AAAA;AACL,SAAS,WAAkC;AAAA;AAAA;AAAA;AAAA;AAAA,EAK3C,QACE,SACA,UACS;AACT,QAAI,CAAC,WAAW,OAAO,YAAY,UAAU;AAC3C,aAAO;AAAA,IACT;AAEA,UAAM,MAAM;AAGZ,QAAI,EAAE,cAAc,QAAQ,EAAE,aAAa,MAAM;AAC/C,aAAO;AAAA,IACT;AAGA,QAAI,CAAC,IAAI,YAAY,OAAO,IAAI,aAAa,UAAU;AACrD,aAAO;AAAA,IACT;AAEA,UAAM,WAAW,IAAI;AACrB,QAAI,EAAE,gBAAgB,WAAW;AAC/B,aAAO;AAAA,IACT;AAGA,QAAI,CAAC,IAAI,WAAW,OAAO,IAAI,YAAY,UAAU;AACnD,aAAO;AAAA,IACT;AAEA,UAAM,UAAU,IAAI;AACpB,WAAO,cAAc;AAAA,EACvB;AAAA;AAAA;AAAA;AAAA,EAKA,MACE,SACA,UACqB;AACrB,QAAI,CAAC,KAAK,QAAQ,OAAO,GAAG;AAC1B,aAAO,KAAK,iBAAiB,SAAS,kCAAkC;AAAA,IAC1E;AAEA,UAAM,WAAW;AAEjB,QAAI;AAEF,YAAM,YAAY,SAAS,SAAS;AACpC,YAAM,WAAW,SAAS,SAAS;AACnC,YAAM,WAAW,SAAS,QAAQ,YAAY,CAAC;AAG/C,UAAI,SAAS,WAAW,GAAG;AACzB,eAAO;AAAA,UACL,SAAS;AAAA,UACT,UAAU,KAAK;AAAA,UACf,WAAW;AAAA,UACX,MAAM;AAAA,YACJ,IAAI,aAAa;AAAA,YACjB,QAAQ;AAAA,YACR,OAAO;AAAA,UACT;AAAA,UACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,UAClC,KAAK;AAAA,QACP;AAAA,MACF;AAEA,YAAM,UAAU,SAAS,CAAC;AAC1B,YAAM,eAAe,QAAQ,gBAAgB,CAAC;AAE9C,UAAI,aAAa,WAAW,GAAG;AAC7B,eAAO;AAAA,UACL,SAAS;AAAA,UACT,UAAU,KAAK;AAAA,UACf,WAAW;AAAA,UACX,MAAM;AAAA,YACJ,IAAI,aAAa;AAAA,YACjB,QAAQ;AAAA,YACR,OAAO;AAAA,UACT;AAAA,UACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,UAClC,KAAK;AAAA,QACP;AAAA,MACF;AAEA,YAAM,cAAc,aAAa,CAAC;AAClC,YAAM,aAAa,YAAY;AAG/B,UAAI,CAAC,YAAY;AACf,eAAO;AAAA,UACL,SAAS;AAAA,UACT,UAAU,KAAK;AAAA,UACf,WAAW;AAAA,UACX,MAAM;AAAA,YACJ,IAAI,aAAa;AAAA,YACjB,QAAQ;AAAA,YACR,OAAO;AAAA,UACT;AAAA,UACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,UAClC,KAAK;AAAA,QACP;AAAA,MACF;AAGA,YAAM,QACJ,YAAY,SAAS,YAAY,MAAM,SAAS,IAC5C,YAAY,MAAM,IAAI,CAAC,UAAU;AAAA,QAC/B,MAAM,KAAK,QAAQ;AAAA,QACnB,OAAO,KAAK,SAAS;AAAA,QACrB,KAAK,KAAK,OAAO;AAAA,QACjB,YAAY,KAAK;AAAA,MACnB,EAAE,IACF;AAGN,YAAM,WACJ,SAAS,QAAQ,cAAc,SAAS,QAAQ,WAAW,SAAS,IAChE,SAAS,QAAQ,WAAW,IAAI,CAAC,eAAe;AAAA,QAC9C,IAAI,UAAU,SAAS,SAAS,KAAK;AAAA,QACrC,SAAS,UAAU,SAAS,SAAS,KAAK;AAAA,QAC1C,MAAM,UAAU,cAAc;AAAA,QAC9B,YAAY,UAAU;AAAA,MACxB,EAAE,IACF;AAGN,YAAM,aACJ,SAAS,QAAQ,cAAc,SAAS,QAAQ,WAAW,SAAS,IAChE,SAAS,QAAQ,WAAW,IAAI,CAAC,eAAe;AAAA,QAC9C,MAAM,UAAU,cAAc;AAAA,QAC9B,OAAO,UAAU,SAAS;AAAA,QAC1B,KAAK,UAAU,OAAO;AAAA,QACtB,SAAS,UAAU,SAAS,SAAS;AAAA,QACrC,YAAY,UAAU;AAAA,QACtB,OACE,UAAU,SAAS,UAAU,MAAM,SAAS,IACxC,UAAU,MAAM,IAAI,CAAC,UAAU;AAAA,UAC7B,MAAM,KAAK,QAAQ;AAAA,UACnB,OAAO,KAAK,SAAS;AAAA,UACrB,KAAK,KAAK,OAAO;AAAA,UACjB,YAAY,KAAK;AAAA,QACnB,EAAE,IACF;AAAA,MACR,EAAE,IACF;AAGN,YAAM,UAAU,YAAY,YAAY,CAAC,GAAG;AAE5C,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,KAAK;AAAA,QACf,WAAW;AAAA,QACX,MAAM;AAAA,UACJ,IAAI,aAAa;AAAA,UACjB,QAAQ;AAAA,UACR,MAAM;AAAA,UACN,YAAY,YAAY;AAAA,UACxB;AAAA,UACA,UAAU,SAAS,SAAS,SAAS,CAAC,KAAK;AAAA,UAC3C,UAAU
,YAAY,SAAS,SAAS,IAAI,WAAW;AAAA,UACvD,OAAO,SAAS,MAAM,SAAS,IAAI,QAAQ;AAAA,UAC3C,YAAY,cAAc,WAAW,SAAS,IAAI,aAAa;AAAA,UAC/D;AAAA,UACA,UAAU;AAAA,YACR,UAAU,SAAS,SAAS;AAAA,YAC5B,SAAS,SAAS,SAAS;AAAA,YAC3B,QAAQ,SAAS,SAAS;AAAA,UAC5B;AAAA,QACF;AAAA,QACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,QAClC,KAAK;AAAA,MACP;AAAA,IACF,SAAS,OAAO;AACd,aAAO,KAAK;AAAA,QACV;AAAA,QACA,qCAAqC,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,MAC/F;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,SAAkB;AAGhB,WAAO;AAAA,EACT;AACF;AAKO,SAAS,+BAAuD;AACrE,SAAO,IAAI,uBAAuB;AACpC;;;ACvQA,IAAAC,sBAAmB;AA8EZ,IAAM,sBAAN,cAAkC,mBAAmB;AAAA,EAArD;AAAA;AACL,SAAS,WAAkC;AAAA;AAAA;AAAA;AAAA;AAAA,EAK3C,QACE,SACA,UACS;AACT,QAAI,CAAC,WAAW,OAAO,YAAY,UAAU;AAC3C,aAAO;AAAA,IACT;AAEA,UAAM,MAAM;AAGZ,QAAI,EAAE,YAAY,QAAQ,EAAE,eAAe,MAAM;AAC/C,aAAO;AAAA,IACT;AAGA,QAAI,OAAO,IAAI,WAAW,UAAU;AAClC,aAAO;AAAA,IACT;AAGA,QAAI,CAAC,IAAI,OAAO,WAAW,eAAe,GAAG;AAC3C,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MACE,SACA,UACqB;AACrB,QAAI,CAAC,KAAK,QAAQ,OAAO,GAAG;AAC1B,aAAO,KAAK,iBAAiB,SAAS,+BAA+B;AAAA,IACvE;AAEA,UAAM,iBAAiB;AACvB,UAAM,SAAS,eAAe;AAC9B,UAAM,YAAY,eAAe;AAIjC,QAAI,kBAAkB;AACtB,QAAI,eAAe,MAAM;AACvB,YAAM,QAAQ,eAAe,KAAK,MAAM,4BAA4B;AACpE,UAAI,OAAO;AACT,0BAAkB,MAAM,CAAC;AAAA,MAC3B;AAAA,IACF;AAGA,QAAI,WAAW,wBAAwB;AACrC,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,KAAK;AAAA,QACf,WAAW;AAAA,QACX,MAAM;AAAA,UACJ,IAAI;AAAA,UACJ,QAAQ;AAAA,UACR,WAAW;AAAA,QACb;AAAA,QACA;AAAA,QACA,KAAK;AAAA,MACP;AAAA,IACF;AAEA,QAAI,WAAW,wBAAwB;AACrC,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,KAAK;AAAA,QACf,WAAW;AAAA,QACX,MAAM;AAAA,UACJ,IAAI;AAAA,UACJ,QAAQ;AAAA,QACV;AAAA,QACA;AAAA,QACA,KAAK;AAAA,MACP;AAAA,IACF;AAEA,QAAI,WAAW,0BAA0B;AACvC,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,KAAK;AAAA,QACf,WAAW;AAAA,QACX,MAAM;AAAA,UACJ,IAAI;AAAA,UACJ,QAAQ;AAAA,UACR,aAAa;AAAA;AAAA;AAAA,QAGf;AAAA,QACA;AAAA,QACA,KAAK;AAAA,MACP;AAAA,IACF;AAEA,QAAI,WAAW,uBAAuB;AACpC,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,KAAK;AAAA,QACf,WAAW;AAAA,QACX,MAAM;AAAA,UACJ,IAAI;AAAA,UACJ,QAAQ;AAAA,UACR,OAAO,eAAe,OAAO,WAAW;AAAA,UACxC,UAAU;AAAA,YACR,WAAW,eAAe,OAAO;AAAA,UACnC;AAAA,QACF;AAAA,QACA;AAAA,QACA,KAAK;AAAA,MACP;AAAA,IACF;AAGA,WAAO,KAAK,iBAAiB,SAAS,iCAAiC,MAAM,EAAE;AAAA,EACjF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAwBA,OAAO,SAAkB,SAA8C;AAGrE,QAAI,CAAC,QAAQ,WAAW;AACtB,aAAO;AAAA,IACT;AAGA,QAAI,CAAC,QAAQ,QAAQ;AACnB,aAAO;AAAA,IACT;AAEA,QAAI;AAEF,YAAM,OACJ,QAAQ,YAAY,OAAO,YAAY,WAAW,UAAU,KAAK,UAAU,OAAO;AAGpF,YAAM,OAAO,oBAAAC,QAAO,WAAW,UAAU,QAAQ,MAAM;AACvD,YAAM,aAAa,OAAO,SAAS,WAAW,OAAO,KAAK,IAAI,IAAI;AAClE,WAAK,OAAO,UAAU;AACtB,YAAM,oBAAoB,KAAK,OAAO,KAAK;AAG3C,aAAO,oBAAAA,QAAO,gBAAgB,OAAO,KAAK,QAAQ,SAAS,GAAG,OAAO,KAAK,iBAAiB,CAAC;AAAA,IAC9F,SAAS,OAAO;AAEd,aAAO;AAAA,IACT;AAAA,EACF;AACF;AAKO,SAAS,4BAAiD;AAC/D,SAAO,IAAI,oBAAoB;AACjC;;;AClOO,IAAM,6BAAN,cAAyC,mBAAmB;AAAA,EAA5D;AAAA;AACL,SAAS,WAAkC;AAAA;AAAA;AAAA;AAAA;AAAA,EAK3C,QACE,SACA,SACS;AAET,QAAI,SAAS,WAAW;AACtB,UAAI,CAAC,QAAQ,UAAU,SAAS,kBAAkB,GAAG;AACnD,eAAO;AAAA,MACT;AAAA,IACF;AAGA,QAAI,SAAS,aAAa;AACxB,YAAM,EAAE,IAAI,OAAO,IAAI,QAAQ;AAC/B,UAAI,CAAC,MAAM,CAAC,QAAQ;AAClB,eAAO;AAAA,MACT;AAAA,IACF;AAIA,QAAI,WAAW,OAAO,YAAY,UAAU;AAC1C,YAAM,MAAM;AAGZ,UAAI,YAAY,OAAO,SAAS,OAAO,cAAc,KAAK;AACxD,eAAO;AAAA,MACT;AAGA,UAAI,SAAS,OAAO,QAAQ,KAAK;AAC/B,eAAO;AAAA,MACT;AAAA,IACF;AAGA,WAAO,CAAC,CAAC,SAAS,aAAa,MAAM,CAAC,CAAC,SAAS,aAAa;AAAA,EAC/D;AAAA;AAAA;AAAA;AAAA,EAKA,SACE,SACA,SACoC;AAEpC,QAAI,CAAC,SAAS,aAAa,IAAI;AAC7B,aAAO;AAAA,QACL,OAAO;AAAA,QACP,OAAO;AAAA,MACT;AAAA,IACF;AAEA,QAAI,CAAC,SAAS,aAAa,QAAQ;AACjC,aAAO;AAAA,QACL,OAAO;AAAA,QACP,OAAO;
AAAA,MACT;AAAA,IACF;AAGA,UAAM,gBAAgB,CAAC,WAAW,SAAS,eAAe,YAAY;AACtE,QAAI,CAAC,cAAc,SAAS,QAAQ,YAAY,MAAM,GAAG;AACvD,aAAO;AAAA,QACL,OAAO;AAAA,QACP,OAAO,yBAAyB,QAAQ,YAAY,MAAM;AAAA,MAC5D;AAAA,IACF;AAGA,QAAI,SAAS,aAAa,CAAC,QAAQ,UAAU,SAAS,kBAAkB,GAAG;AACzE,aAAO;AAAA,QACL,OAAO;AAAA,QACP,OAAO;AAAA,MACT;AAAA,IACF;AAEA,WAAO,EAAE,OAAO,KAAK;AAAA,EACvB;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,SAAkB,SAAyE;AAC/F,UAAM,cAAc,SAAS,eAAe,CAAC;AAC7C,UAAM,QAAQ,YAAY;AAC1B,UAAM,SAAS,YAAY;AAG3B,QAAI;AACJ,QAAI,WAAW,WAAW;AACxB,kBAAY;AAAA,IACd,WAAW,WAAW,WAAW,WAAW,iBAAiB,WAAW,cAAc;AACpF,kBAAY;AAAA,IACd,OAAO;AACL,kBAAY;AAAA,IACd;AAGA,QAAI,WAAW,aAAa,WAAW,OAAO,YAAY,UAAU;AAClE,YAAM,aAAa;AAEnB,UAAI,WAAW,WAAW,WAAW,KAAK;AAExC,cAAM,OAAO,WAAW,QACrB,OAAO,CAAC,MAAM,EAAE,SAAS,UAAU,EAAE,YAAY,EACjD,IAAI,CAAC,MAAM,EAAE,aAAc,CAAC,GAAG,WAAW,EAAE,EAC5C,KAAK,GAAG;AAGX,cAAM,aAAa,oBAAI,IAAY;AACnC,mBAAW,QAAQ,QAAQ,CAAC,MAAM;AAChC,cAAI,EAAE,cAAc;AAClB,kBAAM,UAAU,EAAE,aAAa,CAAC,GAAG;AACnC,gBAAI,QAAS,YAAW,IAAI,OAAO;AAAA,UACrC;AAAA,QACF,CAAC;AAED,cAAM,WACJ,WAAW,OAAO,IACd,MAAM,KAAK,UAAU,EAAE,IAAI,CAAC,QAAQ;AAAA,UAClC;AAAA,UACA,OAAO,WAAW,EAAE;AAAA,QACtB,EAAE,IACF;AAEN,eAAO;AAAA,UACL,SAAS;AAAA,UACT,UAAU,KAAK;AAAA,UACf;AAAA,UACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,UAClC,MAAM;AAAA,YACJ,IAAI;AAAA,YACJ;AAAA,YACA,QAAQ;AAAA,YACR,UAAU,WAAW,SAAS,sBAAsB;AAAA,YACpD,UAAU,WAAW,IAAI;AAAA,YACzB;AAAA,YACA,WAAW,WAAW,IAAI;AAAA,UAC5B;AAAA,UACA,KAAK;AAAA,QACP;AAAA,MACF;AAAA,IACF;AAGA,WAAO;AAAA,MACL,SAAS,WAAW;AAAA,MACpB,UAAU,KAAK;AAAA,MACf;AAAA,MACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,MAClC,MAAM;AAAA,QACJ,IAAI;AAAA,QACJ,MAAM;AAAA,QACN,QAAQ,WAAW,YAAY,cAAc;AAAA,MAC/C;AAAA,MACA,KAAK;AAAA,IACP;AAAA,EACF;AACF;;;ACrDO,IAAM,gBAAN,MAAoB;AAAA,EAGzB,cAAc;AAEZ,SAAK,WAAW,oBAAI,IAAI;AAAA,MACtB,CAAC,UAAU,IAAI,qBAAqB,CAAC;AAAA,MACrC,CAAC,cAAc,IAAI,yBAAyB,CAAC;AAAA,MAC7C,CAAC,YAAY,IAAI,uBAAuB,CAAC;AAAA,MACzC,CAAC,aAAa,IAAI,oBAAoB,CAAC;AAAA,MACvC,CAAC,gBAAgB,IAAI,2BAA2B,CAAC;AAAA,IACnD,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAM,SAAkB,SAAqD;AAE3E,QAAI,SAAS,UAAU;AACrB,aAAO,KAAK,gBAAgB,SAAS,QAAQ,UAAU,OAAO;AAAA,IAChE;AAGA,UAAM,mBAAmB,KAAK,eAAe,SAAS;AAAA,MACpD,aAAa,SAAS;AAAA,MACtB,WAAW,SAAS;AAAA,IACtB,CAAC;AAED,QAAI,CAAC,kBAAkB;AACrB,aAAO;AAAA,QACL,SAAS;AAAA,QACT,OAAO;AAAA,MACT;AAAA,IACF;AAEA,WAAO,KAAK,gBAAgB,SAAS,kBAAkB,OAAO;AAAA,EAChE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,eACE,SACA,SACmC;AAEnC,eAAW,CAAC,UAAU,OAAO,KAAK,KAAK,UAAU;AAC/C,UAAI,QAAQ,QAAQ,SAAS,OAAO,GAAG;AACrC,eAAO;AAAA,MACT;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,SAAS,SAAkB,SAAmD;AAE5E,QAAI,SAAS,UAAU;AACrB,YAAMC,WAAU,KAAK,SAAS,IAAI,QAAQ,QAAQ;AAClD,UAAI,CAACA,UAAS;AACZ,eAAO;AAAA,UACL,OAAO;AAAA,UACP,OAAO,qBAAqB,QAAQ,QAAQ;AAAA,QAC9C;AAAA,MACF;AACA,aAAOA,SAAQ,SAAS,SAAS;AAAA,QAC/B,aAAa,QAAQ;AAAA,QACrB,WAAW,QAAQ;AAAA,MACrB,CAAC;AAAA,IACH;AAGA,UAAM,mBAAmB,KAAK,eAAe,SAAS;AAAA,MACpD,aAAa,SAAS;AAAA,MACtB,WAAW,SAAS;AAAA,IACtB,CAAC;AAED,QAAI,CAAC,kBAAkB;AACrB,aAAO;AAAA,QACL,OAAO;AAAA,QACP,OAAO;AAAA,MACT;AAAA,IACF;AAEA,UAAM,UAAU,KAAK,SAAS,IAAI,gBAAgB;AAClD,QAAI,CAAC,SAAS;AACZ,aAAO;AAAA,QACL,OAAO;AAAA,QACP,OAAO,mCAAmC,gBAAgB;AAAA,MAC5D;AAAA,IACF;AAEA,WAAO,QAAQ,SAAS,SAAS;AAAA,MAC/B,aAAa,SAAS;AAAA,MACtB,WAAW,SAAS;AAAA,IACtB,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,OACE,SACA,UACA,SACS;AACT,UAAM,UAAU,KAAK,SAAS,IAAI,QAAQ;AAC1C,QAAI,CAAC,WAAW,CAAC,QAAQ,QAAQ;AAE/B,aAAO;AAAA,IACT;AAEA,WAAO,QAAQ,OAAO,SAAS,OAAO;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA,EAKQ,gBACN,SACA,UACA,SACqB;AACrB,UAAM,UAAU,KAAK,SAAS,IAAI,QAAQ;AAE1C,QAAI,CAAC,SAAS;AACZ,aAAO;AAAA,QACL,SAAS;AAAA,QACT,OAAO,mCAAmC,QAAQ;AAAA,MACpD;A
AAA,IACF;AAGA,QAAI,WAAW;AACf,QAAI,SAAS,oBAAoB,SAAS,SAAS,gBAAgB,QAAQ,QAAQ;AACjF,iBAAW,QAAQ,OAAO,SAAS,QAAQ,YAAY;AACvD,UAAI,CAAC,UAAU;AACb,eAAO;AAAA,UACL,SAAS;AAAA,UACT;AAAA,UACA,OAAO;AAAA,UACP,UAAU;AAAA,QACZ;AAAA,MACF;AAAA,IACF;AAGA,UAAM,aAAa,QAAQ,SAAS,SAAS;AAAA,MAC3C,aAAa,SAAS;AAAA,MACtB,WAAW,SAAS;AAAA,IACtB,CAAC;AACD,QAAI,CAAC,WAAW,OAAO;AACrB,aAAO;AAAA,QACL,SAAS;AAAA,QACT;AAAA,QACA,OAAO,WAAW;AAAA,QAClB;AAAA,MACF;AAAA,IACF;AAGA,QAAI;AACF,YAAM,QAAQ,QAAQ,MAAM,SAAS;AAAA,QACnC,aAAa,SAAS;AAAA,MACxB,CAAC;AAED,aAAO;AAAA,QACL,SAAS;AAAA,QACT;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS,OAAO;AACd,aAAO;AAAA,QACL,SAAS;AAAA,QACT;AAAA,QACA,OAAO,4BAA4B,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,QAC3F;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,WAAW,UAAiE;AAC1E,WAAO,KAAK,SAAS,IAAI,QAAQ;AAAA,EACnC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,eAAwC;AACtC,WAAO,MAAM,KAAK,KAAK,SAAS,KAAK,CAAC;AAAA,EACxC;AACF;AAKO,SAAS,sBAAqC;AACnD,SAAO,IAAI,cAAc;AAC3B;","names":["schema_exports","axios","axios","axios","WebSocket","import_ws","import_axios","schema_exports","axios","axios","WebSocket","import_axios","import_ws","axios","WebSocket","import_axios","import_axios","axios","axios","axios","import_axios","import_axios","axios","response","axios","import_axios","axios","crypto","import_node_crypto","crypto","handler"]}
+ {"version":3,"sources":["../src/index.ts","../src/router/voice-router.ts","../src/generated/deepgram/schema/listenV1EncodingParameter.ts","../src/generated/deepgram/schema/speakV1EncodingParameter.ts","../src/generated/deepgram/schema/speakV1ContainerParameter.ts","../src/generated/deepgram/schema/speakV1SampleRateParameter.ts","../src/generated/gladia/schema/streamingSupportedEncodingEnum.ts","../src/generated/gladia/schema/streamingSupportedSampleRateEnum.ts","../src/generated/gladia/schema/streamingSupportedBitDepthEnum.ts","../src/constants/defaults.ts","../src/utils/errors.ts","../src/adapters/base-adapter.ts","../src/adapters/gladia-adapter.ts","../src/router/audio-encoding-types.ts","../src/utils/websocket-helpers.ts","../src/utils/validation.ts","../src/utils/transcription-helpers.ts","../src/generated/gladia/api/gladiaControlAPI.ts","../src/generated/gladia/schema/index.ts","../src/generated/gladia/schema/audioChunkAckMessageType.ts","../src/generated/gladia/schema/audioChunkActionType.ts","../src/generated/gladia/schema/audioToTextControllerAudioTranscriptionBodyLanguage.ts","../src/generated/gladia/schema/audioToTextControllerAudioTranscriptionBodyLanguageBehaviour.ts","../src/generated/gladia/schema/audioToTextControllerAudioTranscriptionBodyOutputFormat.ts","../src/generated/gladia/schema/audioToTextControllerAudioTranscriptionBodyTargetTranslationLanguage.ts","../src/generated/gladia/schema/callbackLiveAudioChunkAckMessageEvent.ts","../src/generated/gladia/schema/callbackLiveEndRecordingMessageEvent.ts","../src/generated/gladia/schema/callbackLiveEndSessionMessageEvent.ts","../src/generated/gladia/schema/callbackLiveNamedEntityRecognitionMessageEvent.ts","../src/generated/gladia/schema/callbackLivePostChapterizationMessageEvent.ts","../src/generated/gladia/schema/callbackLivePostFinalTranscriptMessageEvent.ts","../src/generated/gladia/schema/callbackLivePostSummarizationMessageEvent.ts","../src/generated/gladia/schema/callbackLivePostTranscriptMessageEvent.ts","../src/generated/gladia/schema/callbackLiveSentimentAnalysisMessageEvent.ts","../src/generated/gladia/schema/callbackLiveSpeechEndMessageEvent.ts","../src/generated/gladia/schema/callbackLiveSpeechStartMessageEvent.ts","../src/generated/gladia/schema/callbackLiveStartRecordingMessageEvent.ts","../src/generated/gladia/schema/callbackLiveStartSessionMessageEvent.ts","../src/generated/gladia/schema/callbackLiveStopRecordingAckMessageEvent.ts","../src/generated/gladia/schema/callbackLiveTranscriptMessageEvent.ts","../src/generated/gladia/schema/callbackLiveTranslationMessageEvent.ts","../src/generated/gladia/schema/callbackMethodEnum.ts","../src/generated/gladia/schema/callbackTranscriptionErrorPayloadEvent.ts","../src/generated/gladia/schema/callbackTranscriptionSuccessPayloadEvent.ts","../src/generated/gladia/schema/endRecordingMessageType.ts","../src/generated/gladia/schema/endSessionMessageType.ts","../src/generated/gladia/schema/historyControllerGetListV1KindItem.ts","../src/generated/gladia/schema/historyControllerGetListV1StatusItem.ts","../src/generated/gladia/schema/namedEntityRecognitionMessageType.ts","../src/generated/gladia/schema/postChapterizationMessageType.ts","../src/generated/gladia/schema/postFinalTranscriptMessageType.ts","../src/generated/gladia/schema/postSummarizationMessageType.ts","../src/generated/gladia/schema/postTranscriptMessageType.ts","../src/generated/gladia/schema/preRecordedControllerGetPreRecordedJobsV2StatusItem.ts","../src/generated/gladia/schema/preRecordedResponseKind.ts","../src/g
enerated/gladia/schema/preRecordedResponseStatus.ts","../src/generated/gladia/schema/sentimentAnalysisMessageType.ts","../src/generated/gladia/schema/speechEndMessageType.ts","../src/generated/gladia/schema/speechStartMessageType.ts","../src/generated/gladia/schema/startRecordingMessageType.ts","../src/generated/gladia/schema/startSessionMessageType.ts","../src/generated/gladia/schema/stopRecordingAckMessageType.ts","../src/generated/gladia/schema/stopRecordingActionType.ts","../src/generated/gladia/schema/streamingControllerGetStreamingJobsV2StatusItem.ts","../src/generated/gladia/schema/streamingResponseKind.ts","../src/generated/gladia/schema/streamingResponseStatus.ts","../src/generated/gladia/schema/streamingSupportedModels.ts","../src/generated/gladia/schema/streamingSupportedRegions.ts","../src/generated/gladia/schema/subtitlesFormatEnum.ts","../src/generated/gladia/schema/subtitlesStyleEnum.ts","../src/generated/gladia/schema/summaryTypesEnum.ts","../src/generated/gladia/schema/transcriptionControllerListV2KindItem.ts","../src/generated/gladia/schema/transcriptionControllerListV2StatusItem.ts","../src/generated/gladia/schema/transcriptionLanguageCodeEnum.ts","../src/generated/gladia/schema/transcriptMessageType.ts","../src/generated/gladia/schema/translationLanguageCodeEnum.ts","../src/generated/gladia/schema/translationMessageType.ts","../src/generated/gladia/schema/translationModelEnum.ts","../src/generated/gladia/schema/videoToTextControllerVideoTranscriptionBodyLanguage.ts","../src/generated/gladia/schema/videoToTextControllerVideoTranscriptionBodyLanguageBehaviour.ts","../src/generated/gladia/schema/videoToTextControllerVideoTranscriptionBodyOutputFormat.ts","../src/generated/gladia/schema/videoToTextControllerVideoTranscriptionBodyTargetTranslationLanguage.ts","../src/generated/gladia/schema/webhookLiveEndRecordingPayloadEvent.ts","../src/generated/gladia/schema/webhookLiveEndSessionPayloadEvent.ts","../src/generated/gladia/schema/webhookLiveStartRecordingPayloadEvent.ts","../src/generated/gladia/schema/webhookLiveStartSessionPayloadEvent.ts","../src/generated/gladia/schema/webhookTranscriptionCreatedPayloadEvent.ts","../src/generated/gladia/schema/webhookTranscriptionErrorPayloadEvent.ts","../src/generated/gladia/schema/webhookTranscriptionSuccessPayloadEvent.ts","../src/adapters/assemblyai-adapter.ts","../src/generated/assemblyai/api/assemblyAIAPI.ts","../src/generated/assemblyai/schema/index.ts","../src/generated/assemblyai/schema/audioIntelligenceModelStatus.ts","../src/generated/assemblyai/schema/entityType.ts","../src/generated/assemblyai/schema/lemurModel.ts","../src/generated/assemblyai/schema/piiPolicy.ts","../src/generated/assemblyai/schema/redactedAudioStatus.ts","../src/generated/assemblyai/schema/redactPiiAudioQuality.ts","../src/generated/assemblyai/schema/sentiment.ts","../src/generated/assemblyai/schema/speechModel.ts","../src/generated/assemblyai/schema/substitutionPolicy.ts","../src/generated/assemblyai/schema/subtitleFormat.ts","../src/generated/assemblyai/schema/summaryModel.ts","../src/generated/assemblyai/schema/summaryType.ts","../src/generated/assemblyai/schema/transcriptBoostParam.ts","../src/generated/assemblyai/schema/transcriptLanguageCode.ts","../src/generated/assemblyai/schema/transcriptReadyStatus.ts","../src/generated/assemblyai/schema/transcriptStatus.ts","../src/adapters/deepgram-adapter.ts","../src/adapters/azure-stt-adapter.ts","../src/generated/azure/api/speechServicesAPIV31.ts","../src/adapters/openai-whisper-adapter.ts","../src/generated/
openai/api/openAIAPI.ts","../src/adapters/speechmatics-adapter.ts","../src/webhooks/base-webhook.ts","../src/webhooks/gladia-webhook.ts","../src/webhooks/assemblyai-webhook.ts","../src/webhooks/deepgram-webhook.ts","../src/webhooks/azure-webhook.ts","../src/webhooks/speechmatics-webhook.ts","../src/webhooks/webhook-router.ts"],"sourcesContent":["/**\n * Voice Router SDK - Multi-Provider Transcription API\n * Unified interface for Gladia, AssemblyAI, Deepgram, and more\n */\n\n// Main Voice Router exports\nexport * from \"./router\"\nexport * from \"./adapters\"\n\n// Webhook normalization exports\nexport * from \"./webhooks\"\n\n// Provider-specific generated types (for advanced usage)\nexport * as GladiaTypes from \"./generated/gladia/schema\"\nexport * as AssemblyAITypes from \"./generated/assemblyai/schema\"\n","/**\n * VoiceRouter - Unified transcription API bridge\n * Provides a provider-agnostic interface for multiple Speech-to-Text services\n */\n\nimport type { TranscriptionAdapter, ProviderConfig } from \"../adapters/base-adapter\"\nimport type {\n AudioInput,\n StreamEvent,\n StreamingCallbacks,\n StreamingOptions,\n StreamingSession,\n TranscribeOptions,\n TranscriptionProvider,\n UnifiedTranscriptResponse\n} from \"./types\"\nimport type {\n GladiaStreamingOptions,\n DeepgramStreamingOptions,\n AssemblyAIStreamingOptions\n} from \"./provider-streaming-types\"\n\n/**\n * Configuration for VoiceRouter\n */\nexport interface VoiceRouterConfig {\n /**\n * Provider configurations\n * Key: provider name, Value: provider config\n */\n providers: Partial<Record<TranscriptionProvider, ProviderConfig>>\n\n /**\n * Default provider to use when not specified\n */\n defaultProvider?: TranscriptionProvider\n\n /**\n * Strategy for provider selection when multiple providers are configured\n * - 'explicit': Always require provider to be specified (throws error if not)\n * - 'default': Use defaultProvider if not specified\n * - 'round-robin': Rotate between providers for load balancing\n * - 'fastest': Choose provider with lowest current queue (future feature)\n */\n selectionStrategy?: \"explicit\" | \"default\" | \"round-robin\"\n}\n\n/**\n * VoiceRouter - Main class for provider-agnostic transcription\n *\n * Provides a unified interface across multiple Speech-to-Text providers\n * (Gladia, AssemblyAI, Deepgram, etc.). 
Automatically handles provider\n * selection, adapter management, and response normalization.\n *\n * @example Basic usage with single provider\n * ```typescript\n * import { VoiceRouter, GladiaAdapter } from '@meeting-baas/sdk';\n *\n * const router = new VoiceRouter({\n * providers: {\n * gladia: { apiKey: process.env.GLADIA_API_KEY }\n * },\n * defaultProvider: 'gladia'\n * });\n *\n * router.registerAdapter(new GladiaAdapter());\n *\n * const result = await router.transcribe({\n * type: 'url',\n * url: 'https://example.com/audio.mp3'\n * });\n *\n * console.log(result.data.text);\n * ```\n *\n * @example Multi-provider with round-robin\n * ```typescript\n * const router = new VoiceRouter({\n * providers: {\n * gladia: { apiKey: process.env.GLADIA_API_KEY },\n * assemblyai: { apiKey: process.env.ASSEMBLYAI_API_KEY }\n * },\n * selectionStrategy: 'round-robin'\n * });\n *\n * router.registerAdapter(new GladiaAdapter());\n * router.registerAdapter(new AssemblyAIAdapter());\n *\n * // Automatically alternates between providers\n * await router.transcribe(audio1); // Uses Gladia\n * await router.transcribe(audio2); // Uses AssemblyAI\n * await router.transcribe(audio3); // Uses Gladia again\n * ```\n */\nexport class VoiceRouter {\n private adapters: Map<TranscriptionProvider, TranscriptionAdapter> = new Map()\n private config: VoiceRouterConfig\n private roundRobinIndex = 0\n\n constructor(config: VoiceRouterConfig) {\n this.config = {\n selectionStrategy: \"default\",\n ...config\n }\n\n // Validate configuration\n if (Object.keys(config.providers).length === 0) {\n throw new Error(\"VoiceRouter requires at least one provider configuration\")\n }\n\n // If using default strategy, ensure a default provider is set\n if (this.config.selectionStrategy === \"default\" && !this.config.defaultProvider) {\n // Auto-select first provider as default\n this.config.defaultProvider = Object.keys(config.providers)[0] as TranscriptionProvider\n }\n }\n\n /**\n * Register an adapter for a provider\n *\n * Call this method for each provider you want to use. The adapter will be\n * initialized with the configuration provided in the constructor.\n *\n * @param adapter - Provider adapter instance to register\n * @throws {Error} If no configuration found for the provider\n *\n * @example\n * ```typescript\n * const router = new VoiceRouter({\n * providers: {\n * gladia: { apiKey: 'YOUR_KEY' }\n * }\n * });\n *\n * router.registerAdapter(new GladiaAdapter());\n * ```\n */\n registerAdapter(adapter: TranscriptionAdapter): void {\n // Initialize adapter with config\n const providerConfig = this.config.providers[adapter.name]\n if (!providerConfig) {\n throw new Error(`No configuration found for provider: ${adapter.name}`)\n }\n\n adapter.initialize(providerConfig)\n this.adapters.set(adapter.name, adapter)\n }\n\n /**\n * Get an adapter by provider name\n */\n getAdapter(provider: TranscriptionProvider): TranscriptionAdapter {\n const adapter = this.adapters.get(provider)\n if (!adapter) {\n throw new Error(\n `Provider '${provider}' is not registered. Available providers: ${Array.from(this.adapters.keys()).join(\", \")}`\n )\n }\n return adapter\n }\n\n /**\n * Select provider based on configured strategy\n */\n private selectProvider(preferredProvider?: TranscriptionProvider): TranscriptionProvider {\n // If provider explicitly specified, use it\n if (preferredProvider) {\n if (!this.adapters.has(preferredProvider)) {\n throw new Error(\n `Provider '${preferredProvider}' is not registered. 
Available providers: ${Array.from(this.adapters.keys()).join(\", \")}`\n )\n }\n return preferredProvider\n }\n\n // Apply selection strategy\n switch (this.config.selectionStrategy) {\n case \"explicit\":\n throw new Error(\n \"Provider must be explicitly specified when using 'explicit' selection strategy\"\n )\n\n case \"round-robin\": {\n const providers = Array.from(this.adapters.keys())\n const provider = providers[this.roundRobinIndex % providers.length]\n this.roundRobinIndex++\n return provider\n }\n\n case \"default\":\n default:\n if (!this.config.defaultProvider) {\n throw new Error(\"No default provider configured\")\n }\n return this.config.defaultProvider\n }\n }\n\n /**\n * Transcribe audio using a specific provider or the default\n *\n * Submit audio for transcription. The provider will be selected based on\n * your configuration strategy (explicit, default, or round-robin).\n *\n * @param audio - Audio input (URL, file buffer, or stream)\n * @param options - Transcription options (language, diarization, etc.)\n * @param options.provider - Specific provider to use (overrides selection strategy)\n * @returns Unified transcription response with normalized format\n * @throws {Error} If provider not registered or selection fails\n *\n * @example URL audio\n * ```typescript\n * const result = await router.transcribe({\n * type: 'url',\n * url: 'https://example.com/audio.mp3'\n * }, {\n * language: 'en',\n * diarization: true,\n * summarization: true\n * });\n *\n * if (result.success) {\n * console.log('Transcript:', result.data.text);\n * console.log('Speakers:', result.data.speakers);\n * console.log('Summary:', result.data.summary);\n * }\n * ```\n *\n * @example Specific provider\n * ```typescript\n * const result = await router.transcribe(audio, {\n * provider: 'gladia', // Force use of Gladia\n * language: 'en'\n * });\n * ```\n */\n async transcribe(\n audio: AudioInput,\n options?: TranscribeOptions & { provider?: TranscriptionProvider }\n ): Promise<UnifiedTranscriptResponse> {\n const provider = this.selectProvider(options?.provider)\n const adapter = this.getAdapter(provider)\n\n // Remove provider from options before passing to adapter\n const { provider: _, ...adapterOptions } = options || {}\n\n return adapter.transcribe(audio, adapterOptions)\n }\n\n /**\n * Get transcription result by ID\n * Provider must be specified since IDs are provider-specific\n */\n async getTranscript(\n transcriptId: string,\n provider: TranscriptionProvider\n ): Promise<UnifiedTranscriptResponse> {\n const adapter = this.getAdapter(provider)\n return adapter.getTranscript(transcriptId)\n }\n\n /**\n * Stream audio for real-time transcription with Gladia\n *\n * @param options - Gladia-specific streaming options (type-safe from OpenAPI spec)\n * @param callbacks - Event callbacks for transcription results\n * @returns Promise that resolves with a StreamingSession\n *\n * @example Gladia streaming (type-safe!)\n * ```typescript\n * const session = await router.transcribeStream({\n * provider: 'gladia',\n * encoding: 'wav/pcm', // ✅ Only Gladia encodings allowed\n * sampleRate: 16000, // ✅ Only 8000, 16000, 32000, 44100, 48000\n * channels: 1\n * }, {\n * onTranscript: (event) => console.log(event.text),\n * onError: (error) => console.error(error)\n * });\n * ```\n */\n transcribeStream(\n options: GladiaStreamingOptions & { provider: \"gladia\" },\n callbacks?: StreamingCallbacks\n ): Promise<StreamingSession>\n\n /**\n * Stream audio for real-time transcription with Deepgram\n *\n * 
@param options - Deepgram-specific streaming options (type-safe from OpenAPI spec)\n * @param callbacks - Event callbacks for transcription results\n * @returns Promise that resolves with a StreamingSession\n *\n * @example Deepgram streaming (type-safe!)\n * ```typescript\n * const session = await router.transcribeStream({\n * provider: 'deepgram',\n * encoding: 'linear16', // ✅ Only Deepgram encodings allowed\n * sampleRate: 16000,\n * language: 'en',\n * diarization: true\n * }, {\n * onTranscript: (event) => console.log(event.text)\n * });\n * ```\n */\n transcribeStream(\n options: DeepgramStreamingOptions & { provider: \"deepgram\" },\n callbacks?: StreamingCallbacks\n ): Promise<StreamingSession>\n\n /**\n * Stream audio for real-time transcription with AssemblyAI\n *\n * @param options - AssemblyAI-specific streaming options (type-safe from OpenAPI spec)\n * @param callbacks - Event callbacks for transcription results\n * @returns Promise that resolves with a StreamingSession\n *\n * @example AssemblyAI streaming (type-safe!)\n * ```typescript\n * const session = await router.transcribeStream({\n * provider: 'assemblyai',\n * sampleRate: 16000 // ✅ Only supported sample rates\n * }, {\n * onTranscript: (event) => console.log(event.text)\n * });\n * ```\n */\n transcribeStream(\n options: AssemblyAIStreamingOptions & { provider: \"assemblyai\" },\n callbacks?: StreamingCallbacks\n ): Promise<StreamingSession>\n\n /**\n * Stream audio for real-time transcription (uses default provider)\n *\n * @param options - Generic streaming options\n * @param callbacks - Event callbacks for transcription results\n * @returns Promise that resolves with a StreamingSession\n */\n transcribeStream(\n options?: StreamingOptions,\n callbacks?: StreamingCallbacks\n ): Promise<StreamingSession>\n\n // Implementation\n async transcribeStream(\n options?:\n | (GladiaStreamingOptions & { provider: \"gladia\" })\n | (DeepgramStreamingOptions & { provider: \"deepgram\" })\n | (AssemblyAIStreamingOptions & { provider: \"assemblyai\" })\n | (StreamingOptions & { provider?: TranscriptionProvider }),\n callbacks?: StreamingCallbacks\n ): Promise<StreamingSession> {\n const provider = this.selectProvider(options?.provider)\n const adapter = this.getAdapter(provider)\n\n // Check if adapter supports streaming\n if (!adapter.capabilities.streaming || !adapter.transcribeStream) {\n throw new Error(`Provider '${provider}' does not support streaming transcription`)\n }\n\n // Remove provider from options before passing to adapter\n // Cast to StreamingOptions since adapter will handle provider-specific conversions\n const { provider: _, ...adapterOptions } = options || {}\n\n return adapter.transcribeStream(adapterOptions as StreamingOptions, callbacks)\n }\n\n /**\n * Delete a transcription\n * Not all providers support this operation\n */\n async deleteTranscript(\n transcriptId: string,\n provider: TranscriptionProvider\n ): Promise<{ success: boolean }> {\n const adapter = this.getAdapter(provider)\n\n if (!adapter.deleteTranscript) {\n throw new Error(`Provider '${provider}' does not support deleting transcripts`)\n }\n\n return adapter.deleteTranscript(transcriptId)\n }\n\n /**\n * List recent transcriptions\n * Not all providers support this operation\n */\n async listTranscripts(\n provider: TranscriptionProvider,\n options?: {\n limit?: number\n offset?: number\n status?: string\n }\n ): Promise<{\n transcripts: UnifiedTranscriptResponse[]\n total?: number\n hasMore?: boolean\n }> {\n const adapter = 
this.getAdapter(provider)\n\n if (!adapter.listTranscripts) {\n throw new Error(`Provider '${provider}' does not support listing transcripts`)\n }\n\n return adapter.listTranscripts(options)\n }\n\n /**\n * Get capabilities for a specific provider\n */\n getProviderCapabilities(provider: TranscriptionProvider) {\n const adapter = this.getAdapter(provider)\n return adapter.capabilities\n }\n\n /**\n * Get all registered providers\n */\n getRegisteredProviders(): TranscriptionProvider[] {\n return Array.from(this.adapters.keys())\n }\n\n /**\n * Get raw provider client for advanced usage\n */\n getRawProviderClient(provider: TranscriptionProvider): unknown {\n const adapter = this.getAdapter(provider)\n\n if (!adapter.getRawClient) {\n throw new Error(`Provider '${provider}' does not expose a raw client`)\n }\n\n return adapter.getRawClient()\n }\n}\n\n/**\n * Factory function to create a VoiceRouter with auto-registered adapters\n */\nexport function createVoiceRouter(\n config: VoiceRouterConfig,\n adapters?: TranscriptionAdapter[]\n): VoiceRouter {\n const router = new VoiceRouter(config)\n\n // Register provided adapters\n if (adapters && adapters.length > 0) {\n for (const adapter of adapters) {\n router.registerAdapter(adapter)\n }\n }\n\n return router\n}\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Deepgram API Specification\n * APIs for speech-to-text transcription, text-to-speech synthesis, language understanding, and account management.\n\n * OpenAPI spec version: 1.0.0\n */\n\n/**\n * ListenV1EncodingParameter type definition\n */\n\n/**\n * ListenV1EncodingParameter type definition\n */\n\n/**\n * ListenV1EncodingParameter type definition\n */\n\n/**\n * ListenV1EncodingParameter type definition\n */\n\n/**\n * ListenV1EncodingParameter type definition\n */\n\n/**\n * ListenV1EncodingParameter type definition\n */\n\n/**\n * ListenV1EncodingParameter type definition\n */\n\n/**\n * ListenV1EncodingParameter type definition\n */\nexport type ListenV1EncodingParameter = typeof ListenV1EncodingParameter[keyof typeof ListenV1EncodingParameter];\n\nexport const ListenV1EncodingParameter = {\n linear16: \"linear16\",\n flac: \"flac\",\n mulaw: \"mulaw\",\n opus: \"opus\",\n speex: \"speex\",\n g729: \"g729\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Deepgram API Specification\n * APIs for speech-to-text transcription, text-to-speech synthesis, language understanding, and account management.\n\n * OpenAPI spec version: 1.0.0\n */\n\n/**\n * SpeakV1EncodingParameter type definition\n */\n\n/**\n * SpeakV1EncodingParameter type definition\n */\n\n/**\n * SpeakV1EncodingParameter type definition\n */\n\n/**\n * SpeakV1EncodingParameter type definition\n */\n\n/**\n * SpeakV1EncodingParameter type definition\n */\n\n/**\n * SpeakV1EncodingParameter type definition\n */\n\n/**\n * SpeakV1EncodingParameter type definition\n */\nexport type SpeakV1EncodingParameter = typeof SpeakV1EncodingParameter[keyof typeof SpeakV1EncodingParameter];\n\nexport const SpeakV1EncodingParameter = {\n linear16: \"linear16\",\n aac: \"aac\",\n opus: \"opus\",\n mp3: \"mp3\",\n flac: \"flac\",\n mulaw: \"mulaw\",\n alaw: \"alaw\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Deepgram API Specification\n * APIs for speech-to-text transcription, text-to-speech synthesis, language understanding, and account management.\n\n * OpenAPI spec version: 1.0.0\n */\n\n/**\n * SpeakV1ContainerParameter type definition\n 
*/\n\n/**\n * SpeakV1ContainerParameter type definition\n */\n\n/**\n * SpeakV1ContainerParameter type definition\n */\n\n/**\n * SpeakV1ContainerParameter type definition\n */\n\n/**\n * SpeakV1ContainerParameter type definition\n */\n\n/**\n * SpeakV1ContainerParameter type definition\n */\n\n/**\n * SpeakV1ContainerParameter type definition\n */\nexport type SpeakV1ContainerParameter = typeof SpeakV1ContainerParameter[keyof typeof SpeakV1ContainerParameter];\n\nexport const SpeakV1ContainerParameter = {\n none: \"none\",\n wav: \"wav\",\n ogg: \"ogg\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Deepgram API Specification\n * APIs for speech-to-text transcription, text-to-speech synthesis, language understanding, and account management.\n\n * OpenAPI spec version: 1.0.0\n */\n\n/**\n * SpeakV1SampleRateParameter type definition\n */\n\n/**\n * SpeakV1SampleRateParameter type definition\n */\n\n/**\n * SpeakV1SampleRateParameter type definition\n */\n\n/**\n * SpeakV1SampleRateParameter type definition\n */\n\n/**\n * SpeakV1SampleRateParameter type definition\n */\n\n/**\n * SpeakV1SampleRateParameter type definition\n */\n\n/**\n * SpeakV1SampleRateParameter type definition\n */\n\n/**\n * SpeakV1SampleRateParameter type definition\n */\nexport type SpeakV1SampleRateParameter = typeof SpeakV1SampleRateParameter[keyof typeof SpeakV1SampleRateParameter];\n\nexport const SpeakV1SampleRateParameter = {\n NUMBER_16000: 16000,\n NUMBER_24000: 24000,\n NUMBER_32000: 32000,\n NUMBER_48000: 48000,\n null: null,\n NUMBER_8000: 8000,\n NUMBER_22050: 22050\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\n/**\n * The encoding format of the audio stream. 
Supported formats: \n- PCM: 8, 16, 24, and 32 bits \n- A-law: 8 bits \n- μ-law: 8 bits \n\nNote: No need to add WAV headers to raw audio as the API supports both formats.\n */\nexport type StreamingSupportedEncodingEnum =\n (typeof StreamingSupportedEncodingEnum)[keyof typeof StreamingSupportedEncodingEnum]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const StreamingSupportedEncodingEnum = {\n \"wav/pcm\": \"wav/pcm\",\n \"wav/alaw\": \"wav/alaw\",\n \"wav/ulaw\": \"wav/ulaw\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\n/**\n * The sample rate of the audio stream\n */\nexport type StreamingSupportedSampleRateEnum =\n (typeof StreamingSupportedSampleRateEnum)[keyof typeof StreamingSupportedSampleRateEnum]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const StreamingSupportedSampleRateEnum = {\n NUMBER_8000: 8000,\n NUMBER_16000: 16000,\n NUMBER_32000: 32000,\n NUMBER_44100: 44100,\n NUMBER_48000: 48000\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\n/**\n * The bit depth of the audio stream\n */\nexport type StreamingSupportedBitDepthEnum =\n (typeof StreamingSupportedBitDepthEnum)[keyof typeof StreamingSupportedBitDepthEnum]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const StreamingSupportedBitDepthEnum = {\n NUMBER_8: 8,\n NUMBER_16: 16,\n NUMBER_24: 24,\n NUMBER_32: 32\n} as const\n","/**\n * Default configuration constants for Voice Router SDK\n *\n * These constants provide sensible defaults for timeouts, polling intervals,\n * and other configuration values used across all adapters.\n */\n\n/**\n * Default timeout values for different operation types (in milliseconds)\n */\nexport const DEFAULT_TIMEOUTS = {\n /** Standard HTTP request timeout for API calls (60 seconds) */\n HTTP_REQUEST: 60000,\n\n /** Audio processing timeout for long audio files (120 seconds) */\n AUDIO_PROCESSING: 120000,\n\n /** WebSocket connection establishment timeout (10 seconds) */\n WS_CONNECTION: 10000,\n\n /** WebSocket graceful close timeout (5 seconds) */\n WS_CLOSE: 5000\n} as const\n\n/**\n * Default polling configuration for async transcription jobs\n */\nexport const DEFAULT_POLLING = {\n /** Maximum number of polling attempts before timing out */\n MAX_ATTEMPTS: 60,\n\n /** Standard interval between polling attempts (2 seconds) */\n INTERVAL_MS: 2000,\n\n /** Slower interval for long-running jobs (3 seconds) */\n SLOW_INTERVAL_MS: 3000\n} as const\n","/**\n * Standardized error handling utilities for Voice Router SDK\n *\n * Provides consistent error codes, messages, and formatting across all adapters.\n */\n\n/**\n * Standard error codes used across all providers\n *\n * These codes provide a consistent error taxonomy regardless of which\n * provider is being used.\n */\nexport const ERROR_CODES = {\n /** Failed to parse API response or WebSocket message */\n PARSE_ERROR: \"PARSE_ERROR\",\n\n /** WebSocket connection error */\n WEBSOCKET_ERROR: \"WEBSOCKET_ERROR\",\n\n /** Async transcription job did not complete within timeout */\n POLLING_TIMEOUT: \"POLLING_TIMEOUT\",\n\n /** Transcription processing failed on provider side */\n TRANSCRIPTION_ERROR: \"TRANSCRIPTION_ERROR\",\n\n /** Connection attempt timed out */\n CONNECTION_TIMEOUT: \"CONNECTION_TIMEOUT\",\n\n /** Invalid input provided to API */\n INVALID_INPUT: 
\"INVALID_INPUT\",\n\n /** Requested operation not supported by provider */\n NOT_SUPPORTED: \"NOT_SUPPORTED\",\n\n /** No transcription results available */\n NO_RESULTS: \"NO_RESULTS\",\n\n /** Unspecified or unknown error */\n UNKNOWN_ERROR: \"UNKNOWN_ERROR\"\n} as const\n\nexport type ErrorCode = (typeof ERROR_CODES)[keyof typeof ERROR_CODES]\n\n/**\n * Default error messages for each error code\n *\n * These can be overridden with custom messages when creating errors.\n */\nexport const ERROR_MESSAGES: Record<ErrorCode, string> = {\n PARSE_ERROR: \"Failed to parse response data\",\n WEBSOCKET_ERROR: \"WebSocket connection error\",\n POLLING_TIMEOUT: \"Transcription did not complete within timeout period\",\n TRANSCRIPTION_ERROR: \"Transcription processing failed\",\n CONNECTION_TIMEOUT: \"Connection attempt timed out\",\n INVALID_INPUT: \"Invalid input provided\",\n NOT_SUPPORTED: \"Operation not supported by this provider\",\n NO_RESULTS: \"No transcription results available\",\n UNKNOWN_ERROR: \"An unknown error occurred\"\n}\n\n/**\n * Standard error object structure\n */\nexport interface StandardError {\n /** Error code from ERROR_CODES */\n code: string\n /** Human-readable error message */\n message: string\n /** HTTP status code if applicable */\n statusCode?: number\n /** Additional error details */\n details?: unknown\n}\n\n/**\n * Create a standardized error object\n *\n * @param code - Error code from ERROR_CODES\n * @param customMessage - Optional custom message (defaults to standard message)\n * @param details - Optional additional error details\n * @returns Standardized error object\n *\n * @example\n * ```typescript\n * throw createError(ERROR_CODES.PARSE_ERROR, undefined, rawError)\n *\n * throw createError(\n * ERROR_CODES.TRANSCRIPTION_ERROR,\n * \"Audio file format not supported\",\n * { format: \"mp4\", supported: [\"wav\", \"mp3\"] }\n * )\n * ```\n */\nexport function createError(\n code: ErrorCode,\n customMessage?: string,\n details?: unknown\n): StandardError {\n return {\n code,\n message: customMessage || ERROR_MESSAGES[code],\n details\n }\n}\n\n/**\n * Create error from caught exception\n *\n * Safely extracts error information from unknown caught values.\n *\n * @param error - Caught error (any type)\n * @param defaultCode - Error code to use if not extractable\n * @param statusCode - HTTP status code if applicable\n * @returns Standardized error object\n *\n * @example\n * ```typescript\n * try {\n * await someOperation()\n * } catch (error) {\n * return { success: false, error: createErrorFromException(error) }\n * }\n * ```\n */\nexport function createErrorFromException(\n error: unknown,\n defaultCode: ErrorCode = ERROR_CODES.UNKNOWN_ERROR,\n statusCode?: number\n): StandardError {\n if (error instanceof Error) {\n const err = error as Error & { statusCode?: number; code?: string }\n return {\n code: err.code || defaultCode,\n message: err.message || ERROR_MESSAGES[defaultCode],\n statusCode: statusCode || err.statusCode,\n details: error\n }\n }\n\n return {\n code: defaultCode,\n message: String(error) || ERROR_MESSAGES[defaultCode],\n statusCode,\n details: error\n }\n}\n","/**\n * Base adapter interface for transcription providers\n * All provider adapters must implement this interface\n */\n\nimport type {\n AudioInput,\n ProviderCapabilities,\n StreamEvent,\n StreamingCallbacks,\n StreamingOptions,\n StreamingSession,\n TranscribeOptions,\n TranscriptionProvider,\n UnifiedTranscriptResponse\n} from \"../router/types\"\nimport { 
DEFAULT_TIMEOUTS, DEFAULT_POLLING } from \"../constants/defaults\"\nimport { ERROR_CODES, type ErrorCode } from \"../utils/errors\"\n\n/**\n * Provider configuration\n */\nexport interface ProviderConfig {\n /** API key for authentication */\n apiKey: string\n /** Base API URL (optional, uses provider default if not specified) */\n baseUrl?: string\n /** Request timeout in milliseconds */\n timeout?: number\n /** Custom headers to include in requests */\n headers?: Record<string, string>\n /** Additional provider-specific options */\n options?: Record<string, unknown>\n}\n\n/**\n * Base adapter interface that all provider adapters must implement\n */\nexport interface TranscriptionAdapter {\n /**\n * Provider name\n */\n readonly name: TranscriptionProvider\n\n /**\n * Provider capabilities\n */\n readonly capabilities: ProviderCapabilities\n\n /**\n * Initialize the adapter with configuration\n */\n initialize(config: ProviderConfig): void\n\n /**\n * Submit audio for transcription (async)\n * Returns immediately with a job ID that can be polled\n */\n transcribe(audio: AudioInput, options?: TranscribeOptions): Promise<UnifiedTranscriptResponse>\n\n /**\n * Get transcription result by ID\n * Used to poll for results after async submission\n */\n getTranscript(transcriptId: string): Promise<UnifiedTranscriptResponse>\n\n /**\n * Stream audio for real-time transcription (callback-based)\n * Only available if capabilities.streaming is true\n *\n * This method creates a streaming session that accepts audio chunks\n * and returns transcription results via callbacks.\n *\n * @param options - Streaming configuration options\n * @param callbacks - Event callbacks for transcription results\n * @returns Promise that resolves with a StreamingSession\n *\n * @example\n * ```typescript\n * const session = await adapter.transcribeStream({\n * encoding: 'linear16',\n * sampleRate: 16000,\n * language: 'en'\n * }, {\n * onTranscript: (event) => console.log(event.text),\n * onError: (error) => console.error(error)\n * });\n *\n * // Send audio chunks\n * await session.sendAudio({ data: audioBuffer });\n *\n * // Close when done\n * await session.close();\n * ```\n */\n transcribeStream?(\n options?: StreamingOptions,\n callbacks?: StreamingCallbacks\n ): Promise<StreamingSession>\n\n /**\n * Stream audio for real-time transcription (async iterator)\n * Alternative streaming API that returns an async iterable\n * Only available if capabilities.streaming is true\n *\n * @deprecated Prefer transcribeStream() with callbacks for better control\n */\n transcribeStreamIterator?(\n audioStream: ReadableStream,\n options?: StreamingOptions\n ): AsyncIterable<StreamEvent>\n\n /**\n * Delete a transcription\n * Not all providers support deletion\n */\n deleteTranscript?(transcriptId: string): Promise<{ success: boolean }>\n\n /**\n * List recent transcriptions\n * Not all providers support listing\n */\n listTranscripts?(options?: { limit?: number; offset?: number; status?: string }): Promise<{\n transcripts: UnifiedTranscriptResponse[]\n total?: number\n hasMore?: boolean\n }>\n\n /**\n * Get provider-specific raw client\n * For advanced users who need direct access to provider APIs\n */\n getRawClient?(): unknown\n}\n\n/**\n * Abstract base class for adapters (optional convenience)\n * Providers can extend this or implement TranscriptionAdapter directly\n */\nexport abstract class BaseAdapter implements TranscriptionAdapter {\n abstract readonly name: TranscriptionProvider\n abstract readonly capabilities: 
ProviderCapabilities\n\n /**\n * Base URL for provider API (must be defined by subclass)\n */\n protected abstract baseUrl: string\n\n protected config?: ProviderConfig\n\n initialize(config: ProviderConfig): void {\n this.config = config\n }\n\n abstract transcribe(\n audio: AudioInput,\n options?: TranscribeOptions\n ): Promise<UnifiedTranscriptResponse>\n\n abstract getTranscript(transcriptId: string): Promise<UnifiedTranscriptResponse>\n\n /**\n * Helper method to create error responses with stack traces\n *\n * @param error - Error object or unknown error\n * @param statusCode - Optional HTTP status code\n * @param code - Optional error code (defaults to extracted or UNKNOWN_ERROR)\n */\n protected createErrorResponse(\n error: Error | unknown,\n statusCode?: number,\n code?: ErrorCode\n ): UnifiedTranscriptResponse {\n const err = error as Error & {\n statusCode?: number\n code?: string\n response?: { data?: any; status?: number; statusText?: string }\n }\n\n // Extract HTTP error details if present (axios errors)\n const httpStatus = statusCode || err.statusCode || err.response?.status\n const httpStatusText = err.response?.statusText\n const responseData = err.response?.data\n\n return {\n success: false,\n provider: this.name,\n error: {\n code: code || err.code || ERROR_CODES.UNKNOWN_ERROR,\n message: err.message || \"An unknown error occurred\",\n statusCode: httpStatus,\n details: {\n // Include full error object\n error: error,\n // Include stack trace if available\n stack: err.stack,\n // Include HTTP response details\n httpStatus,\n httpStatusText,\n responseData,\n // Include provider name for debugging\n provider: this.name\n }\n }\n }\n }\n\n /**\n * Helper method to validate configuration\n */\n protected validateConfig(): void {\n if (!this.config) {\n throw new Error(`Adapter ${this.name} is not initialized. Call initialize() first.`)\n }\n if (!this.config.apiKey) {\n throw new Error(`API key is required for ${this.name} provider`)\n }\n }\n\n /**\n * Build axios config for generated API client functions\n *\n * @param authHeaderName - Header name for API key (e.g., \"Authorization\", \"x-gladia-key\")\n * @param authHeaderValue - Optional function to format auth header value (defaults to raw API key)\n * @returns Axios config object\n */\n protected getAxiosConfig(\n authHeaderName: string = \"Authorization\",\n authHeaderValue?: (apiKey: string) => string\n ): {\n baseURL: string\n timeout: number\n headers: Record<string, string>\n } {\n this.validateConfig()\n\n const authValue = authHeaderValue ? 
authHeaderValue(this.config!.apiKey) : this.config!.apiKey\n\n return {\n baseURL: this.config!.baseUrl || this.baseUrl,\n timeout: this.config!.timeout || DEFAULT_TIMEOUTS.HTTP_REQUEST,\n headers: {\n [authHeaderName]: authValue,\n \"Content-Type\": \"application/json\",\n ...this.config!.headers\n }\n }\n }\n\n /**\n * Generic polling helper for async transcription jobs\n *\n * Polls getTranscript() until job completes or times out.\n *\n * @param transcriptId - Job/transcript ID to poll\n * @param options - Polling configuration\n * @returns Final transcription result\n */\n protected async pollForCompletion(\n transcriptId: string,\n options?: {\n maxAttempts?: number\n intervalMs?: number\n }\n ): Promise<UnifiedTranscriptResponse> {\n const { maxAttempts = DEFAULT_POLLING.MAX_ATTEMPTS, intervalMs = DEFAULT_POLLING.INTERVAL_MS } =\n options || {}\n\n for (let attempt = 0; attempt < maxAttempts; attempt++) {\n const result = await this.getTranscript(transcriptId)\n\n if (!result.success) {\n return result\n }\n\n const status = result.data?.status\n if (status === \"completed\") {\n return result\n }\n\n if (status === \"error\") {\n return this.createErrorResponse(\n new Error(\"Transcription failed\"),\n undefined,\n ERROR_CODES.TRANSCRIPTION_ERROR\n )\n }\n\n await new Promise((resolve) => setTimeout(resolve, intervalMs))\n }\n\n return {\n success: false,\n provider: this.name,\n error: {\n code: ERROR_CODES.POLLING_TIMEOUT,\n message: `Transcription did not complete after ${maxAttempts} attempts`\n }\n }\n }\n}\n","/**\n * Gladia transcription provider adapter\n * Documentation: https://docs.gladia.io/\n */\n\nimport axios from \"axios\"\nimport WebSocket from \"ws\"\nimport type {\n AudioChunk,\n AudioInput,\n ProviderCapabilities,\n StreamingCallbacks,\n StreamingOptions,\n StreamingSession,\n TranscribeOptions,\n UnifiedTranscriptResponse\n} from \"../router/types\"\nimport { mapEncodingToProvider } from \"../router/audio-encoding-types\"\nimport { BaseAdapter, type ProviderConfig } from \"./base-adapter\"\n\n// Import utilities\nimport { ERROR_CODES } from \"../utils/errors\"\nimport {\n waitForWebSocketOpen,\n closeWebSocket,\n setupWebSocketHandlers,\n validateSessionForAudio\n} from \"../utils/websocket-helpers\"\nimport { validateEnumValue } from \"../utils/validation\"\nimport {\n extractSpeakersFromUtterances,\n extractWords as extractWordsUtil,\n normalizeStatus\n} from \"../utils/transcription-helpers\"\nimport type { SessionStatus } from \"../router/types\"\n\n// Import generated API client functions - FULL TYPE SAFETY!\nimport {\n preRecordedControllerInitPreRecordedJobV2,\n preRecordedControllerGetPreRecordedJobV2,\n streamingControllerInitStreamingSessionV2\n} from \"../generated/gladia/api/gladiaControlAPI\"\n\n// Import Gladia generated types\nimport type { InitTranscriptionRequest } from \"../generated/gladia/schema/initTranscriptionRequest\"\nimport type { PreRecordedResponse } from \"../generated/gladia/schema/preRecordedResponse\"\nimport type { StreamingRequest } from \"../generated/gladia/schema/streamingRequest\"\nimport type { TranscriptionDTO } from \"../generated/gladia/schema/transcriptionDTO\"\nimport type { UtteranceDTO } from \"../generated/gladia/schema/utteranceDTO\"\nimport type { WordDTO } from \"../generated/gladia/schema/wordDTO\"\n// WebSocket message types for type-safe parsing\nimport type { TranscriptMessage } from \"../generated/gladia/schema/transcriptMessage\"\n// Import Gladia's supported values from OpenAPI-generated schema (type 
safety!)\nimport { StreamingSupportedSampleRateEnum } from \"../generated/gladia/schema/streamingSupportedSampleRateEnum\"\nimport type { StreamingSupportedEncodingEnum } from \"../generated/gladia/schema/streamingSupportedEncodingEnum\"\nimport type { TranscriptionLanguageCodeEnum } from \"../generated/gladia/schema/transcriptionLanguageCodeEnum\"\n\n/**\n * Gladia transcription provider adapter\n *\n * Implements transcription for the Gladia API with support for:\n * - Synchronous and asynchronous transcription\n * - Speaker diarization (identifying different speakers)\n * - Multi-language detection and transcription\n * - Summarization and sentiment analysis\n * - Custom vocabulary boosting\n * - Word-level timestamps\n *\n * @see https://docs.gladia.io/ Gladia API Documentation\n *\n * @example Basic transcription\n * ```typescript\n * import { GladiaAdapter } from '@meeting-baas/sdk';\n *\n * const adapter = new GladiaAdapter();\n * adapter.initialize({\n * apiKey: process.env.GLADIA_API_KEY\n * });\n *\n * const result = await adapter.transcribe({\n * type: 'url',\n * url: 'https://example.com/audio.mp3'\n * }, {\n * language: 'en',\n * diarization: true\n * });\n *\n * console.log(result.data.text);\n * console.log(result.data.speakers);\n * ```\n *\n * @example With summarization\n * ```typescript\n * const result = await adapter.transcribe(audio, {\n * language: 'en',\n * summarization: true,\n * sentimentAnalysis: true\n * });\n *\n * console.log('Summary:', result.data.summary);\n * ```\n */\nexport class GladiaAdapter extends BaseAdapter {\n readonly name = \"gladia\" as const\n readonly capabilities: ProviderCapabilities = {\n streaming: true,\n diarization: true,\n wordTimestamps: true,\n languageDetection: true,\n customVocabulary: true,\n summarization: true,\n sentimentAnalysis: true,\n entityDetection: true,\n piiRedaction: false // Gladia doesn't have PII redaction in their API\n }\n\n protected baseUrl = \"https://api.gladia.io\"\n\n /**\n * Get axios config for generated API client functions\n * Configures headers and base URL using Gladia's x-gladia-key header\n */\n protected getAxiosConfig() {\n return super.getAxiosConfig(\"x-gladia-key\")\n }\n\n /**\n * Submit audio for transcription\n *\n * Sends audio to Gladia API for transcription. If a webhook URL is provided,\n * returns immediately with the job ID. 
Otherwise, polls until completion.\n *\n * @param audio - Audio input (currently only URL type supported)\n * @param options - Transcription options\n * @param options.language - Language code (e.g., 'en', 'es', 'fr')\n * @param options.languageDetection - Enable automatic language detection\n * @param options.diarization - Enable speaker identification\n * @param options.speakersExpected - Number of expected speakers (for diarization)\n * @param options.summarization - Generate text summary\n * @param options.sentimentAnalysis - Analyze sentiment of transcription\n * @param options.customVocabulary - Words to boost in recognition\n * @param options.webhookUrl - Callback URL for async results\n * @returns Normalized transcription response\n * @throws {Error} If audio type is not 'url' (file/stream not yet supported)\n *\n * @example Simple transcription\n * ```typescript\n * const result = await adapter.transcribe({\n * type: 'url',\n * url: 'https://example.com/meeting.mp3'\n * });\n * ```\n *\n * @example With advanced features\n * ```typescript\n * const result = await adapter.transcribe({\n * type: 'url',\n * url: 'https://example.com/meeting.mp3'\n * }, {\n * language: 'en',\n * diarization: true,\n * speakersExpected: 3,\n * summarization: true,\n * customVocabulary: ['API', 'TypeScript', 'JavaScript']\n * });\n * ```\n *\n * @example With webhook (returns job ID immediately for polling)\n * ```typescript\n * // Submit transcription with webhook\n * const result = await adapter.transcribe({\n * type: 'url',\n * url: 'https://example.com/meeting.mp3'\n * }, {\n * webhookUrl: 'https://myapp.com/webhook/transcription',\n * language: 'en'\n * });\n *\n * // Get job ID for polling\n * const jobId = result.data?.id;\n * console.log('Job ID:', jobId); // Use this to poll for status\n *\n * // Later: Poll for completion (if webhook fails or you want to check)\n * const status = await adapter.getTranscript(jobId);\n * if (status.data?.status === 'completed') {\n * console.log('Transcript:', status.data.text);\n * }\n * ```\n */\n async transcribe(\n audio: AudioInput,\n options?: TranscribeOptions\n ): Promise<UnifiedTranscriptResponse> {\n this.validateConfig()\n\n try {\n // Build typed request using generated types\n const request = this.buildTranscriptionRequest(audio, options)\n\n // Use generated API client function - FULLY TYPED!\n const response = await preRecordedControllerInitPreRecordedJobV2(\n request,\n this.getAxiosConfig()\n )\n\n const jobId = response.data.id\n\n // If webhook is provided, return immediately with job ID\n if (options?.webhookUrl) {\n return {\n success: true,\n provider: this.name,\n data: {\n id: jobId,\n text: \"\",\n status: \"queued\"\n },\n raw: response.data\n }\n }\n\n // Otherwise, poll for results\n return await this.pollForCompletion(jobId)\n } catch (error) {\n return this.createErrorResponse(error)\n }\n }\n\n /**\n * Get transcription result by ID\n */\n async getTranscript(transcriptId: string): Promise<UnifiedTranscriptResponse> {\n this.validateConfig()\n\n try {\n // Use generated API client function - FULLY TYPED!\n const response = await preRecordedControllerGetPreRecordedJobV2(\n transcriptId,\n this.getAxiosConfig()\n )\n\n return this.normalizeResponse(response.data)\n } catch (error) {\n return this.createErrorResponse(error)\n }\n }\n\n /**\n * Build Gladia transcription request from unified options\n */\n private buildTranscriptionRequest(\n audio: AudioInput,\n options?: TranscribeOptions\n ): InitTranscriptionRequest {\n // Get 
audio URL\n let audioUrl: string\n if (audio.type === \"url\") {\n audioUrl = audio.url\n } else {\n throw new Error(\n \"Gladia adapter currently only supports URL-based audio input. Use audio.type='url'\"\n )\n }\n\n const request: InitTranscriptionRequest = {\n audio_url: audioUrl\n }\n\n // Map options to Gladia format\n if (options) {\n // Language configuration\n if (options.language || options.languageDetection) {\n request.language_config = {\n languages: options.language\n ? [options.language as TranscriptionLanguageCodeEnum]\n : undefined,\n code_switching: options.languageDetection\n }\n }\n\n // Diarization (speaker recognition)\n if (options.diarization) {\n request.diarization = true\n if (options.speakersExpected) {\n request.diarization_config = {\n number_of_speakers: options.speakersExpected\n }\n }\n }\n\n // Custom vocabulary\n if (options.customVocabulary && options.customVocabulary.length > 0) {\n request.custom_vocabulary = true\n request.custom_vocabulary_config = {\n vocabulary: options.customVocabulary\n }\n }\n\n // Summarization\n if (options.summarization) {\n request.summarization = true\n }\n\n // Sentiment analysis\n if (options.sentimentAnalysis) {\n request.sentiment_analysis = true\n }\n\n // Named entity recognition (entity detection)\n if (options.entityDetection) {\n request.named_entity_recognition = true\n }\n\n // Webhook callback\n if (options.webhookUrl) {\n request.callback = true\n request.callback_config = {\n url: options.webhookUrl\n }\n }\n\n // Custom metadata\n if (options.metadata) {\n request.custom_metadata = options.metadata\n }\n }\n\n return request\n }\n\n /**\n * Normalize Gladia response to unified format\n */\n private normalizeResponse(response: PreRecordedResponse): UnifiedTranscriptResponse {\n // Use utility to normalize status\n const status = normalizeStatus(response.status, \"gladia\")\n\n // Handle error state\n if (response.status === \"error\") {\n return {\n success: false,\n provider: this.name,\n error: {\n code: response.error_code?.toString() || ERROR_CODES.TRANSCRIPTION_ERROR,\n message: \"Transcription failed\",\n statusCode: response.error_code || undefined\n },\n raw: response\n }\n }\n\n // Extract transcription result\n const result = response.result\n const transcription = result?.transcription\n\n return {\n success: true,\n provider: this.name,\n data: {\n id: response.id,\n text: transcription?.full_transcript || \"\",\n confidence: undefined, // Gladia doesn't provide overall confidence\n status,\n language: transcription?.languages?.[0], // Use first detected language\n duration: undefined, // Not directly available in Gladia response\n speakers: this.extractSpeakers(transcription),\n words: this.extractWords(transcription),\n utterances: this.extractUtterances(transcription),\n summary: result?.summarization?.results || undefined,\n metadata: {\n requestParams: response.request_params,\n customMetadata: response.custom_metadata\n },\n createdAt: response.created_at,\n completedAt: response.completed_at || undefined\n },\n raw: response\n }\n }\n\n /**\n * Extract speaker information from Gladia response\n */\n private extractSpeakers(transcription: TranscriptionDTO | undefined) {\n return extractSpeakersFromUtterances(\n transcription?.utterances,\n (utterance: UtteranceDTO) => utterance.speaker,\n (id) => `Speaker ${id}`\n )\n }\n\n /**\n * Extract word timestamps from Gladia response\n */\n private extractWords(transcription: TranscriptionDTO | undefined) {\n if (!transcription?.utterances) {\n 
return undefined\n }\n\n // Flatten all words from all utterances\n const allWords = transcription.utterances.flatMap((utterance: UtteranceDTO) =>\n utterance.words.map((word: WordDTO) => ({\n word,\n speaker: utterance.speaker\n }))\n )\n\n return extractWordsUtil(allWords, (item) => ({\n text: item.word.word,\n start: item.word.start,\n end: item.word.end,\n confidence: item.word.confidence,\n speaker: item.speaker?.toString()\n }))\n }\n\n /**\n * Extract utterances from Gladia response\n */\n private extractUtterances(transcription: TranscriptionDTO | undefined) {\n if (!transcription?.utterances) {\n return undefined\n }\n\n return transcription.utterances.map((utterance: UtteranceDTO) => ({\n text: utterance.text,\n start: utterance.start,\n end: utterance.end,\n speaker: utterance.speaker?.toString(),\n confidence: utterance.confidence,\n words: utterance.words.map((word: WordDTO) => ({\n text: word.word,\n start: word.start,\n end: word.end,\n confidence: word.confidence\n }))\n }))\n }\n\n /**\n * Poll for transcription completion\n */\n\n /**\n * Stream audio for real-time transcription\n *\n * Creates a WebSocket connection to Gladia for streaming transcription.\n * First initializes a session via REST API, then connects to WebSocket.\n *\n * @param options - Streaming configuration options\n * @param callbacks - Event callbacks for transcription results\n * @returns Promise that resolves with a StreamingSession\n *\n * @example Real-time streaming\n * ```typescript\n * const session = await adapter.transcribeStream({\n * encoding: 'wav/pcm',\n * sampleRate: 16000,\n * channels: 1,\n * language: 'en',\n * interimResults: true\n * }, {\n * onOpen: () => console.log('Connected'),\n * onTranscript: (event) => {\n * if (event.isFinal) {\n * console.log('Final:', event.text);\n * } else {\n * console.log('Interim:', event.text);\n * }\n * },\n * onError: (error) => console.error('Error:', error),\n * onClose: () => console.log('Disconnected')\n * });\n *\n * // Send audio chunks\n * const audioChunk = getAudioChunk(); // Your audio source\n * await session.sendAudio({ data: audioChunk });\n *\n * // Close when done\n * await session.close();\n * ```\n */\n async transcribeStream(\n options?: StreamingOptions,\n callbacks?: StreamingCallbacks\n ): Promise<StreamingSession> {\n this.validateConfig()\n\n // Validate sample rate against OpenAPI-generated enum\n let validatedSampleRate: StreamingSupportedSampleRateEnum | undefined\n if (options?.sampleRate) {\n validatedSampleRate = validateEnumValue(\n options.sampleRate,\n StreamingSupportedSampleRateEnum,\n \"sample rate\",\n \"Gladia\"\n )\n }\n\n // Build typed streaming request using OpenAPI-generated types\n const streamingRequest: Partial<StreamingRequest> = {\n encoding: options?.encoding\n ? 
(mapEncodingToProvider(options.encoding, \"gladia\") as StreamingSupportedEncodingEnum)\n : undefined,\n sample_rate: validatedSampleRate,\n channels: options?.channels,\n endpointing: options?.endpointing\n }\n\n if (options?.language) {\n streamingRequest.language_config = {\n languages: [options.language as TranscriptionLanguageCodeEnum]\n }\n }\n\n // Use generated API client function - FULLY TYPED!\n const initResponse = await streamingControllerInitStreamingSessionV2(\n streamingRequest as StreamingRequest,\n undefined, // no params\n this.getAxiosConfig()\n )\n\n const { id, url: wsUrl } = initResponse.data\n\n // Step 2: Connect to WebSocket\n const ws = new WebSocket(wsUrl)\n\n let sessionStatus: SessionStatus = \"connecting\"\n\n // Setup standard WebSocket event handlers\n setupWebSocketHandlers(ws, callbacks, (status) => {\n sessionStatus = status\n })\n\n ws.on(\"message\", (data: Buffer) => {\n try {\n const message = JSON.parse(data.toString())\n\n // Handle different message types from Gladia\n if (message.type === \"transcript\") {\n // Type-safe: cast to TranscriptMessage after checking type\n const transcriptMessage = message as TranscriptMessage\n const messageData = transcriptMessage.data\n const utterance = messageData.utterance\n\n callbacks?.onTranscript?.({\n type: \"transcript\",\n text: utterance.text,\n isFinal: messageData.is_final,\n confidence: utterance.confidence,\n words: utterance.words.map((word) => ({\n text: word.word,\n start: word.start,\n end: word.end,\n confidence: word.confidence\n })),\n data: message\n })\n } else if (message.type === \"utterance\") {\n // Utterance completed - extract from nested data.utterance structure\n const transcriptMessage = message as TranscriptMessage\n const messageData = transcriptMessage.data\n const utterance = messageData.utterance\n\n const utteranceData = {\n text: utterance.text,\n start: utterance.start,\n end: utterance.end,\n speaker: utterance.speaker?.toString(),\n confidence: utterance.confidence,\n words: utterance.words.map((word) => ({\n text: word.word,\n start: word.start,\n end: word.end,\n confidence: word.confidence\n }))\n }\n callbacks?.onUtterance?.(utteranceData)\n } else if (message.type === \"metadata\") {\n callbacks?.onMetadata?.(message)\n }\n } catch (error) {\n callbacks?.onError?.({\n code: ERROR_CODES.PARSE_ERROR,\n message: \"Failed to parse WebSocket message\",\n details: error\n })\n }\n })\n\n // Wait for WebSocket connection to open\n await waitForWebSocketOpen(ws)\n\n // Return StreamingSession interface\n return {\n id,\n provider: this.name,\n createdAt: new Date(),\n getStatus: () => sessionStatus,\n sendAudio: async (chunk: AudioChunk) => {\n // Validate session is ready\n validateSessionForAudio(sessionStatus, ws.readyState, WebSocket.OPEN)\n\n // Send raw audio data\n ws.send(chunk.data)\n\n // Send stop recording message if this is the last chunk\n if (chunk.isLast) {\n ws.send(\n JSON.stringify({\n type: \"stop_recording\"\n })\n )\n }\n },\n close: async () => {\n if (sessionStatus === \"closed\" || sessionStatus === \"closing\") {\n return\n }\n\n sessionStatus = \"closing\"\n\n // Send stop recording message before closing\n if (ws.readyState === WebSocket.OPEN) {\n ws.send(\n JSON.stringify({\n type: \"stop_recording\"\n })\n )\n }\n\n // Close WebSocket with utility\n await closeWebSocket(ws)\n sessionStatus = \"closed\"\n }\n }\n }\n}\n\n/**\n * Factory function to create a Gladia adapter\n */\nexport function createGladiaAdapter(config: ProviderConfig): 
GladiaAdapter {\n const adapter = new GladiaAdapter()\n adapter.initialize(config)\n return adapter\n}\n","/**\n * Unified audio encoding types for Voice Router SDK\n *\n * These types provide strict typing for audio formats across all providers,\n * preventing common bugs like passing unsupported encoding formats.\n */\n\n/**\n * Unified audio encoding formats supported across providers\n *\n * - `linear16`: PCM 16-bit linear (universal support)\n * - `mulaw`: μ-law 8-bit (Gladia, Deepgram)\n * - `alaw`: A-law 8-bit (Gladia only)\n * - `flac`: FLAC codec (Deepgram only)\n * - `opus`: Opus codec (Deepgram only)\n * - `speex`: Speex codec (Deepgram only)\n * - `amr-nb`: AMR narrowband (Deepgram only)\n * - `amr-wb`: AMR wideband (Deepgram only)\n * - `g729`: G.729 codec (Deepgram only)\n */\nexport type AudioEncoding =\n // Universal - supported by most providers\n | \"linear16\"\n // μ-law and A-law - telephony codecs\n | \"mulaw\"\n | \"alaw\"\n // Advanced codecs - Deepgram specific\n | \"flac\"\n | \"opus\"\n | \"speex\"\n | \"amr-nb\"\n | \"amr-wb\"\n | \"g729\"\n\n/**\n * Standard sample rates (Hz) for audio streaming\n */\nexport type AudioSampleRate = 8000 | 16000 | 32000 | 44100 | 48000\n\n/**\n * Standard bit depths for PCM audio\n */\nexport type AudioBitDepth = 8 | 16 | 24 | 32\n\n/**\n * Audio channel configurations\n */\nexport type AudioChannels = 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8\n\n/**\n * Provider-specific encoding format mappings\n *\n * Each provider may have different names for the same codec.\n * These mappings translate between unified format and provider format.\n */\nexport interface EncodingMapping {\n /** Unified encoding format */\n unified: AudioEncoding\n /** Provider-specific encoding string */\n provider: string\n}\n\n/**\n * Gladia encoding mappings\n * Gladia uses \"wav/xxx\" format for streaming\n */\nexport const GLADIA_ENCODING_MAP: Record<string, string> = {\n linear16: \"wav/pcm\",\n mulaw: \"wav/ulaw\",\n alaw: \"wav/alaw\"\n} as const\n\n/**\n * Deepgram encoding mappings\n * Deepgram uses lowercase format names\n */\nexport const DEEPGRAM_ENCODING_MAP: Record<string, string> = {\n linear16: \"linear16\",\n mulaw: \"mulaw\",\n flac: \"flac\",\n opus: \"opus\",\n speex: \"speex\",\n \"amr-nb\": \"amr-nb\",\n \"amr-wb\": \"amr-wb\",\n g729: \"g729\"\n} as const\n\n/**\n * AssemblyAI encoding mappings\n * AssemblyAI uses pcm_s16le for streaming\n */\nexport const ASSEMBLYAI_ENCODING_MAP: Record<string, string> = {\n linear16: \"pcm_s16le\"\n} as const\n\n/**\n * Get provider-specific encoding format from unified format\n *\n * @param unifiedEncoding - Unified encoding format\n * @param provider - Target provider\n * @returns Provider-specific encoding string\n * @throws Error if encoding is not supported by provider\n *\n * @example\n * ```typescript\n * const gladiaEncoding = mapEncodingToProvider('linear16', 'gladia')\n * // Returns: 'wav/pcm'\n *\n * const deepgramEncoding = mapEncodingToProvider('linear16', 'deepgram')\n * // Returns: 'linear16'\n * ```\n */\nexport function mapEncodingToProvider(\n unifiedEncoding: AudioEncoding,\n provider: \"gladia\" | \"deepgram\" | \"assemblyai\"\n): string {\n let mapping: Record<string, string>\n\n switch (provider) {\n case \"gladia\":\n mapping = GLADIA_ENCODING_MAP\n break\n case \"deepgram\":\n mapping = DEEPGRAM_ENCODING_MAP\n break\n case \"assemblyai\":\n mapping = ASSEMBLYAI_ENCODING_MAP\n break\n }\n\n const providerEncoding = mapping[unifiedEncoding]\n\n if (!providerEncoding) {\n throw new Error(\n 
`Encoding '${unifiedEncoding}' is not supported by ${provider}. ` +\n `Supported encodings: ${Object.keys(mapping).join(\", \")}`\n )\n }\n\n return providerEncoding\n}\n\n/**\n * Validate audio configuration for a specific provider\n *\n * @param config - Audio configuration to validate\n * @param provider - Target provider\n * @throws Error if configuration is invalid for the provider\n */\nexport function validateAudioConfig(\n config: {\n encoding?: AudioEncoding\n sampleRate?: AudioSampleRate\n channels?: AudioChannels\n bitDepth?: AudioBitDepth\n },\n provider: \"gladia\" | \"deepgram\" | \"assemblyai\"\n): void {\n if (config.encoding) {\n // This will throw if encoding is not supported\n mapEncodingToProvider(config.encoding, provider)\n }\n\n // Provider-specific validations\n if (provider === \"gladia\") {\n if (config.channels && (config.channels < 1 || config.channels > 8)) {\n throw new Error(\"Gladia supports 1-8 audio channels\")\n }\n }\n\n if (provider === \"assemblyai\" && config.encoding && config.encoding !== \"linear16\") {\n throw new Error(\"AssemblyAI streaming only supports linear16 encoding\")\n }\n}\n","/**\n * WebSocket utility functions for streaming transcription\n *\n * Provides reusable helpers for WebSocket connection management,\n * event handling, and session validation.\n */\n\nimport type WebSocket from \"ws\"\nimport type { StreamingCallbacks, SessionStatus } from \"../router/types\"\nimport { DEFAULT_TIMEOUTS } from \"../constants/defaults\"\nimport { ERROR_CODES, createError } from \"./errors\"\n\n/**\n * Wait for WebSocket connection to open with timeout\n *\n * @param ws - WebSocket instance\n * @param timeoutMs - Connection timeout in milliseconds\n * @returns Promise that resolves when connection opens\n * @throws Error if connection times out or fails\n *\n * @example\n * ```typescript\n * const ws = new WebSocket(url)\n * await waitForWebSocketOpen(ws)\n * // WebSocket is now open and ready\n * ```\n */\nexport function waitForWebSocketOpen(\n ws: WebSocket,\n timeoutMs: number = DEFAULT_TIMEOUTS.WS_CONNECTION\n): Promise<void> {\n return new Promise<void>((resolve, reject) => {\n const timeout = setTimeout(() => {\n reject(new Error(\"WebSocket connection timeout\"))\n }, timeoutMs)\n\n ws.once(\"open\", () => {\n clearTimeout(timeout)\n resolve()\n })\n\n ws.once(\"error\", (error) => {\n clearTimeout(timeout)\n reject(error)\n })\n })\n}\n\n/**\n * Close WebSocket gracefully with timeout\n *\n * Attempts graceful close, but will forcefully terminate if timeout is reached.\n *\n * @param ws - WebSocket instance\n * @param timeoutMs - Close timeout in milliseconds\n * @returns Promise that resolves when connection is closed\n *\n * @example\n * ```typescript\n * await closeWebSocket(ws)\n * // WebSocket is now closed\n * ```\n */\nexport function closeWebSocket(\n ws: WebSocket,\n timeoutMs: number = DEFAULT_TIMEOUTS.WS_CLOSE\n): Promise<void> {\n return new Promise<void>((resolve) => {\n const timeout = setTimeout(() => {\n ws.terminate()\n resolve()\n }, timeoutMs)\n\n ws.close()\n\n ws.once(\"close\", () => {\n clearTimeout(timeout)\n resolve()\n })\n })\n}\n\n/**\n * Setup standard WebSocket event handlers\n *\n * Configures consistent event handling for open, error, and close events\n * across all streaming adapters.\n *\n * @param ws - WebSocket instance\n * @param callbacks - Streaming callbacks from user\n * @param setSessionStatus - Function to update session status\n *\n * @example\n * ```typescript\n * let sessionStatus: 
SessionStatus = \"connecting\"\n *\n * setupWebSocketHandlers(\n * ws,\n * callbacks,\n * (status) => { sessionStatus = status }\n * )\n * ```\n */\nexport function setupWebSocketHandlers(\n ws: WebSocket,\n callbacks: StreamingCallbacks | undefined,\n setSessionStatus: (status: SessionStatus) => void\n): void {\n ws.on(\"open\", () => {\n setSessionStatus(\"open\")\n callbacks?.onOpen?.()\n })\n\n ws.on(\"error\", (error: Error) => {\n callbacks?.onError?.(createError(ERROR_CODES.WEBSOCKET_ERROR, error.message, error))\n })\n\n ws.on(\"close\", (code: number, reason: Buffer) => {\n setSessionStatus(\"closed\")\n callbacks?.onClose?.(code, reason.toString())\n })\n}\n\n/**\n * Validate that WebSocket session is ready to send audio\n *\n * Checks both session status and WebSocket ready state before allowing\n * audio data to be sent.\n *\n * @param sessionStatus - Current session status\n * @param wsReadyState - WebSocket readyState value\n * @param WebSocketOpen - WebSocket.OPEN constant value\n * @throws Error if session is not ready\n *\n * @example\n * ```typescript\n * validateSessionForAudio(sessionStatus, ws.readyState, WebSocket.OPEN)\n * ws.send(audioData) // Safe to send now\n * ```\n */\nexport function validateSessionForAudio(\n sessionStatus: SessionStatus,\n wsReadyState: number,\n WebSocketOpen: number\n): void {\n if (sessionStatus !== \"open\") {\n throw new Error(`Cannot send audio: session is ${sessionStatus}`)\n }\n\n if (wsReadyState !== WebSocketOpen) {\n throw new Error(\"WebSocket is not open\")\n }\n}\n","/**\n * Validation utilities for Voice Router SDK\n *\n * Provides generic validators for OpenAPI-generated enum types,\n * ensuring type-safe validation across all providers.\n */\n\n/**\n * Validate that a value is a member of an OpenAPI-generated enum\n *\n * This generic validator works with any enum type generated by Orval from\n * OpenAPI specifications, providing consistent validation and error messages.\n *\n * @param value - Value to validate\n * @param enumType - OpenAPI-generated enum object\n * @param fieldName - Human-readable field name for error messages\n * @param provider - Provider name for error messages\n * @returns The validated value, properly typed as the enum type\n * @throws Error if value is not in the enum\n *\n * @example Validate Gladia sample rate\n * ```typescript\n * import { StreamingSupportedSampleRateEnum } from \"../generated/gladia/schema/streamingSupportedSampleRateEnum\"\n *\n * const validatedRate = validateEnumValue(\n * 16000,\n * StreamingSupportedSampleRateEnum,\n * \"sample rate\",\n * \"Gladia\"\n * )\n * // Returns: 16000 (typed as StreamingSupportedSampleRateEnum)\n *\n * validateEnumValue(\n * 22050,\n * StreamingSupportedSampleRateEnum,\n * \"sample rate\",\n * \"Gladia\"\n * )\n * // Throws: \"Gladia does not support sample rate '22050'. 
Supported values (from OpenAPI spec): 8000, 16000, 32000, 44100, 48000\"\n * ```\n *\n * @example Validate Deepgram encoding\n * ```typescript\n * import { ListenV1EncodingParameter } from \"../generated/deepgram/schema/listenV1EncodingParameter\"\n *\n * const encoding = validateEnumValue(\n * \"linear16\",\n * ListenV1EncodingParameter,\n * \"encoding\",\n * \"Deepgram\"\n * )\n * ```\n */\nexport function validateEnumValue<T extends Record<string, string | number>>(\n value: unknown,\n enumType: T,\n fieldName: string,\n provider: string\n): T[keyof T] {\n const validValues = Object.values(enumType)\n const isValid = validValues.some((v) => v === value)\n\n if (!isValid) {\n throw new Error(\n `${provider} does not support ${fieldName} '${value}'. ` +\n `Supported values (from OpenAPI spec): ${validValues.join(\", \")}`\n )\n }\n\n return value as T[keyof T]\n}\n\n/**\n * Validate optional enum value\n *\n * Like validateEnumValue but returns undefined if value is nullish,\n * useful for optional configuration parameters.\n *\n * @param value - Value to validate (can be undefined/null)\n * @param enumType - OpenAPI-generated enum object\n * @param fieldName - Human-readable field name for error messages\n * @param provider - Provider name for error messages\n * @returns The validated value or undefined\n * @throws Error if value is not in the enum (but not if undefined/null)\n *\n * @example\n * ```typescript\n * const rate = validateOptionalEnumValue(\n * options?.sampleRate,\n * StreamingSupportedSampleRateEnum,\n * \"sample rate\",\n * \"Gladia\"\n * )\n * // Returns: validated rate or undefined\n * ```\n */\nexport function validateOptionalEnumValue<T extends Record<string, string | number>>(\n value: unknown,\n enumType: T,\n fieldName: string,\n provider: string\n): T[keyof T] | undefined {\n if (value === undefined || value === null) {\n return undefined\n }\n\n return validateEnumValue(value, enumType, fieldName, provider)\n}\n","/**\n * Transcription processing utilities\n *\n * Provides reusable helpers for extracting and normalizing transcription\n * data (speakers, words, utterances) across different provider formats.\n */\n\nimport type { Speaker, Word, TranscriptionStatus } from \"../router/types\"\n\n/**\n * Extract unique speakers from utterances\n *\n * Generic helper that works with any provider's utterance format via\n * a mapping function to extract speaker IDs.\n *\n * @param utterances - Provider-specific utterances array\n * @param getSpeakerId - Function to extract speaker ID from utterance\n * @param formatLabel - Optional function to format speaker label\n * @returns Array of unique speakers or undefined if none found\n *\n * @example Gladia\n * ```typescript\n * const speakers = extractSpeakersFromUtterances(\n * transcription?.utterances,\n * (utterance) => utterance.speaker,\n * (id) => `Speaker ${id}`\n * )\n * ```\n *\n * @example AssemblyAI (already has good labels)\n * ```typescript\n * const speakers = extractSpeakersFromUtterances(\n * transcript.utterances,\n * (utterance) => utterance.speaker,\n * (id) => id // Keep as-is: \"A\", \"B\", \"C\"\n * )\n * ```\n */\nexport function extractSpeakersFromUtterances<T>(\n utterances: T[] | undefined | null,\n getSpeakerId: (utterance: T) => string | number | undefined,\n formatLabel?: (speakerId: string) => string\n): Speaker[] | undefined {\n if (!utterances || utterances.length === 0) {\n return undefined\n }\n\n const speakerSet = new Set<string>()\n\n utterances.forEach((utterance) => {\n const speakerId 
= getSpeakerId(utterance)\n if (speakerId !== undefined) {\n speakerSet.add(String(speakerId))\n }\n })\n\n if (speakerSet.size === 0) {\n return undefined\n }\n\n return Array.from(speakerSet).map((speakerId) => ({\n id: speakerId,\n label: formatLabel ? formatLabel(speakerId) : `Speaker ${speakerId}`\n }))\n}\n\n/**\n * Extract and normalize words from provider-specific format\n *\n * Generic helper that maps provider word formats to unified Word type.\n *\n * @param words - Provider-specific words array\n * @param mapper - Function to convert provider word to unified Word\n * @returns Array of normalized words or undefined if none found\n *\n * @example Gladia\n * ```typescript\n * const words = extractWords(\n * allWords,\n * (word: WordDTO) => ({\n * text: word.word,\n * start: word.start,\n * end: word.end,\n * confidence: word.confidence\n * })\n * )\n * ```\n */\nexport function extractWords<T>(\n words: T[] | undefined | null,\n mapper: (word: T) => Word\n): Word[] | undefined {\n if (!words || words.length === 0) {\n return undefined\n }\n\n const normalizedWords = words.map(mapper)\n return normalizedWords.length > 0 ? normalizedWords : undefined\n}\n\n/**\n * Status mapping configurations for each provider\n *\n * Maps provider-specific status strings to unified TranscriptionStatus.\n * Keys are lowercase provider status values, values are unified statuses.\n */\nexport const STATUS_MAPPINGS = {\n gladia: {\n queued: \"queued\" as TranscriptionStatus,\n processing: \"processing\" as TranscriptionStatus,\n done: \"completed\" as TranscriptionStatus,\n error: \"error\" as TranscriptionStatus\n },\n assemblyai: {\n queued: \"queued\" as TranscriptionStatus,\n processing: \"processing\" as TranscriptionStatus,\n completed: \"completed\" as TranscriptionStatus,\n error: \"error\" as TranscriptionStatus\n },\n deepgram: {\n queued: \"queued\" as TranscriptionStatus,\n processing: \"processing\" as TranscriptionStatus,\n completed: \"completed\" as TranscriptionStatus,\n error: \"error\" as TranscriptionStatus\n },\n azure: {\n succeeded: \"completed\" as TranscriptionStatus,\n running: \"processing\" as TranscriptionStatus,\n notstarted: \"queued\" as TranscriptionStatus,\n failed: \"error\" as TranscriptionStatus\n },\n speechmatics: {\n running: \"processing\" as TranscriptionStatus,\n done: \"completed\" as TranscriptionStatus,\n rejected: \"error\" as TranscriptionStatus,\n expired: \"error\" as TranscriptionStatus\n }\n} as const\n\nexport type SupportedProvider = keyof typeof STATUS_MAPPINGS\n\n/**\n * Normalize provider status to unified status\n *\n * Handles both exact matches and substring matches (for Azure-style statuses).\n *\n * @param providerStatus - Status string from provider API\n * @param provider - Provider name\n * @param defaultStatus - Fallback status if no match found\n * @returns Unified transcription status\n *\n * @example Gladia\n * ```typescript\n * const status = normalizeStatus(response.status, \"gladia\")\n * // \"done\" -> \"completed\"\n * ```\n *\n * @example Azure (substring matching)\n * ```typescript\n * const status = normalizeStatus(\"Succeeded\", \"azure\")\n * // Case-insensitive substring match: \"Succeeded\" contains \"succeeded\" -> \"completed\"\n * ```\n */\nexport function normalizeStatus(\n providerStatus: string | undefined | null,\n provider: SupportedProvider,\n defaultStatus: TranscriptionStatus = \"queued\"\n): TranscriptionStatus {\n if (!providerStatus) return defaultStatus\n\n const mapping = STATUS_MAPPINGS[provider]\n const 
statusKey = providerStatus.toString().toLowerCase()\n\n // Try exact match first\n if (statusKey in mapping) {\n return mapping[statusKey as keyof typeof mapping]\n }\n\n // Try substring match (for Azure-style statuses like \"Succeeded\", \"NotStarted\")\n for (const [key, value] of Object.entries(mapping)) {\n if (statusKey.includes(key)) {\n return value\n }\n }\n\n return defaultStatus\n}\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nimport { faker } from \"@faker-js/faker\"\nimport type { AxiosRequestConfig, AxiosResponse } from \"axios\"\nimport axios from \"axios\"\nimport { delay, HttpResponse, http } from \"msw\"\nimport type {\n AudioToTextControllerAudioTranscriptionBody,\n AudioUploadResponse,\n CustomVocabularyEntryDTO,\n FileControllerUploadV2BodyOne,\n FileControllerUploadV2BodyTwo,\n HistoryControllerGetListV1Params,\n InitPreRecordedTranscriptionResponse,\n InitStreamingResponse,\n InitTranscriptionRequest,\n ListHistoryResponse,\n ListPreRecordedResponse,\n ListStreamingResponse,\n ListTranscriptionResponse,\n PatchRequestParamsDTO,\n PreRecordedControllerGetPreRecordedJobsV2Params,\n PreRecordedResponse,\n StreamingControllerGetStreamingJobsV2Params,\n StreamingControllerInitStreamingSessionV2Params,\n StreamingRequest,\n StreamingResponse,\n TranscriptionControllerGetTranscriptV2200,\n TranscriptionControllerListV2Params,\n VideoToTextControllerVideoTranscriptionBody\n} from \"../schema\"\nimport {\n CallbackMethodEnum,\n StreamingSupportedEncodingEnum,\n StreamingSupportedModels,\n SubtitlesFormatEnum,\n SubtitlesStyleEnum,\n SummaryTypesEnum,\n TranscriptionLanguageCodeEnum,\n TranslationLanguageCodeEnum,\n TranslationModelEnum\n} from \"../schema\"\n\n/**\n * @summary Upload an audio file or provide an audio URL for processing\n */\nexport const fileControllerUploadV2 = <TData = AxiosResponse<AudioUploadResponse>>(\n fileControllerUploadV2Body: FileControllerUploadV2BodyOne | FileControllerUploadV2BodyTwo,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(\"/v2/upload\", fileControllerUploadV2Body, options)\n}\n\n/**\n * @summary Initiate a new pre recorded job\n */\nexport const preRecordedControllerInitPreRecordedJobV2 = <\n TData = AxiosResponse<InitPreRecordedTranscriptionResponse>\n>(\n initTranscriptionRequest: InitTranscriptionRequest,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(\"/v2/pre-recorded\", initTranscriptionRequest, options)\n}\n\n/**\n * @summary Get pre recorded jobs based on query parameters\n */\nexport const preRecordedControllerGetPreRecordedJobsV2 = <\n TData = AxiosResponse<ListPreRecordedResponse>\n>(\n params?: PreRecordedControllerGetPreRecordedJobsV2Params,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/v2/pre-recorded\", {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Get the pre recorded job's metadata\n */\nexport const preRecordedControllerGetPreRecordedJobV2 = <\n TData = AxiosResponse<PreRecordedResponse>\n>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/v2/pre-recorded/${id}`, options)\n}\n\n/**\n * @summary Delete the pre recorded job\n */\nexport const preRecordedControllerDeletePreRecordedJobV2 = <TData = AxiosResponse<void>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.delete(`/v2/pre-recorded/${id}`, options)\n}\n\n/**\n * @summary Download 
the audio file used for this pre recorded job\n */\nexport const preRecordedControllerGetAudioV2 = <TData = AxiosResponse<Blob>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/v2/pre-recorded/${id}/file`, {\n responseType: \"blob\",\n ...options\n })\n}\n\n/**\n * @summary Initiate a new transcription job\n */\nexport const transcriptionControllerInitPreRecordedJobV2 = <\n TData = AxiosResponse<InitPreRecordedTranscriptionResponse>\n>(\n initTranscriptionRequest: InitTranscriptionRequest,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(\"/v2/transcription\", initTranscriptionRequest, options)\n}\n\n/**\n * @summary Get transcription jobs based on query parameters\n */\nexport const transcriptionControllerListV2 = <TData = AxiosResponse<ListTranscriptionResponse>>(\n params?: TranscriptionControllerListV2Params,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/v2/transcription\", {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Get the transcription job's metadata\n */\nexport const transcriptionControllerGetTranscriptV2 = <\n TData = AxiosResponse<TranscriptionControllerGetTranscriptV2200>\n>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/v2/transcription/${id}`, options)\n}\n\n/**\n * @summary Delete the transcription job\n */\nexport const transcriptionControllerDeleteTranscriptV2 = <TData = AxiosResponse<void>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.delete(`/v2/transcription/${id}`, options)\n}\n\n/**\n * @summary Download the audio file used for this transcription job\n */\nexport const transcriptionControllerGetAudioV2 = <TData = AxiosResponse<Blob>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/v2/transcription/${id}/file`, {\n responseType: \"blob\",\n ...options\n })\n}\n\nexport const audioToTextControllerAudioTranscription = <TData = AxiosResponse<void>>(\n audioToTextControllerAudioTranscriptionBody: AudioToTextControllerAudioTranscriptionBody,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n const formData = new FormData()\n if (audioToTextControllerAudioTranscriptionBody.audio !== undefined) {\n formData.append(\"audio\", audioToTextControllerAudioTranscriptionBody.audio)\n }\n if (audioToTextControllerAudioTranscriptionBody.audio_url !== undefined) {\n formData.append(\"audio_url\", audioToTextControllerAudioTranscriptionBody.audio_url)\n }\n if (audioToTextControllerAudioTranscriptionBody.language_behaviour !== undefined) {\n formData.append(\n \"language_behaviour\",\n audioToTextControllerAudioTranscriptionBody.language_behaviour\n )\n }\n if (audioToTextControllerAudioTranscriptionBody.language !== undefined) {\n formData.append(\"language\", audioToTextControllerAudioTranscriptionBody.language)\n }\n if (audioToTextControllerAudioTranscriptionBody.transcription_hint !== undefined) {\n formData.append(\n \"transcription_hint\",\n audioToTextControllerAudioTranscriptionBody.transcription_hint\n )\n }\n if (audioToTextControllerAudioTranscriptionBody.toggle_diarization !== undefined) {\n formData.append(\n \"toggle_diarization\",\n audioToTextControllerAudioTranscriptionBody.toggle_diarization.toString()\n )\n }\n if (audioToTextControllerAudioTranscriptionBody.diarization_num_speakers !== undefined) {\n formData.append(\n \"diarization_num_speakers\",\n 
audioToTextControllerAudioTranscriptionBody.diarization_num_speakers.toString()\n )\n }\n if (audioToTextControllerAudioTranscriptionBody.diarization_min_speakers !== undefined) {\n formData.append(\n \"diarization_min_speakers\",\n audioToTextControllerAudioTranscriptionBody.diarization_min_speakers.toString()\n )\n }\n if (audioToTextControllerAudioTranscriptionBody.diarization_max_speakers !== undefined) {\n formData.append(\n \"diarization_max_speakers\",\n audioToTextControllerAudioTranscriptionBody.diarization_max_speakers.toString()\n )\n }\n if (audioToTextControllerAudioTranscriptionBody.toggle_direct_translate !== undefined) {\n formData.append(\n \"toggle_direct_translate\",\n audioToTextControllerAudioTranscriptionBody.toggle_direct_translate.toString()\n )\n }\n if (audioToTextControllerAudioTranscriptionBody.target_translation_language !== undefined) {\n formData.append(\n \"target_translation_language\",\n audioToTextControllerAudioTranscriptionBody.target_translation_language\n )\n }\n if (audioToTextControllerAudioTranscriptionBody.output_format !== undefined) {\n formData.append(\"output_format\", audioToTextControllerAudioTranscriptionBody.output_format)\n }\n if (audioToTextControllerAudioTranscriptionBody.toggle_noise_reduction !== undefined) {\n formData.append(\n \"toggle_noise_reduction\",\n audioToTextControllerAudioTranscriptionBody.toggle_noise_reduction.toString()\n )\n }\n if (audioToTextControllerAudioTranscriptionBody.toggle_accurate_words_timestamps !== undefined) {\n formData.append(\n \"toggle_accurate_words_timestamps\",\n audioToTextControllerAudioTranscriptionBody.toggle_accurate_words_timestamps.toString()\n )\n }\n if (audioToTextControllerAudioTranscriptionBody.webhook_url !== undefined) {\n formData.append(\"webhook_url\", audioToTextControllerAudioTranscriptionBody.webhook_url)\n }\n\n return axios.post(\"/audio/text/audio-transcription\", formData, options)\n}\n\nexport const videoToTextControllerVideoTranscription = <TData = AxiosResponse<void>>(\n videoToTextControllerVideoTranscriptionBody: VideoToTextControllerVideoTranscriptionBody,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n const formData = new FormData()\n if (videoToTextControllerVideoTranscriptionBody.video !== undefined) {\n formData.append(\"video\", videoToTextControllerVideoTranscriptionBody.video)\n }\n if (videoToTextControllerVideoTranscriptionBody.video_url !== undefined) {\n formData.append(\"video_url\", videoToTextControllerVideoTranscriptionBody.video_url)\n }\n if (videoToTextControllerVideoTranscriptionBody.language_behaviour !== undefined) {\n formData.append(\n \"language_behaviour\",\n videoToTextControllerVideoTranscriptionBody.language_behaviour\n )\n }\n if (videoToTextControllerVideoTranscriptionBody.language !== undefined) {\n formData.append(\"language\", videoToTextControllerVideoTranscriptionBody.language)\n }\n if (videoToTextControllerVideoTranscriptionBody.transcription_hint !== undefined) {\n formData.append(\n \"transcription_hint\",\n videoToTextControllerVideoTranscriptionBody.transcription_hint\n )\n }\n if (videoToTextControllerVideoTranscriptionBody.toggle_diarization !== undefined) {\n formData.append(\n \"toggle_diarization\",\n videoToTextControllerVideoTranscriptionBody.toggle_diarization.toString()\n )\n }\n if (videoToTextControllerVideoTranscriptionBody.diarization_num_speakers !== undefined) {\n formData.append(\n \"diarization_num_speakers\",\n videoToTextControllerVideoTranscriptionBody.diarization_num_speakers.toString()\n )\n 
}\n if (videoToTextControllerVideoTranscriptionBody.diarization_min_speakers !== undefined) {\n formData.append(\n \"diarization_min_speakers\",\n videoToTextControllerVideoTranscriptionBody.diarization_min_speakers.toString()\n )\n }\n if (videoToTextControllerVideoTranscriptionBody.diarization_max_speakers !== undefined) {\n formData.append(\n \"diarization_max_speakers\",\n videoToTextControllerVideoTranscriptionBody.diarization_max_speakers.toString()\n )\n }\n if (videoToTextControllerVideoTranscriptionBody.toggle_direct_translate !== undefined) {\n formData.append(\n \"toggle_direct_translate\",\n videoToTextControllerVideoTranscriptionBody.toggle_direct_translate.toString()\n )\n }\n if (videoToTextControllerVideoTranscriptionBody.target_translation_language !== undefined) {\n formData.append(\n \"target_translation_language\",\n videoToTextControllerVideoTranscriptionBody.target_translation_language\n )\n }\n if (videoToTextControllerVideoTranscriptionBody.output_format !== undefined) {\n formData.append(\"output_format\", videoToTextControllerVideoTranscriptionBody.output_format)\n }\n if (videoToTextControllerVideoTranscriptionBody.toggle_noise_reduction !== undefined) {\n formData.append(\n \"toggle_noise_reduction\",\n videoToTextControllerVideoTranscriptionBody.toggle_noise_reduction.toString()\n )\n }\n if (videoToTextControllerVideoTranscriptionBody.toggle_accurate_words_timestamps !== undefined) {\n formData.append(\n \"toggle_accurate_words_timestamps\",\n videoToTextControllerVideoTranscriptionBody.toggle_accurate_words_timestamps.toString()\n )\n }\n if (videoToTextControllerVideoTranscriptionBody.webhook_url !== undefined) {\n formData.append(\"webhook_url\", videoToTextControllerVideoTranscriptionBody.webhook_url)\n }\n\n return axios.post(\"/video/text/video-transcription\", formData, options)\n}\n\n/**\n * @summary Get the history of all your jobs\n */\nexport const historyControllerGetListV1 = <TData = AxiosResponse<ListHistoryResponse>>(\n params?: HistoryControllerGetListV1Params,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/v1/history\", {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Initiate a new live job\n */\nexport const streamingControllerInitStreamingSessionV2 = <\n TData = AxiosResponse<InitStreamingResponse>\n>(\n streamingRequest: StreamingRequest,\n params?: StreamingControllerInitStreamingSessionV2Params,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(\"/v2/live\", streamingRequest, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Get live jobs based on query parameters\n */\nexport const streamingControllerGetStreamingJobsV2 = <TData = AxiosResponse<ListStreamingResponse>>(\n params?: StreamingControllerGetStreamingJobsV2Params,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/v2/live\", {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Get the live job's metadata\n */\nexport const streamingControllerGetStreamingJobV2 = <TData = AxiosResponse<StreamingResponse>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/v2/live/${id}`, options)\n}\n\n/**\n * @summary Delete the live job\n */\nexport const streamingControllerDeleteStreamingJobV2 = <TData = AxiosResponse<void>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.delete(`/v2/live/${id}`, options)\n}\n\n/**\n * @summary 
For debugging purposes, send post session metadata in the request params of the job\n */\nexport const streamingControllerPatchRequestParamsV2 = <TData = AxiosResponse<void>>(\n id: string,\n patchRequestParamsDTO: PatchRequestParamsDTO,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.patch(`/v2/live/${id}`, patchRequestParamsDTO, options)\n}\n\n/**\n * @summary Download the audio file used for this live job\n */\nexport const streamingControllerGetAudioV2 = <TData = AxiosResponse<Blob>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/v2/live/${id}/file`, {\n responseType: \"blob\",\n ...options\n })\n}\n\nexport type FileControllerUploadV2Result = AxiosResponse<AudioUploadResponse>\nexport type PreRecordedControllerInitPreRecordedJobV2Result =\n AxiosResponse<InitPreRecordedTranscriptionResponse>\nexport type PreRecordedControllerGetPreRecordedJobsV2Result = AxiosResponse<ListPreRecordedResponse>\nexport type PreRecordedControllerGetPreRecordedJobV2Result = AxiosResponse<PreRecordedResponse>\nexport type PreRecordedControllerDeletePreRecordedJobV2Result = AxiosResponse<void>\nexport type PreRecordedControllerGetAudioV2Result = AxiosResponse<Blob>\nexport type TranscriptionControllerInitPreRecordedJobV2Result =\n AxiosResponse<InitPreRecordedTranscriptionResponse>\nexport type TranscriptionControllerListV2Result = AxiosResponse<ListTranscriptionResponse>\nexport type TranscriptionControllerGetTranscriptV2Result =\n AxiosResponse<TranscriptionControllerGetTranscriptV2200>\nexport type TranscriptionControllerDeleteTranscriptV2Result = AxiosResponse<void>\nexport type TranscriptionControllerGetAudioV2Result = AxiosResponse<Blob>\nexport type AudioToTextControllerAudioTranscriptionResult = AxiosResponse<void>\nexport type VideoToTextControllerVideoTranscriptionResult = AxiosResponse<void>\nexport type HistoryControllerGetListV1Result = AxiosResponse<ListHistoryResponse>\nexport type StreamingControllerInitStreamingSessionV2Result = AxiosResponse<InitStreamingResponse>\nexport type StreamingControllerGetStreamingJobsV2Result = AxiosResponse<ListStreamingResponse>\nexport type StreamingControllerGetStreamingJobV2Result = AxiosResponse<StreamingResponse>\nexport type StreamingControllerDeleteStreamingJobV2Result = AxiosResponse<void>\nexport type StreamingControllerPatchRequestParamsV2Result = AxiosResponse<void>\nexport type StreamingControllerGetAudioV2Result = AxiosResponse<Blob>\n\nexport const getFileControllerUploadV2ResponseMock = (\n overrideResponse: Partial<AudioUploadResponse> = {}\n): AudioUploadResponse => ({\n audio_url: faker.internet.url(),\n audio_metadata: {\n ...{\n id: faker.string.uuid(),\n filename: faker.string.alpha(20),\n source: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n extension: faker.string.uuid(),\n size: faker.number.int({ min: undefined, max: undefined }),\n audio_duration: faker.number.int({ min: undefined, max: undefined }),\n number_of_channels: faker.number.int({ min: undefined, max: undefined })\n }\n },\n ...overrideResponse\n})\n\nexport const getPreRecordedControllerInitPreRecordedJobV2ResponseMock = (\n overrideResponse: Partial<InitPreRecordedTranscriptionResponse> = {}\n): InitPreRecordedTranscriptionResponse => ({\n id: faker.string.uuid(),\n result_url: faker.internet.url(),\n ...overrideResponse\n})\n\nexport const getPreRecordedControllerGetPreRecordedJobsV2ResponseCustomVocabularyEntryDTOMock = (\n overrideResponse: Partial<CustomVocabularyEntryDTO> = {}\n): 
CustomVocabularyEntryDTO => ({\n ...{\n value: faker.string.alpha(20),\n intensity: faker.helpers.arrayElement([faker.number.int({ min: 0, max: 1 }), undefined]),\n pronunciations: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ]),\n language: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ])\n },\n ...overrideResponse\n})\n\nexport const getPreRecordedControllerGetPreRecordedJobsV2ResponseMock = (\n overrideResponse: Partial<ListPreRecordedResponse> = {}\n): ListPreRecordedResponse => ({\n first: faker.internet.url(),\n current: faker.internet.url(),\n next: faker.helpers.arrayElement([faker.internet.url(), null]),\n items: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n id: faker.string.uuid(),\n request_id: faker.string.alpha(20),\n version: faker.number.int({ min: undefined, max: undefined }),\n status: faker.helpers.arrayElement([\"queued\", \"processing\", \"done\", \"error\"] as const),\n created_at: `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n completed_at: faker.helpers.arrayElement([\n faker.helpers.arrayElement([`${faker.date.past().toISOString().split(\".\")[0]}Z`, null]),\n undefined\n ]),\n custom_metadata: faker.helpers.arrayElement([{}, undefined]),\n error_code: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: 400, max: 599 }), null]),\n undefined\n ]),\n post_session_metadata: {},\n kind: faker.helpers.arrayElement([\"pre-recorded\"] as const),\n file: faker.helpers.arrayElement([\n {\n ...{\n id: faker.string.alpha(20),\n filename: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n source: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n audio_duration: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n null\n ]),\n number_of_channels: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n null\n ])\n }\n },\n undefined\n ]),\n request_params: faker.helpers.arrayElement([\n {\n ...{\n context_prompt: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n custom_vocabulary: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_vocabulary_config: faker.helpers.arrayElement([\n {\n ...{\n vocabulary: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() =>\n faker.helpers.arrayElement([\n {\n ...getPreRecordedControllerGetPreRecordedJobsV2ResponseCustomVocabularyEntryDTOMock()\n },\n faker.string.alpha(20)\n ])\n ),\n default_intensity: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: 1 }),\n undefined\n ])\n }\n },\n undefined\n ]),\n detect_language: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n enable_code_switching: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n code_switching_config: faker.helpers.arrayElement([\n {\n ...{\n languages: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n language: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ]),\n callback_url: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n callback: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n 
callback_config: faker.helpers.arrayElement([\n {\n ...{\n url: faker.internet.url(),\n method: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(CallbackMethodEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n subtitles_config: faker.helpers.arrayElement([\n {\n ...{\n formats: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(SubtitlesFormatEnum)),\n undefined\n ]),\n minimum_duration: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n maximum_duration: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: 30 }),\n undefined\n ]),\n maximum_characters_per_row: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ]),\n maximum_rows_per_caption: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: 5 }),\n undefined\n ]),\n style: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SubtitlesStyleEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n diarization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n diarization_config: faker.helpers.arrayElement([\n {\n ...{\n number_of_speakers: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ]),\n min_speakers: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n max_speakers: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ])\n }\n },\n undefined\n ]),\n translation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n translation_config: faker.helpers.arrayElement([\n {\n ...{\n target_languages: faker.helpers.arrayElements(\n Object.values(TranslationLanguageCodeEnum)\n ),\n model: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranslationModelEnum)),\n undefined\n ]),\n match_original_utterances: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n lipsync: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n context_adaptation: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n context: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n informal: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n summarization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n summarization_config: faker.helpers.arrayElement([\n {\n ...{\n type: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SummaryTypesEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n moderation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n named_entity_recognition: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n chapterization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n name_consistency: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_spelling: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_spelling_config: faker.helpers.arrayElement([\n {\n ...{\n spelling_dictionary: {\n [faker.string.alphanumeric(5)]: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n }\n },\n undefined\n ]),\n structured_data_extraction: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n 
undefined\n ]),\n structured_data_extraction_config: faker.helpers.arrayElement([\n {\n ...{\n classes: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => [])\n }\n },\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n audio_to_llm: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n audio_to_llm_config: faker.helpers.arrayElement([\n {\n ...{\n prompts: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => [])\n }\n },\n undefined\n ]),\n sentences: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n display_mode: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n punctuation_enhanced: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n language_config: faker.helpers.arrayElement([\n {\n ...{\n languages: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ]),\n code_switching: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n audio_url: faker.helpers.arrayElement([faker.internet.url(), null])\n }\n },\n undefined\n ]),\n result: faker.helpers.arrayElement([\n {\n ...{\n metadata: {\n ...{\n audio_duration: faker.number.int({ min: undefined, max: undefined }),\n number_of_distinct_channels: faker.number.int({ min: 1, max: undefined }),\n billing_time: faker.number.int({ min: undefined, max: undefined }),\n transcription_time: faker.number.int({ min: undefined, max: undefined })\n }\n },\n transcription: faker.helpers.arrayElement([\n {\n ...{\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(\n Object.values(TranscriptionLanguageCodeEnum)\n ),\n sentences: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })),\n undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: 
faker.string.alpha(20),\n language: faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum))\n }))\n }\n },\n undefined\n ]),\n translation: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(\n Object.values(TranslationLanguageCodeEnum)\n ),\n sentences: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })),\n undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(\n Object.values(TranscriptionLanguageCodeEnum)\n )\n }))\n }))\n }\n },\n undefined\n ]),\n summarization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n },\n undefined\n ]),\n moderation: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: 
faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n },\n undefined\n ]),\n named_entity_recognition: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n entity: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n name_consistency: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n speaker_reidentification: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n structured_data_extraction: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n audio_to_llm: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: {\n ...{\n prompt: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n response: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n }\n }))\n }\n },\n undefined\n ]),\n sentences: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n 
status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n },\n undefined\n ]),\n display_mode: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n },\n undefined\n ]),\n chapterization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: {}\n }\n },\n undefined\n ]),\n diarization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum))\n }))\n }\n },\n undefined\n ])\n }\n },\n undefined\n ])\n })),\n ...overrideResponse\n})\n\nexport const getPreRecordedControllerGetPreRecordedJobV2ResponseCustomVocabularyEntryDTOMock = (\n overrideResponse: Partial<CustomVocabularyEntryDTO> = {}\n): CustomVocabularyEntryDTO => ({\n ...{\n value: faker.string.alpha(20),\n intensity: faker.helpers.arrayElement([faker.number.int({ min: 0, max: 1 }), undefined]),\n pronunciations: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ]),\n language: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ])\n },\n ...overrideResponse\n})\n\nexport const getPreRecordedControllerGetPreRecordedJobV2ResponseMock = (\n overrideResponse: Partial<PreRecordedResponse> = {}\n): PreRecordedResponse => ({\n id: faker.string.uuid(),\n request_id: faker.string.alpha(20),\n version: faker.number.int({ min: 
undefined, max: undefined }),\n status: faker.helpers.arrayElement([\"queued\", \"processing\", \"done\", \"error\"] as const),\n created_at: `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n completed_at: faker.helpers.arrayElement([\n faker.helpers.arrayElement([`${faker.date.past().toISOString().split(\".\")[0]}Z`, null]),\n undefined\n ]),\n custom_metadata: faker.helpers.arrayElement([{}, undefined]),\n error_code: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: 400, max: 599 }), null]),\n undefined\n ]),\n post_session_metadata: {},\n kind: faker.helpers.arrayElement([\"pre-recorded\"] as const),\n file: faker.helpers.arrayElement([\n {\n ...{\n id: faker.string.alpha(20),\n filename: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n source: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n audio_duration: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n null\n ]),\n number_of_channels: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n null\n ])\n }\n },\n undefined\n ]),\n request_params: faker.helpers.arrayElement([\n {\n ...{\n context_prompt: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n custom_vocabulary: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_vocabulary_config: faker.helpers.arrayElement([\n {\n ...{\n vocabulary: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() =>\n faker.helpers.arrayElement([\n {\n ...getPreRecordedControllerGetPreRecordedJobV2ResponseCustomVocabularyEntryDTOMock()\n },\n faker.string.alpha(20)\n ])\n ),\n default_intensity: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: 1 }),\n undefined\n ])\n }\n },\n undefined\n ]),\n detect_language: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n enable_code_switching: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n code_switching_config: faker.helpers.arrayElement([\n {\n ...{\n languages: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n language: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ]),\n callback_url: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n callback: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n callback_config: faker.helpers.arrayElement([\n {\n ...{\n url: faker.internet.url(),\n method: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(CallbackMethodEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n subtitles_config: faker.helpers.arrayElement([\n {\n ...{\n formats: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(SubtitlesFormatEnum)),\n undefined\n ]),\n minimum_duration: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n maximum_duration: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: 30 }),\n undefined\n ]),\n maximum_characters_per_row: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ]),\n maximum_rows_per_caption: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: 5 }),\n undefined\n ]),\n style: faker.helpers.arrayElement([\n 
faker.helpers.arrayElement(Object.values(SubtitlesStyleEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n diarization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n diarization_config: faker.helpers.arrayElement([\n {\n ...{\n number_of_speakers: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ]),\n min_speakers: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n max_speakers: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ])\n }\n },\n undefined\n ]),\n translation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n translation_config: faker.helpers.arrayElement([\n {\n ...{\n target_languages: faker.helpers.arrayElements(\n Object.values(TranslationLanguageCodeEnum)\n ),\n model: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranslationModelEnum)),\n undefined\n ]),\n match_original_utterances: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n lipsync: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n context_adaptation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n context: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n informal: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n summarization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n summarization_config: faker.helpers.arrayElement([\n {\n ...{\n type: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SummaryTypesEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n moderation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n named_entity_recognition: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n chapterization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n name_consistency: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_spelling: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_spelling_config: faker.helpers.arrayElement([\n {\n ...{\n spelling_dictionary: {\n [faker.string.alphanumeric(5)]: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n }\n },\n undefined\n ]),\n structured_data_extraction: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n structured_data_extraction_config: faker.helpers.arrayElement([\n {\n ...{\n classes: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => [])\n }\n },\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n audio_to_llm: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n audio_to_llm_config: faker.helpers.arrayElement([\n {\n ...{\n prompts: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => [])\n }\n },\n undefined\n ]),\n sentences: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n display_mode: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n punctuation_enhanced: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n language_config: faker.helpers.arrayElement([\n {\n ...{\n languages: faker.helpers.arrayElement([\n 
faker.helpers.arrayElements(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ]),\n code_switching: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n audio_url: faker.helpers.arrayElement([faker.internet.url(), null])\n }\n },\n undefined\n ]),\n result: faker.helpers.arrayElement([\n {\n ...{\n metadata: {\n ...{\n audio_duration: faker.number.int({ min: undefined, max: undefined }),\n number_of_distinct_channels: faker.number.int({ min: 1, max: undefined }),\n billing_time: faker.number.int({ min: undefined, max: undefined }),\n transcription_time: faker.number.int({ min: undefined, max: undefined })\n }\n },\n transcription: faker.helpers.arrayElement([\n {\n ...{\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(Object.values(TranscriptionLanguageCodeEnum)),\n sentences: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })\n ),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })\n ),\n undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum))\n }))\n }\n },\n undefined\n ]),\n translation: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(Object.values(TranslationLanguageCodeEnum)),\n sentences: faker.helpers.arrayElement([\n Array.from(\n { length: 
faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })),\n undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum))\n }))\n }))\n }\n },\n undefined\n ]),\n summarization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n },\n undefined\n ]),\n moderation: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n },\n undefined\n ]),\n named_entity_recognition: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n entity: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n name_consistency: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n 
},\n undefined\n ]),\n speaker_reidentification: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n structured_data_extraction: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n audio_to_llm: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: {\n ...{\n prompt: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n response: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n }\n }))\n }\n },\n undefined\n ]),\n sentences: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n },\n undefined\n ]),\n display_mode: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n },\n undefined\n ]),\n chapterization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: 
undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: {}\n }\n },\n undefined\n ]),\n diarization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum))\n }))\n }\n },\n undefined\n ])\n }\n },\n undefined\n ]),\n ...overrideResponse\n})\n\nexport const getPreRecordedControllerGetAudioV2ResponseMock = (): Blob =>\n new Blob(faker.helpers.arrayElements(faker.word.words(10).split(\" \")))\n\nexport const getTranscriptionControllerInitPreRecordedJobV2ResponseMock = (\n overrideResponse: Partial<InitPreRecordedTranscriptionResponse> = {}\n): InitPreRecordedTranscriptionResponse => ({\n id: faker.string.uuid(),\n result_url: faker.internet.url(),\n ...overrideResponse\n})\n\nexport const getTranscriptionControllerListV2ResponseCustomVocabularyEntryDTOMock = (\n overrideResponse: Partial<CustomVocabularyEntryDTO> = {}\n): CustomVocabularyEntryDTO => ({\n ...{\n value: faker.string.alpha(20),\n intensity: faker.helpers.arrayElement([faker.number.int({ min: 0, max: 1 }), undefined]),\n pronunciations: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ]),\n language: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ])\n },\n ...overrideResponse\n})\n\nexport const getTranscriptionControllerListV2ResponsePreRecordedResponseMock = (\n overrideResponse: Partial<PreRecordedResponse> = {}\n): PreRecordedResponse => ({\n ...{\n id: faker.string.uuid(),\n request_id: faker.string.alpha(20),\n version: faker.number.int({ min: undefined, max: undefined }),\n status: faker.helpers.arrayElement([\"queued\", \"processing\", \"done\", \"error\"] as const),\n created_at: `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n completed_at: faker.helpers.arrayElement([\n faker.helpers.arrayElement([`${faker.date.past().toISOString().split(\".\")[0]}Z`, null]),\n undefined\n ]),\n custom_metadata: faker.helpers.arrayElement([{}, undefined]),\n error_code: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: 400, max: 599 }), null]),\n undefined\n ]),\n post_session_metadata: 
{},\n kind: faker.helpers.arrayElement([\"pre-recorded\"] as const),\n file: faker.helpers.arrayElement([\n {\n ...{\n id: faker.string.alpha(20),\n filename: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n source: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n audio_duration: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n null\n ]),\n number_of_channels: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n null\n ])\n }\n },\n undefined\n ]),\n request_params: faker.helpers.arrayElement([\n {\n ...{\n context_prompt: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n custom_vocabulary: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_vocabulary_config: faker.helpers.arrayElement([\n {\n ...{\n vocabulary: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() =>\n faker.helpers.arrayElement([\n { ...getTranscriptionControllerListV2ResponseCustomVocabularyEntryDTOMock() },\n faker.string.alpha(20)\n ])\n ),\n default_intensity: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: 1 }),\n undefined\n ])\n }\n },\n undefined\n ]),\n detect_language: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n enable_code_switching: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n code_switching_config: faker.helpers.arrayElement([\n {\n ...{\n languages: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n language: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ]),\n callback_url: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n callback: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n callback_config: faker.helpers.arrayElement([\n {\n ...{\n url: faker.internet.url(),\n method: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(CallbackMethodEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n subtitles_config: faker.helpers.arrayElement([\n {\n ...{\n formats: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(SubtitlesFormatEnum)),\n undefined\n ]),\n minimum_duration: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n maximum_duration: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: 30 }),\n undefined\n ]),\n maximum_characters_per_row: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ]),\n maximum_rows_per_caption: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: 5 }),\n undefined\n ]),\n style: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SubtitlesStyleEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n diarization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n diarization_config: faker.helpers.arrayElement([\n {\n ...{\n number_of_speakers: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ]),\n min_speakers: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n max_speakers: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ])\n }\n },\n undefined\n ]),\n 
translation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n translation_config: faker.helpers.arrayElement([\n {\n ...{\n target_languages: faker.helpers.arrayElements(\n Object.values(TranslationLanguageCodeEnum)\n ),\n model: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranslationModelEnum)),\n undefined\n ]),\n match_original_utterances: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n lipsync: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n context_adaptation: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n context: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n informal: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n summarization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n summarization_config: faker.helpers.arrayElement([\n {\n ...{\n type: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SummaryTypesEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n moderation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n named_entity_recognition: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n chapterization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n name_consistency: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_spelling: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_spelling_config: faker.helpers.arrayElement([\n {\n ...{\n spelling_dictionary: {\n [faker.string.alphanumeric(5)]: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n }\n },\n undefined\n ]),\n structured_data_extraction: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n structured_data_extraction_config: faker.helpers.arrayElement([\n {\n ...{\n classes: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => [])\n }\n },\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n audio_to_llm: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n audio_to_llm_config: faker.helpers.arrayElement([\n {\n ...{\n prompts: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => [])\n }\n },\n undefined\n ]),\n sentences: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n display_mode: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n punctuation_enhanced: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n language_config: faker.helpers.arrayElement([\n {\n ...{\n languages: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ]),\n code_switching: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n audio_url: faker.helpers.arrayElement([faker.internet.url(), null])\n }\n },\n undefined\n ]),\n result: faker.helpers.arrayElement([\n {\n ...{\n metadata: {\n ...{\n audio_duration: faker.number.int({ min: undefined, max: undefined }),\n number_of_distinct_channels: faker.number.int({ min: 1, max: undefined }),\n billing_time: faker.number.int({ min: undefined, max: undefined }),\n transcription_time: faker.number.int({ min: undefined, max: undefined })\n 
}\n },\n transcription: faker.helpers.arrayElement([\n {\n ...{\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(\n Object.values(TranscriptionLanguageCodeEnum)\n ),\n sentences: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })),\n undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum))\n }))\n }\n },\n undefined\n ]),\n translation: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(\n Object.values(TranslationLanguageCodeEnum)\n ),\n sentences: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) 
=> i + 1\n ).map(() => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })),\n undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(\n Object.values(TranscriptionLanguageCodeEnum)\n )\n }))\n }))\n }\n },\n undefined\n ]),\n summarization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n },\n undefined\n ]),\n moderation: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n },\n undefined\n ]),\n named_entity_recognition: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n entity: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n name_consistency: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n speaker_reidentification: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n structured_data_extraction: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, 
max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n audio_to_llm: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: {\n ...{\n prompt: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n response: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n }\n }))\n }\n },\n undefined\n ]),\n sentences: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n },\n undefined\n ]),\n display_mode: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n },\n undefined\n ]),\n chapterization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: {}\n }\n },\n undefined\n ]),\n diarization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 
}) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum))\n }))\n }\n },\n undefined\n ])\n }\n },\n undefined\n ])\n },\n ...overrideResponse\n})\n\nexport const getTranscriptionControllerListV2ResponseStreamingResponseMock = (\n overrideResponse: Partial<StreamingResponse> = {}\n): StreamingResponse => ({\n ...{\n id: faker.string.uuid(),\n request_id: faker.string.alpha(20),\n version: faker.number.int({ min: undefined, max: undefined }),\n status: faker.helpers.arrayElement([\"queued\", \"processing\", \"done\", \"error\"] as const),\n created_at: `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n completed_at: faker.helpers.arrayElement([\n faker.helpers.arrayElement([`${faker.date.past().toISOString().split(\".\")[0]}Z`, null]),\n undefined\n ]),\n custom_metadata: faker.helpers.arrayElement([{}, undefined]),\n error_code: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: 400, max: 599 }), null]),\n undefined\n ]),\n post_session_metadata: {},\n kind: faker.helpers.arrayElement([\"live\"] as const),\n file: faker.helpers.arrayElement([\n {\n ...{\n id: faker.string.alpha(20),\n filename: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n source: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n audio_duration: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n null\n ]),\n number_of_channels: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n null\n ])\n }\n },\n undefined\n ]),\n request_params: faker.helpers.arrayElement([\n {\n ...{\n encoding: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(StreamingSupportedEncodingEnum)),\n undefined\n ]),\n bit_depth: faker.helpers.arrayElement([\n faker.helpers.arrayElement([8, 16, 24, 32] as const),\n undefined\n ]),\n sample_rate: faker.helpers.arrayElement([\n faker.helpers.arrayElement([8000, 16000, 32000, 44100, 48000] as const),\n undefined\n ]),\n channels: faker.helpers.arrayElement([faker.number.int({ min: 1, max: 8 }), undefined]),\n model: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(StreamingSupportedModels)),\n undefined\n ]),\n endpointing: faker.helpers.arrayElement([\n faker.number.int({ min: 0.01, max: 10 }),\n undefined\n ]),\n maximum_duration_without_endpointing: faker.helpers.arrayElement([\n faker.number.int({ min: 5, max: 60 }),\n undefined\n ]),\n language_config: faker.helpers.arrayElement([\n {\n ...{\n languages: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ]),\n code_switching: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n pre_processing: 
faker.helpers.arrayElement([\n {\n ...{\n audio_enhancer: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n speech_threshold: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: 1 }),\n undefined\n ])\n }\n },\n undefined\n ]),\n realtime_processing: faker.helpers.arrayElement([\n {\n ...{\n custom_vocabulary: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n custom_vocabulary_config: faker.helpers.arrayElement([\n {\n ...{\n vocabulary: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() =>\n faker.helpers.arrayElement([\n {\n ...getTranscriptionControllerListV2ResponseCustomVocabularyEntryDTOMock()\n },\n faker.string.alpha(20)\n ])\n ),\n default_intensity: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: 1 }),\n undefined\n ])\n }\n },\n undefined\n ]),\n custom_spelling: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_spelling_config: faker.helpers.arrayElement([\n {\n ...{\n spelling_dictionary: {\n [faker.string.alphanumeric(5)]: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n }\n },\n undefined\n ]),\n translation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n translation_config: faker.helpers.arrayElement([\n {\n ...{\n target_languages: faker.helpers.arrayElements(\n Object.values(TranslationLanguageCodeEnum)\n ),\n model: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranslationModelEnum)),\n undefined\n ]),\n match_original_utterances: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n lipsync: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n context_adaptation: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n context: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n informal: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n named_entity_recognition: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ]),\n post_processing: faker.helpers.arrayElement([\n {\n ...{\n summarization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n summarization_config: faker.helpers.arrayElement([\n {\n ...{\n type: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SummaryTypesEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n chapterization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n messages_config: faker.helpers.arrayElement([\n {\n ...{\n receive_partial_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_final_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_speech_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_pre_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_realtime_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_post_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_acknowledgments: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n 
]),\n receive_errors: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n receive_lifecycle_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ]),\n callback: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n callback_config: faker.helpers.arrayElement([\n {\n ...{\n url: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n receive_partial_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_final_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_speech_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_pre_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_realtime_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_post_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_acknowledgments: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_errors: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n receive_lifecycle_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ])\n }\n },\n undefined\n ]),\n result: faker.helpers.arrayElement([\n {\n ...{\n metadata: {\n ...{\n audio_duration: faker.number.int({ min: undefined, max: undefined }),\n number_of_distinct_channels: faker.number.int({ min: 1, max: undefined }),\n billing_time: faker.number.int({ min: undefined, max: undefined }),\n transcription_time: faker.number.int({ min: undefined, max: undefined })\n }\n },\n transcription: faker.helpers.arrayElement([\n {\n ...{\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(\n Object.values(TranscriptionLanguageCodeEnum)\n ),\n sentences: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })),\n undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: 
faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum))\n }))\n }\n },\n undefined\n ]),\n translation: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(\n Object.values(TranslationLanguageCodeEnum)\n ),\n sentences: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })),\n undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(\n Object.values(TranscriptionLanguageCodeEnum)\n )\n }))\n }))\n }\n },\n undefined\n ]),\n summarization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n },\n undefined\n ]),\n named_entity_recognition: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ 
min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n entity: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n chapterization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: {}\n }\n },\n undefined\n ]),\n messages: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ])\n }\n },\n undefined\n ])\n },\n ...overrideResponse\n})\n\nexport const getTranscriptionControllerListV2ResponseMock = (\n overrideResponse: Partial<ListTranscriptionResponse> = {}\n): ListTranscriptionResponse => ({\n first: faker.internet.url(),\n current: faker.internet.url(),\n next: faker.helpers.arrayElement([faker.internet.url(), null]),\n items: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.helpers.arrayElement([\n { ...getTranscriptionControllerListV2ResponsePreRecordedResponseMock() },\n { ...getTranscriptionControllerListV2ResponseStreamingResponseMock() }\n ])\n ),\n ...overrideResponse\n})\n\nexport const getTranscriptionControllerGetTranscriptV2ResponseCustomVocabularyEntryDTOMock = (\n overrideResponse: Partial<CustomVocabularyEntryDTO> = {}\n): CustomVocabularyEntryDTO => ({\n ...{\n value: faker.string.alpha(20),\n intensity: faker.helpers.arrayElement([faker.number.int({ min: 0, max: 1 }), undefined]),\n pronunciations: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ]),\n language: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ])\n },\n ...overrideResponse\n})\n\nexport const getTranscriptionControllerGetTranscriptV2ResponsePreRecordedResponseMock = (\n overrideResponse: Partial<PreRecordedResponse> = {}\n): PreRecordedResponse => ({\n ...{\n id: faker.string.uuid(),\n request_id: faker.string.alpha(20),\n version: faker.number.int({ min: undefined, max: undefined }),\n status: faker.helpers.arrayElement([\"queued\", \"processing\", \"done\", \"error\"] as const),\n created_at: `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n completed_at: faker.helpers.arrayElement([\n faker.helpers.arrayElement([`${faker.date.past().toISOString().split(\".\")[0]}Z`, null]),\n undefined\n ]),\n custom_metadata: faker.helpers.arrayElement([{}, undefined]),\n error_code: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: 400, max: 599 }), null]),\n undefined\n ]),\n post_session_metadata: {},\n kind: faker.helpers.arrayElement([\"pre-recorded\"] as 
const),\n file: faker.helpers.arrayElement([\n {\n ...{\n id: faker.string.alpha(20),\n filename: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n source: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n audio_duration: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n null\n ]),\n number_of_channels: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n null\n ])\n }\n },\n undefined\n ]),\n request_params: faker.helpers.arrayElement([\n {\n ...{\n context_prompt: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n custom_vocabulary: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_vocabulary_config: faker.helpers.arrayElement([\n {\n ...{\n vocabulary: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() =>\n faker.helpers.arrayElement([\n {\n ...getTranscriptionControllerGetTranscriptV2ResponseCustomVocabularyEntryDTOMock()\n },\n faker.string.alpha(20)\n ])\n ),\n default_intensity: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: 1 }),\n undefined\n ])\n }\n },\n undefined\n ]),\n detect_language: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n enable_code_switching: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n code_switching_config: faker.helpers.arrayElement([\n {\n ...{\n languages: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n language: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ]),\n callback_url: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n callback: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n callback_config: faker.helpers.arrayElement([\n {\n ...{\n url: faker.internet.url(),\n method: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(CallbackMethodEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n subtitles_config: faker.helpers.arrayElement([\n {\n ...{\n formats: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(SubtitlesFormatEnum)),\n undefined\n ]),\n minimum_duration: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n maximum_duration: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: 30 }),\n undefined\n ]),\n maximum_characters_per_row: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ]),\n maximum_rows_per_caption: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: 5 }),\n undefined\n ]),\n style: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SubtitlesStyleEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n diarization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n diarization_config: faker.helpers.arrayElement([\n {\n ...{\n number_of_speakers: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ]),\n min_speakers: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n max_speakers: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ])\n }\n },\n undefined\n ]),\n translation: 
faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n translation_config: faker.helpers.arrayElement([\n {\n ...{\n target_languages: faker.helpers.arrayElements(\n Object.values(TranslationLanguageCodeEnum)\n ),\n model: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranslationModelEnum)),\n undefined\n ]),\n match_original_utterances: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n lipsync: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n context_adaptation: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n context: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n informal: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n summarization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n summarization_config: faker.helpers.arrayElement([\n {\n ...{\n type: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SummaryTypesEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n moderation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n named_entity_recognition: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n chapterization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n name_consistency: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_spelling: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_spelling_config: faker.helpers.arrayElement([\n {\n ...{\n spelling_dictionary: {\n [faker.string.alphanumeric(5)]: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n }\n },\n undefined\n ]),\n structured_data_extraction: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n structured_data_extraction_config: faker.helpers.arrayElement([\n {\n ...{\n classes: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => [])\n }\n },\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n audio_to_llm: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n audio_to_llm_config: faker.helpers.arrayElement([\n {\n ...{\n prompts: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => [])\n }\n },\n undefined\n ]),\n sentences: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n display_mode: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n punctuation_enhanced: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n language_config: faker.helpers.arrayElement([\n {\n ...{\n languages: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ]),\n code_switching: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n audio_url: faker.helpers.arrayElement([faker.internet.url(), null])\n }\n },\n undefined\n ]),\n result: faker.helpers.arrayElement([\n {\n ...{\n metadata: {\n ...{\n audio_duration: faker.number.int({ min: undefined, max: undefined }),\n number_of_distinct_channels: faker.number.int({ min: 1, max: undefined }),\n billing_time: faker.number.int({ min: undefined, max: undefined }),\n transcription_time: faker.number.int({ min: undefined, max: undefined })\n }\n },\n 
transcription: faker.helpers.arrayElement([\n {\n ...{\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(\n Object.values(TranscriptionLanguageCodeEnum)\n ),\n sentences: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })),\n undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum))\n }))\n }\n },\n undefined\n ]),\n translation: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(\n Object.values(TranslationLanguageCodeEnum)\n ),\n sentences: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n 
).map(() => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })),\n undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(\n Object.values(TranscriptionLanguageCodeEnum)\n )\n }))\n }))\n }\n },\n undefined\n ]),\n summarization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n },\n undefined\n ]),\n moderation: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n },\n undefined\n ]),\n named_entity_recognition: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n entity: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n name_consistency: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n speaker_reidentification: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n structured_data_extraction: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: 
undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n audio_to_llm: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: {\n ...{\n prompt: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n response: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n }\n }))\n }\n },\n undefined\n ]),\n sentences: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n },\n undefined\n ]),\n display_mode: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n },\n undefined\n ]),\n chapterization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: {}\n }\n },\n undefined\n ]),\n diarization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) 
},\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum))\n }))\n }\n },\n undefined\n ])\n }\n },\n undefined\n ])\n },\n ...overrideResponse\n})\n\nexport const getTranscriptionControllerGetTranscriptV2ResponseStreamingResponseMock = (\n overrideResponse: Partial<StreamingResponse> = {}\n): StreamingResponse => ({\n ...{\n id: faker.string.uuid(),\n request_id: faker.string.alpha(20),\n version: faker.number.int({ min: undefined, max: undefined }),\n status: faker.helpers.arrayElement([\"queued\", \"processing\", \"done\", \"error\"] as const),\n created_at: `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n completed_at: faker.helpers.arrayElement([\n faker.helpers.arrayElement([`${faker.date.past().toISOString().split(\".\")[0]}Z`, null]),\n undefined\n ]),\n custom_metadata: faker.helpers.arrayElement([{}, undefined]),\n error_code: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: 400, max: 599 }), null]),\n undefined\n ]),\n post_session_metadata: {},\n kind: faker.helpers.arrayElement([\"live\"] as const),\n file: faker.helpers.arrayElement([\n {\n ...{\n id: faker.string.alpha(20),\n filename: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n source: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n audio_duration: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n null\n ]),\n number_of_channels: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n null\n ])\n }\n },\n undefined\n ]),\n request_params: faker.helpers.arrayElement([\n {\n ...{\n encoding: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(StreamingSupportedEncodingEnum)),\n undefined\n ]),\n bit_depth: faker.helpers.arrayElement([\n faker.helpers.arrayElement([8, 16, 24, 32] as const),\n undefined\n ]),\n sample_rate: faker.helpers.arrayElement([\n faker.helpers.arrayElement([8000, 16000, 32000, 44100, 48000] as const),\n undefined\n ]),\n channels: faker.helpers.arrayElement([faker.number.int({ min: 1, max: 8 }), undefined]),\n model: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(StreamingSupportedModels)),\n undefined\n ]),\n endpointing: faker.helpers.arrayElement([\n faker.number.int({ min: 0.01, max: 10 }),\n undefined\n ]),\n maximum_duration_without_endpointing: faker.helpers.arrayElement([\n faker.number.int({ min: 5, max: 60 }),\n undefined\n ]),\n language_config: faker.helpers.arrayElement([\n {\n ...{\n languages: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ]),\n code_switching: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n 
pre_processing: faker.helpers.arrayElement([\n {\n ...{\n audio_enhancer: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n speech_threshold: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: 1 }),\n undefined\n ])\n }\n },\n undefined\n ]),\n realtime_processing: faker.helpers.arrayElement([\n {\n ...{\n custom_vocabulary: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n custom_vocabulary_config: faker.helpers.arrayElement([\n {\n ...{\n vocabulary: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() =>\n faker.helpers.arrayElement([\n {\n ...getTranscriptionControllerGetTranscriptV2ResponseCustomVocabularyEntryDTOMock()\n },\n faker.string.alpha(20)\n ])\n ),\n default_intensity: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: 1 }),\n undefined\n ])\n }\n },\n undefined\n ]),\n custom_spelling: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_spelling_config: faker.helpers.arrayElement([\n {\n ...{\n spelling_dictionary: {\n [faker.string.alphanumeric(5)]: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n }\n },\n undefined\n ]),\n translation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n translation_config: faker.helpers.arrayElement([\n {\n ...{\n target_languages: faker.helpers.arrayElements(\n Object.values(TranslationLanguageCodeEnum)\n ),\n model: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranslationModelEnum)),\n undefined\n ]),\n match_original_utterances: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n lipsync: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n context_adaptation: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n context: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n informal: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n named_entity_recognition: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ]),\n post_processing: faker.helpers.arrayElement([\n {\n ...{\n summarization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n summarization_config: faker.helpers.arrayElement([\n {\n ...{\n type: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SummaryTypesEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n chapterization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n messages_config: faker.helpers.arrayElement([\n {\n ...{\n receive_partial_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_final_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_speech_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_pre_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_realtime_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_post_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_acknowledgments: faker.helpers.arrayElement([\n 
faker.datatype.boolean(),\n undefined\n ]),\n receive_errors: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n receive_lifecycle_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ]),\n callback: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n callback_config: faker.helpers.arrayElement([\n {\n ...{\n url: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n receive_partial_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_final_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_speech_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_pre_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_realtime_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_post_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_acknowledgments: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_errors: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n receive_lifecycle_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ])\n }\n },\n undefined\n ]),\n result: faker.helpers.arrayElement([\n {\n ...{\n metadata: {\n ...{\n audio_duration: faker.number.int({ min: undefined, max: undefined }),\n number_of_distinct_channels: faker.number.int({ min: 1, max: undefined }),\n billing_time: faker.number.int({ min: undefined, max: undefined }),\n transcription_time: faker.number.int({ min: undefined, max: undefined })\n }\n },\n transcription: faker.helpers.arrayElement([\n {\n ...{\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(\n Object.values(TranscriptionLanguageCodeEnum)\n ),\n sentences: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })),\n undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: 
undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum))\n }))\n }\n },\n undefined\n ]),\n translation: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(\n Object.values(TranslationLanguageCodeEnum)\n ),\n sentences: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })),\n undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(\n Object.values(TranscriptionLanguageCodeEnum)\n )\n }))\n }))\n }\n },\n undefined\n ]),\n summarization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n },\n undefined\n ]),\n named_entity_recognition: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: 
faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n entity: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n chapterization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: {}\n }\n },\n undefined\n ]),\n messages: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ])\n }\n },\n undefined\n ])\n },\n ...overrideResponse\n})\n\nexport const getTranscriptionControllerGetTranscriptV2ResponseMock =\n (): TranscriptionControllerGetTranscriptV2200 =>\n faker.helpers.arrayElement([\n { ...getTranscriptionControllerGetTranscriptV2ResponsePreRecordedResponseMock() },\n { ...getTranscriptionControllerGetTranscriptV2ResponseStreamingResponseMock() }\n ])\n\nexport const getTranscriptionControllerGetAudioV2ResponseMock = (): Blob =>\n new Blob(faker.helpers.arrayElements(faker.word.words(10).split(\" \")))\n\nexport const getHistoryControllerGetListV1ResponseCustomVocabularyEntryDTOMock = (\n overrideResponse: Partial<CustomVocabularyEntryDTO> = {}\n): CustomVocabularyEntryDTO => ({\n ...{\n value: faker.string.alpha(20),\n intensity: faker.helpers.arrayElement([faker.number.int({ min: 0, max: 1 }), undefined]),\n pronunciations: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ]),\n language: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ])\n },\n ...overrideResponse\n})\n\nexport const getHistoryControllerGetListV1ResponsePreRecordedResponseMock = (\n overrideResponse: Partial<PreRecordedResponse> = {}\n): PreRecordedResponse => ({\n ...{\n id: faker.string.uuid(),\n request_id: faker.string.alpha(20),\n version: faker.number.int({ min: undefined, max: undefined }),\n status: faker.helpers.arrayElement([\"queued\", \"processing\", \"done\", \"error\"] as const),\n created_at: `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n completed_at: faker.helpers.arrayElement([\n faker.helpers.arrayElement([`${faker.date.past().toISOString().split(\".\")[0]}Z`, null]),\n undefined\n ]),\n custom_metadata: faker.helpers.arrayElement([{}, undefined]),\n error_code: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: 400, max: 599 }), null]),\n undefined\n ]),\n post_session_metadata: {},\n kind: faker.helpers.arrayElement([\"pre-recorded\"] as const),\n file: faker.helpers.arrayElement([\n {\n ...{\n id: faker.string.alpha(20),\n filename: 
faker.helpers.arrayElement([faker.string.alpha(20), null]),\n source: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n audio_duration: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n null\n ]),\n number_of_channels: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n null\n ])\n }\n },\n undefined\n ]),\n request_params: faker.helpers.arrayElement([\n {\n ...{\n context_prompt: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n custom_vocabulary: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_vocabulary_config: faker.helpers.arrayElement([\n {\n ...{\n vocabulary: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() =>\n faker.helpers.arrayElement([\n { ...getHistoryControllerGetListV1ResponseCustomVocabularyEntryDTOMock() },\n faker.string.alpha(20)\n ])\n ),\n default_intensity: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: 1 }),\n undefined\n ])\n }\n },\n undefined\n ]),\n detect_language: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n enable_code_switching: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n code_switching_config: faker.helpers.arrayElement([\n {\n ...{\n languages: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n language: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ]),\n callback_url: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n callback: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n callback_config: faker.helpers.arrayElement([\n {\n ...{\n url: faker.internet.url(),\n method: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(CallbackMethodEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n subtitles_config: faker.helpers.arrayElement([\n {\n ...{\n formats: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(SubtitlesFormatEnum)),\n undefined\n ]),\n minimum_duration: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n maximum_duration: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: 30 }),\n undefined\n ]),\n maximum_characters_per_row: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ]),\n maximum_rows_per_caption: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: 5 }),\n undefined\n ]),\n style: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SubtitlesStyleEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n diarization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n diarization_config: faker.helpers.arrayElement([\n {\n ...{\n number_of_speakers: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ]),\n min_speakers: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n max_speakers: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ])\n }\n },\n undefined\n ]),\n translation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n translation_config: faker.helpers.arrayElement([\n {\n ...{\n target_languages: 
faker.helpers.arrayElements(\n Object.values(TranslationLanguageCodeEnum)\n ),\n model: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranslationModelEnum)),\n undefined\n ]),\n match_original_utterances: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n lipsync: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n context_adaptation: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n context: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n informal: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n summarization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n summarization_config: faker.helpers.arrayElement([\n {\n ...{\n type: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SummaryTypesEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n moderation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n named_entity_recognition: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n chapterization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n name_consistency: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_spelling: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_spelling_config: faker.helpers.arrayElement([\n {\n ...{\n spelling_dictionary: {\n [faker.string.alphanumeric(5)]: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n }\n },\n undefined\n ]),\n structured_data_extraction: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n structured_data_extraction_config: faker.helpers.arrayElement([\n {\n ...{\n classes: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => [])\n }\n },\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n audio_to_llm: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n audio_to_llm_config: faker.helpers.arrayElement([\n {\n ...{\n prompts: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => [])\n }\n },\n undefined\n ]),\n sentences: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n display_mode: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n punctuation_enhanced: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n language_config: faker.helpers.arrayElement([\n {\n ...{\n languages: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ]),\n code_switching: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n audio_url: faker.helpers.arrayElement([faker.internet.url(), null])\n }\n },\n undefined\n ]),\n result: faker.helpers.arrayElement([\n {\n ...{\n metadata: {\n ...{\n audio_duration: faker.number.int({ min: undefined, max: undefined }),\n number_of_distinct_channels: faker.number.int({ min: 1, max: undefined }),\n billing_time: faker.number.int({ min: undefined, max: undefined }),\n transcription_time: faker.number.int({ min: undefined, max: undefined })\n }\n },\n transcription: faker.helpers.arrayElement([\n {\n ...{\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(\n 
Object.values(TranscriptionLanguageCodeEnum)\n ),\n sentences: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })),\n undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum))\n }))\n }\n },\n undefined\n ]),\n translation: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(\n Object.values(TranslationLanguageCodeEnum)\n ),\n sentences: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })),\n 
undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(\n Object.values(TranscriptionLanguageCodeEnum)\n )\n }))\n }))\n }\n },\n undefined\n ]),\n summarization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n },\n undefined\n ]),\n moderation: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n },\n undefined\n ]),\n named_entity_recognition: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n entity: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n name_consistency: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n speaker_reidentification: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n structured_data_extraction: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n 
message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n audio_to_llm: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: {\n ...{\n prompt: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n response: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n }\n }))\n }\n },\n undefined\n ]),\n sentences: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n },\n undefined\n ]),\n display_mode: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n },\n undefined\n ]),\n chapterization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: {}\n }\n },\n undefined\n ]),\n diarization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, 
max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum))\n }))\n }\n },\n undefined\n ])\n }\n },\n undefined\n ])\n },\n ...overrideResponse\n})\n\nexport const getHistoryControllerGetListV1ResponseStreamingResponseMock = (\n overrideResponse: Partial<StreamingResponse> = {}\n): StreamingResponse => ({\n ...{\n id: faker.string.uuid(),\n request_id: faker.string.alpha(20),\n version: faker.number.int({ min: undefined, max: undefined }),\n status: faker.helpers.arrayElement([\"queued\", \"processing\", \"done\", \"error\"] as const),\n created_at: `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n completed_at: faker.helpers.arrayElement([\n faker.helpers.arrayElement([`${faker.date.past().toISOString().split(\".\")[0]}Z`, null]),\n undefined\n ]),\n custom_metadata: faker.helpers.arrayElement([{}, undefined]),\n error_code: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: 400, max: 599 }), null]),\n undefined\n ]),\n post_session_metadata: {},\n kind: faker.helpers.arrayElement([\"live\"] as const),\n file: faker.helpers.arrayElement([\n {\n ...{\n id: faker.string.alpha(20),\n filename: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n source: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n audio_duration: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n null\n ]),\n number_of_channels: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n null\n ])\n }\n },\n undefined\n ]),\n request_params: faker.helpers.arrayElement([\n {\n ...{\n encoding: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(StreamingSupportedEncodingEnum)),\n undefined\n ]),\n bit_depth: faker.helpers.arrayElement([\n faker.helpers.arrayElement([8, 16, 24, 32] as const),\n undefined\n ]),\n sample_rate: faker.helpers.arrayElement([\n faker.helpers.arrayElement([8000, 16000, 32000, 44100, 48000] as const),\n undefined\n ]),\n channels: faker.helpers.arrayElement([faker.number.int({ min: 1, max: 8 }), undefined]),\n model: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(StreamingSupportedModels)),\n undefined\n ]),\n endpointing: faker.helpers.arrayElement([\n faker.number.int({ min: 0.01, max: 10 }),\n undefined\n ]),\n maximum_duration_without_endpointing: faker.helpers.arrayElement([\n faker.number.int({ min: 5, max: 60 }),\n undefined\n ]),\n language_config: faker.helpers.arrayElement([\n {\n ...{\n languages: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ]),\n code_switching: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n pre_processing: faker.helpers.arrayElement([\n {\n ...{\n audio_enhancer: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n speech_threshold: 
faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: 1 }),\n undefined\n ])\n }\n },\n undefined\n ]),\n realtime_processing: faker.helpers.arrayElement([\n {\n ...{\n custom_vocabulary: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n custom_vocabulary_config: faker.helpers.arrayElement([\n {\n ...{\n vocabulary: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() =>\n faker.helpers.arrayElement([\n {\n ...getHistoryControllerGetListV1ResponseCustomVocabularyEntryDTOMock()\n },\n faker.string.alpha(20)\n ])\n ),\n default_intensity: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: 1 }),\n undefined\n ])\n }\n },\n undefined\n ]),\n custom_spelling: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_spelling_config: faker.helpers.arrayElement([\n {\n ...{\n spelling_dictionary: {\n [faker.string.alphanumeric(5)]: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n }\n },\n undefined\n ]),\n translation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n translation_config: faker.helpers.arrayElement([\n {\n ...{\n target_languages: faker.helpers.arrayElements(\n Object.values(TranslationLanguageCodeEnum)\n ),\n model: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranslationModelEnum)),\n undefined\n ]),\n match_original_utterances: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n lipsync: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n context_adaptation: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n context: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n informal: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n named_entity_recognition: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ]),\n post_processing: faker.helpers.arrayElement([\n {\n ...{\n summarization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n summarization_config: faker.helpers.arrayElement([\n {\n ...{\n type: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SummaryTypesEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n chapterization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n messages_config: faker.helpers.arrayElement([\n {\n ...{\n receive_partial_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_final_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_speech_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_pre_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_realtime_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_post_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_acknowledgments: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_errors: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n receive_lifecycle_events: faker.helpers.arrayElement([\n 
faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ]),\n callback: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n callback_config: faker.helpers.arrayElement([\n {\n ...{\n url: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n receive_partial_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_final_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_speech_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_pre_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_realtime_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_post_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_acknowledgments: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_errors: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n receive_lifecycle_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ])\n }\n },\n undefined\n ]),\n result: faker.helpers.arrayElement([\n {\n ...{\n metadata: {\n ...{\n audio_duration: faker.number.int({ min: undefined, max: undefined }),\n number_of_distinct_channels: faker.number.int({ min: 1, max: undefined }),\n billing_time: faker.number.int({ min: undefined, max: undefined }),\n transcription_time: faker.number.int({ min: undefined, max: undefined })\n }\n },\n transcription: faker.helpers.arrayElement([\n {\n ...{\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(\n Object.values(TranscriptionLanguageCodeEnum)\n ),\n sentences: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })),\n undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: 
faker.string.alpha(20),\n language: faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum))\n }))\n }\n },\n undefined\n ]),\n translation: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(\n Object.values(TranslationLanguageCodeEnum)\n ),\n sentences: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })),\n undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(\n Object.values(TranscriptionLanguageCodeEnum)\n )\n }))\n }))\n }\n },\n undefined\n ]),\n summarization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n },\n undefined\n ]),\n named_entity_recognition: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: 
faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n entity: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n chapterization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: {}\n }\n },\n undefined\n ]),\n messages: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ])\n }\n },\n undefined\n ])\n },\n ...overrideResponse\n})\n\nexport const getHistoryControllerGetListV1ResponseMock = (\n overrideResponse: Partial<ListHistoryResponse> = {}\n): ListHistoryResponse => ({\n first: faker.internet.url(),\n current: faker.internet.url(),\n next: faker.helpers.arrayElement([faker.internet.url(), null]),\n items: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.helpers.arrayElement([\n { ...getHistoryControllerGetListV1ResponsePreRecordedResponseMock() },\n { ...getHistoryControllerGetListV1ResponseStreamingResponseMock() }\n ])\n ),\n ...overrideResponse\n})\n\nexport const getStreamingControllerInitStreamingSessionV2ResponseMock = (\n overrideResponse: Partial<InitStreamingResponse> = {}\n): InitStreamingResponse => ({\n id: faker.string.uuid(),\n created_at: `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n url: faker.internet.url(),\n ...overrideResponse\n})\n\nexport const getStreamingControllerGetStreamingJobsV2ResponseCustomVocabularyEntryDTOMock = (\n overrideResponse: Partial<CustomVocabularyEntryDTO> = {}\n): CustomVocabularyEntryDTO => ({\n ...{\n value: faker.string.alpha(20),\n intensity: faker.helpers.arrayElement([faker.number.int({ min: 0, max: 1 }), undefined]),\n pronunciations: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ]),\n language: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ])\n },\n ...overrideResponse\n})\n\nexport const getStreamingControllerGetStreamingJobsV2ResponseMock = (\n overrideResponse: Partial<ListStreamingResponse> = {}\n): ListStreamingResponse => ({\n first: faker.internet.url(),\n current: faker.internet.url(),\n next: faker.helpers.arrayElement([faker.internet.url(), null]),\n items: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n id: faker.string.uuid(),\n request_id: faker.string.alpha(20),\n version: faker.number.int({ min: undefined, max: undefined }),\n status: faker.helpers.arrayElement([\"queued\", \"processing\", \"done\", \"error\"] as const),\n created_at: `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n completed_at: faker.helpers.arrayElement([\n 
faker.helpers.arrayElement([`${faker.date.past().toISOString().split(\".\")[0]}Z`, null]),\n undefined\n ]),\n custom_metadata: faker.helpers.arrayElement([{}, undefined]),\n error_code: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: 400, max: 599 }), null]),\n undefined\n ]),\n post_session_metadata: {},\n kind: faker.helpers.arrayElement([\"live\"] as const),\n file: faker.helpers.arrayElement([\n {\n ...{\n id: faker.string.alpha(20),\n filename: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n source: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n audio_duration: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n null\n ]),\n number_of_channels: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n null\n ])\n }\n },\n undefined\n ]),\n request_params: faker.helpers.arrayElement([\n {\n ...{\n encoding: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(StreamingSupportedEncodingEnum)),\n undefined\n ]),\n bit_depth: faker.helpers.arrayElement([\n faker.helpers.arrayElement([8, 16, 24, 32] as const),\n undefined\n ]),\n sample_rate: faker.helpers.arrayElement([\n faker.helpers.arrayElement([8000, 16000, 32000, 44100, 48000] as const),\n undefined\n ]),\n channels: faker.helpers.arrayElement([faker.number.int({ min: 1, max: 8 }), undefined]),\n model: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(StreamingSupportedModels)),\n undefined\n ]),\n endpointing: faker.helpers.arrayElement([\n faker.number.int({ min: 0.01, max: 10 }),\n undefined\n ]),\n maximum_duration_without_endpointing: faker.helpers.arrayElement([\n faker.number.int({ min: 5, max: 60 }),\n undefined\n ]),\n language_config: faker.helpers.arrayElement([\n {\n ...{\n languages: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ]),\n code_switching: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n pre_processing: faker.helpers.arrayElement([\n {\n ...{\n audio_enhancer: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n speech_threshold: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: 1 }),\n undefined\n ])\n }\n },\n undefined\n ]),\n realtime_processing: faker.helpers.arrayElement([\n {\n ...{\n custom_vocabulary: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n custom_vocabulary_config: faker.helpers.arrayElement([\n {\n ...{\n vocabulary: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() =>\n faker.helpers.arrayElement([\n {\n ...getStreamingControllerGetStreamingJobsV2ResponseCustomVocabularyEntryDTOMock()\n },\n faker.string.alpha(20)\n ])\n ),\n default_intensity: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: 1 }),\n undefined\n ])\n }\n },\n undefined\n ]),\n custom_spelling: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_spelling_config: faker.helpers.arrayElement([\n {\n ...{\n spelling_dictionary: {\n [faker.string.alphanumeric(5)]: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n }\n },\n undefined\n ]),\n translation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n translation_config: faker.helpers.arrayElement([\n {\n ...{\n target_languages: faker.helpers.arrayElements(\n 
Object.values(TranslationLanguageCodeEnum)\n ),\n model: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranslationModelEnum)),\n undefined\n ]),\n match_original_utterances: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n lipsync: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n context_adaptation: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n context: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n informal: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n named_entity_recognition: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ]),\n post_processing: faker.helpers.arrayElement([\n {\n ...{\n summarization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n summarization_config: faker.helpers.arrayElement([\n {\n ...{\n type: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SummaryTypesEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n chapterization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n messages_config: faker.helpers.arrayElement([\n {\n ...{\n receive_partial_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_final_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_speech_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_pre_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_realtime_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_post_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_acknowledgments: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_errors: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n receive_lifecycle_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ]),\n callback: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n callback_config: faker.helpers.arrayElement([\n {\n ...{\n url: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n receive_partial_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_final_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_speech_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_pre_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_realtime_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_post_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_acknowledgments: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_errors: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n receive_lifecycle_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ])\n }\n },\n undefined\n ]),\n result: 
faker.helpers.arrayElement([\n {\n ...{\n metadata: {\n ...{\n audio_duration: faker.number.int({ min: undefined, max: undefined }),\n number_of_distinct_channels: faker.number.int({ min: 1, max: undefined }),\n billing_time: faker.number.int({ min: undefined, max: undefined }),\n transcription_time: faker.number.int({ min: undefined, max: undefined })\n }\n },\n transcription: faker.helpers.arrayElement([\n {\n ...{\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(\n Object.values(TranscriptionLanguageCodeEnum)\n ),\n sentences: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })),\n undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum))\n }))\n }\n },\n undefined\n ]),\n translation: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(\n Object.values(TranslationLanguageCodeEnum)\n ),\n sentences: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined 
}),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })),\n undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(\n Object.values(TranscriptionLanguageCodeEnum)\n )\n }))\n }))\n }\n },\n undefined\n ]),\n summarization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n },\n undefined\n ]),\n named_entity_recognition: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n entity: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n chapterization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: {}\n }\n },\n undefined\n ]),\n messages: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ])\n }\n },\n undefined\n ])\n })),\n ...overrideResponse\n})\n\nexport const 
getStreamingControllerGetStreamingJobV2ResponseCustomVocabularyEntryDTOMock = (\n overrideResponse: Partial<CustomVocabularyEntryDTO> = {}\n): CustomVocabularyEntryDTO => ({\n ...{\n value: faker.string.alpha(20),\n intensity: faker.helpers.arrayElement([faker.number.int({ min: 0, max: 1 }), undefined]),\n pronunciations: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ]),\n language: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ])\n },\n ...overrideResponse\n})\n\nexport const getStreamingControllerGetStreamingJobV2ResponseMock = (\n overrideResponse: Partial<StreamingResponse> = {}\n): StreamingResponse => ({\n id: faker.string.uuid(),\n request_id: faker.string.alpha(20),\n version: faker.number.int({ min: undefined, max: undefined }),\n status: faker.helpers.arrayElement([\"queued\", \"processing\", \"done\", \"error\"] as const),\n created_at: `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n completed_at: faker.helpers.arrayElement([\n faker.helpers.arrayElement([`${faker.date.past().toISOString().split(\".\")[0]}Z`, null]),\n undefined\n ]),\n custom_metadata: faker.helpers.arrayElement([{}, undefined]),\n error_code: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: 400, max: 599 }), null]),\n undefined\n ]),\n post_session_metadata: {},\n kind: faker.helpers.arrayElement([\"live\"] as const),\n file: faker.helpers.arrayElement([\n {\n ...{\n id: faker.string.alpha(20),\n filename: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n source: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n audio_duration: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n null\n ]),\n number_of_channels: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n null\n ])\n }\n },\n undefined\n ]),\n request_params: faker.helpers.arrayElement([\n {\n ...{\n encoding: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(StreamingSupportedEncodingEnum)),\n undefined\n ]),\n bit_depth: faker.helpers.arrayElement([\n faker.helpers.arrayElement([8, 16, 24, 32] as const),\n undefined\n ]),\n sample_rate: faker.helpers.arrayElement([\n faker.helpers.arrayElement([8000, 16000, 32000, 44100, 48000] as const),\n undefined\n ]),\n channels: faker.helpers.arrayElement([faker.number.int({ min: 1, max: 8 }), undefined]),\n model: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(StreamingSupportedModels)),\n undefined\n ]),\n endpointing: faker.helpers.arrayElement([\n faker.number.int({ min: 0.01, max: 10 }),\n undefined\n ]),\n maximum_duration_without_endpointing: faker.helpers.arrayElement([\n faker.number.int({ min: 5, max: 60 }),\n undefined\n ]),\n language_config: faker.helpers.arrayElement([\n {\n ...{\n languages: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(TranscriptionLanguageCodeEnum)),\n undefined\n ]),\n code_switching: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n pre_processing: faker.helpers.arrayElement([\n {\n ...{\n audio_enhancer: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n speech_threshold: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: 1 }),\n undefined\n ])\n }\n },\n undefined\n ]),\n realtime_processing: 
faker.helpers.arrayElement([\n {\n ...{\n custom_vocabulary: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_vocabulary_config: faker.helpers.arrayElement([\n {\n ...{\n vocabulary: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() =>\n faker.helpers.arrayElement([\n {\n ...getStreamingControllerGetStreamingJobV2ResponseCustomVocabularyEntryDTOMock()\n },\n faker.string.alpha(20)\n ])\n ),\n default_intensity: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: 1 }),\n undefined\n ])\n }\n },\n undefined\n ]),\n custom_spelling: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n custom_spelling_config: faker.helpers.arrayElement([\n {\n ...{\n spelling_dictionary: {\n [faker.string.alphanumeric(5)]: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n }\n }\n },\n undefined\n ]),\n translation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n translation_config: faker.helpers.arrayElement([\n {\n ...{\n target_languages: faker.helpers.arrayElements(\n Object.values(TranslationLanguageCodeEnum)\n ),\n model: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranslationModelEnum)),\n undefined\n ]),\n match_original_utterances: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n lipsync: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n context_adaptation: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n context: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n informal: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n named_entity_recognition: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n post_processing: faker.helpers.arrayElement([\n {\n ...{\n summarization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n summarization_config: faker.helpers.arrayElement([\n {\n ...{\n type: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SummaryTypesEnum)),\n undefined\n ])\n }\n },\n undefined\n ]),\n chapterization: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n }\n },\n undefined\n ]),\n messages_config: faker.helpers.arrayElement([\n {\n ...{\n receive_partial_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_final_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_speech_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_pre_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_realtime_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_post_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_acknowledgments: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_errors: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n receive_lifecycle_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ]),\n callback: faker.helpers.arrayElement([faker.datatype.boolean(), 
undefined]),\n callback_config: faker.helpers.arrayElement([\n {\n ...{\n url: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n receive_partial_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_final_transcripts: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_speech_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_pre_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_realtime_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_post_processing_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_acknowledgments: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n receive_errors: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n receive_lifecycle_events: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ])\n }\n },\n undefined\n ]),\n result: faker.helpers.arrayElement([\n {\n ...{\n metadata: {\n ...{\n audio_duration: faker.number.int({ min: undefined, max: undefined }),\n number_of_distinct_channels: faker.number.int({ min: 1, max: undefined }),\n billing_time: faker.number.int({ min: undefined, max: undefined }),\n transcription_time: faker.number.int({ min: undefined, max: undefined })\n }\n },\n transcription: faker.helpers.arrayElement([\n {\n ...{\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(Object.values(TranscriptionLanguageCodeEnum)),\n sentences: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })\n ),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })\n ),\n undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum))\n }))\n }\n },\n undefined\n ]),\n translation: 
faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n full_transcript: faker.string.alpha(20),\n languages: faker.helpers.arrayElements(Object.values(TranslationLanguageCodeEnum)),\n sentences: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20))\n })),\n undefined\n ]),\n subtitles: faker.helpers.arrayElement([\n Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n format: faker.helpers.arrayElement(Object.values(SubtitlesFormatEnum)),\n subtitles: faker.string.alpha(20)\n })),\n undefined\n ]),\n utterances: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined }),\n channel: faker.number.int({ min: 0, max: undefined }),\n speaker: faker.helpers.arrayElement([\n faker.number.int({ min: 0, max: undefined }),\n undefined\n ]),\n words: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n word: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.int({ min: undefined, max: undefined })\n })),\n text: faker.string.alpha(20),\n language: faker.helpers.arrayElement(Object.values(TranscriptionLanguageCodeEnum))\n }))\n }))\n }\n },\n undefined\n ]),\n summarization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.helpers.arrayElement([faker.string.alpha(20), null])\n }\n },\n undefined\n ]),\n named_entity_recognition: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n entity: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n sentiment_analysis: 
faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: faker.string.alpha(20)\n }\n },\n undefined\n ]),\n chapterization: faker.helpers.arrayElement([\n {\n ...{\n success: faker.datatype.boolean(),\n is_empty: faker.datatype.boolean(),\n exec_time: faker.number.int({ min: undefined, max: undefined }),\n error: {\n ...{\n status_code: faker.number.int({ min: undefined, max: undefined }),\n exception: faker.string.alpha(20),\n message: faker.string.alpha(20)\n }\n },\n results: {}\n }\n },\n undefined\n ]),\n messages: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ])\n }\n },\n undefined\n ]),\n ...overrideResponse\n})\n\nexport const getStreamingControllerGetAudioV2ResponseMock = (): Blob =>\n new Blob(faker.helpers.arrayElements(faker.word.words(10).split(\" \")))\n\nexport const getFileControllerUploadV2MockHandler = (\n overrideResponse?:\n | AudioUploadResponse\n | ((\n info: Parameters<Parameters<typeof http.post>[1]>[0]\n ) => Promise<AudioUploadResponse> | AudioUploadResponse)\n) => {\n return http.post(\"https://api.gladia.io/v2/upload\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getFileControllerUploadV2ResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getPreRecordedControllerInitPreRecordedJobV2MockHandler = (\n overrideResponse?:\n | InitPreRecordedTranscriptionResponse\n | ((\n info: Parameters<Parameters<typeof http.post>[1]>[0]\n ) => Promise<InitPreRecordedTranscriptionResponse> | InitPreRecordedTranscriptionResponse)\n) => {\n return http.post(\"https://api.gladia.io/v2/pre-recorded\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getPreRecordedControllerInitPreRecordedJobV2ResponseMock()\n ),\n { status: 201, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getPreRecordedControllerGetPreRecordedJobsV2MockHandler = (\n overrideResponse?:\n | ListPreRecordedResponse\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<ListPreRecordedResponse> | ListPreRecordedResponse)\n) => {\n return http.get(\"https://api.gladia.io/v2/pre-recorded\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getPreRecordedControllerGetPreRecordedJobsV2ResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getPreRecordedControllerGetPreRecordedJobV2MockHandler = (\n overrideResponse?:\n | PreRecordedResponse\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PreRecordedResponse> | PreRecordedResponse)\n) => {\n return http.get(\"https://api.gladia.io/v2/pre-recorded/:id\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getPreRecordedControllerGetPreRecordedJobV2ResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getPreRecordedControllerDeletePreRecordedJobV2MockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.delete>[1]>[0]) => Promise<void> | void)\n) => {\n return http.delete(\"https://api.gladia.io/v2/pre-recorded/:id\", async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 202 })\n })\n}\n\nexport const getPreRecordedControllerGetAudioV2MockHandler = (\n overrideResponse?:\n | Blob\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<Blob> | Blob)\n) => {\n return http.get(\"https://api.gladia.io/v2/pre-recorded/:id/file\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getPreRecordedControllerGetAudioV2ResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getTranscriptionControllerInitPreRecordedJobV2MockHandler = (\n overrideResponse?:\n | InitPreRecordedTranscriptionResponse\n | ((\n info: Parameters<Parameters<typeof http.post>[1]>[0]\n ) => Promise<InitPreRecordedTranscriptionResponse> | InitPreRecordedTranscriptionResponse)\n) => {\n return http.post(\"https://api.gladia.io/v2/transcription\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getTranscriptionControllerInitPreRecordedJobV2ResponseMock()\n ),\n { status: 201, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getTranscriptionControllerListV2MockHandler = (\n overrideResponse?:\n | ListTranscriptionResponse\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<ListTranscriptionResponse> | ListTranscriptionResponse)\n) => {\n return http.get(\"https://api.gladia.io/v2/transcription\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getTranscriptionControllerListV2ResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getTranscriptionControllerGetTranscriptV2MockHandler = (\n overrideResponse?:\n | TranscriptionControllerGetTranscriptV2200\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) =>\n | Promise<TranscriptionControllerGetTranscriptV2200>\n | TranscriptionControllerGetTranscriptV2200)\n) => {\n return http.get(\"https://api.gladia.io/v2/transcription/:id\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getTranscriptionControllerGetTranscriptV2ResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getTranscriptionControllerDeleteTranscriptV2MockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.delete>[1]>[0]) => Promise<void> | void)\n) => {\n return http.delete(\"https://api.gladia.io/v2/transcription/:id\", async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 202 })\n })\n}\n\nexport const getTranscriptionControllerGetAudioV2MockHandler = (\n overrideResponse?:\n | Blob\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<Blob> | Blob)\n) => {\n return http.get(\"https://api.gladia.io/v2/transcription/:id/file\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getTranscriptionControllerGetAudioV2ResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getAudioToTextControllerAudioTranscriptionMockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.post>[1]>[0]) => Promise<void> | void)\n) => {\n return http.post(\"https://api.gladia.io/audio/text/audio-transcription\", async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 200 })\n })\n}\n\nexport const getVideoToTextControllerVideoTranscriptionMockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.post>[1]>[0]) => Promise<void> | void)\n) => {\n return http.post(\"https://api.gladia.io/video/text/video-transcription\", async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 200 })\n })\n}\n\nexport const getHistoryControllerGetListV1MockHandler = (\n overrideResponse?:\n | ListHistoryResponse\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<ListHistoryResponse> | ListHistoryResponse)\n) => {\n return http.get(\"https://api.gladia.io/v1/history\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getHistoryControllerGetListV1ResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getStreamingControllerInitStreamingSessionV2MockHandler = (\n overrideResponse?:\n | InitStreamingResponse\n | ((\n info: Parameters<Parameters<typeof http.post>[1]>[0]\n ) => Promise<InitStreamingResponse> | InitStreamingResponse)\n) => {\n return http.post(\"https://api.gladia.io/v2/live\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getStreamingControllerInitStreamingSessionV2ResponseMock()\n ),\n { status: 201, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getStreamingControllerGetStreamingJobsV2MockHandler = (\n overrideResponse?:\n | ListStreamingResponse\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<ListStreamingResponse> | ListStreamingResponse)\n) => {\n return http.get(\"https://api.gladia.io/v2/live\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getStreamingControllerGetStreamingJobsV2ResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getStreamingControllerGetStreamingJobV2MockHandler = (\n overrideResponse?:\n | StreamingResponse\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<StreamingResponse> | StreamingResponse)\n) => {\n return http.get(\"https://api.gladia.io/v2/live/:id\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getStreamingControllerGetStreamingJobV2ResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getStreamingControllerDeleteStreamingJobV2MockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.delete>[1]>[0]) => Promise<void> | void)\n) => {\n return http.delete(\"https://api.gladia.io/v2/live/:id\", async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 202 })\n })\n}\n\nexport const getStreamingControllerPatchRequestParamsV2MockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.patch>[1]>[0]) => Promise<void> | void)\n) => {\n return http.patch(\"https://api.gladia.io/v2/live/:id\", async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 204 })\n })\n}\n\nexport const getStreamingControllerGetAudioV2MockHandler = (\n overrideResponse?:\n | Blob\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<Blob> | Blob)\n) => {\n return http.get(\"https://api.gladia.io/v2/live/:id/file\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getStreamingControllerGetAudioV2ResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\nexport const getGladiaControlAPIMock = () => [\n getFileControllerUploadV2MockHandler(),\n getPreRecordedControllerInitPreRecordedJobV2MockHandler(),\n getPreRecordedControllerGetPreRecordedJobsV2MockHandler(),\n getPreRecordedControllerGetPreRecordedJobV2MockHandler(),\n getPreRecordedControllerDeletePreRecordedJobV2MockHandler(),\n getPreRecordedControllerGetAudioV2MockHandler(),\n getTranscriptionControllerInitPreRecordedJobV2MockHandler(),\n getTranscriptionControllerListV2MockHandler(),\n getTranscriptionControllerGetTranscriptV2MockHandler(),\n getTranscriptionControllerDeleteTranscriptV2MockHandler(),\n getTranscriptionControllerGetAudioV2MockHandler(),\n getAudioToTextControllerAudioTranscriptionMockHandler(),\n getVideoToTextControllerVideoTranscriptionMockHandler(),\n getHistoryControllerGetListV1MockHandler(),\n getStreamingControllerInitStreamingSessionV2MockHandler(),\n getStreamingControllerGetStreamingJobsV2MockHandler(),\n getStreamingControllerGetStreamingJobV2MockHandler(),\n getStreamingControllerDeleteStreamingJobV2MockHandler(),\n getStreamingControllerPatchRequestParamsV2MockHandler(),\n getStreamingControllerGetAudioV2MockHandler()\n]\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport * from \"./addonErrorDTO\"\nexport * from \"./audioChunkAckData\"\nexport * from \"./audioChunkAckMessage\"\nexport * from \"./audioChunkAckMessageData\"\nexport * from \"./audioChunkAckMessageError\"\nexport * from \"./audioChunkAckMessageType\"\nexport * from \"./audioChunkAction\"\nexport * from \"./audioChunkActionData\"\nexport * from \"./audioChunkActionType\"\nexport * from \"./audioToLlmDTO\"\nexport * from \"./audioToLlmDTOError\"\nexport * from \"./audioToLlmDTOResults\"\nexport * from \"./audioToLlmListConfigDTO\"\nexport * from \"./audioToLlmListDTO\"\nexport * from \"./audioToLlmListDTOError\"\nexport * from \"./audioToLlmResultDTO\"\nexport * from \"./audioToTextControllerAudioTranscriptionBody\"\nexport * from \"./audioToTextControllerAudioTranscriptionBodyLanguage\"\nexport * from \"./audioToTextControllerAudioTranscriptionBodyLanguageBehaviour\"\nexport * from \"./audioToTextControllerAudioTranscriptionBodyOutputFormat\"\nexport * from \"./audioToTextControllerAudioTranscriptionBodyTargetTranslationLanguage\"\nexport * from \"./audioUploadMetadataDTO\"\nexport * from \"./audioUploadResponse\"\nexport * from \"./badRequestErrorResponse\"\nexport * from \"./callbackConfig\"\nexport * from \"./callbackConfigDto\"\nexport * from \"./callbackLiveAudioChunkAckMessage\"\nexport * from \"./callbackLiveAudioChunkAckMessageEvent\"\nexport * from \"./callbackLiveEndRecordingMessage\"\nexport * from \"./callbackLiveEndRecordingMessageEvent\"\nexport * from \"./callbackLiveEndSessionMessage\"\nexport * from \"./callbackLiveEndSessionMessageEvent\"\nexport * from \"./callbackLiveNamedEntityRecognitionMessage\"\nexport * from \"./callbackLiveNamedEntityRecognitionMessageEvent\"\nexport * from \"./callbackLivePostChapterizationMessage\"\nexport * from \"./callbackLivePostChapterizationMessageEvent\"\nexport * from \"./callbackLivePostFinalTranscriptMessage\"\nexport * from \"./callbackLivePostFinalTranscriptMessageEvent\"\nexport * from \"./callbackLivePostSummarizationMessage\"\nexport * from 
\"./callbackLivePostSummarizationMessageEvent\"\nexport * from \"./callbackLivePostTranscriptMessage\"\nexport * from \"./callbackLivePostTranscriptMessageEvent\"\nexport * from \"./callbackLiveSentimentAnalysisMessage\"\nexport * from \"./callbackLiveSentimentAnalysisMessageEvent\"\nexport * from \"./callbackLiveSpeechEndMessage\"\nexport * from \"./callbackLiveSpeechEndMessageEvent\"\nexport * from \"./callbackLiveSpeechStartMessage\"\nexport * from \"./callbackLiveSpeechStartMessageEvent\"\nexport * from \"./callbackLiveStartRecordingMessage\"\nexport * from \"./callbackLiveStartRecordingMessageEvent\"\nexport * from \"./callbackLiveStartSessionMessage\"\nexport * from \"./callbackLiveStartSessionMessageEvent\"\nexport * from \"./callbackLiveStopRecordingAckMessage\"\nexport * from \"./callbackLiveStopRecordingAckMessageEvent\"\nexport * from \"./callbackLiveTranscriptMessage\"\nexport * from \"./callbackLiveTranscriptMessageEvent\"\nexport * from \"./callbackLiveTranslationMessage\"\nexport * from \"./callbackLiveTranslationMessageEvent\"\nexport * from \"./callbackMethodEnum\"\nexport * from \"./callbackTranscriptionErrorPayload\"\nexport * from \"./callbackTranscriptionErrorPayloadCustomMetadata\"\nexport * from \"./callbackTranscriptionErrorPayloadEvent\"\nexport * from \"./callbackTranscriptionSuccessPayload\"\nexport * from \"./callbackTranscriptionSuccessPayloadCustomMetadata\"\nexport * from \"./callbackTranscriptionSuccessPayloadEvent\"\nexport * from \"./chapterizationDTO\"\nexport * from \"./chapterizationDTOError\"\nexport * from \"./chapterizationDTOResults\"\nexport * from \"./chapterizationSentence\"\nexport * from \"./codeSwitchingConfigDTO\"\nexport * from \"./customSpellingConfigDTO\"\nexport * from \"./customSpellingConfigDTOSpellingDictionary\"\nexport * from \"./customVocabularyConfigDTO\"\nexport * from \"./customVocabularyConfigDTOVocabularyItem\"\nexport * from \"./customVocabularyEntryDTO\"\nexport * from \"./diarizationConfigDTO\"\nexport * from \"./diarizationDTO\"\nexport * from \"./diarizationDTOError\"\nexport * from \"./displayModeDTO\"\nexport * from \"./displayModeDTOError\"\nexport * from \"./endRecordingMessage\"\nexport * from \"./endRecordingMessageData\"\nexport * from \"./endRecordingMessageType\"\nexport * from \"./endSessionMessage\"\nexport * from \"./endSessionMessageType\"\nexport * from \"./error\"\nexport * from \"./errorDTO\"\nexport * from \"./fileControllerUploadV2BodyOne\"\nexport * from \"./fileControllerUploadV2BodyTwo\"\nexport * from \"./fileResponse\"\nexport * from \"./forbiddenErrorResponse\"\nexport * from \"./historyControllerGetListV1KindItem\"\nexport * from \"./historyControllerGetListV1Params\"\nexport * from \"./historyControllerGetListV1StatusItem\"\nexport * from \"./initPreRecordedTranscriptionResponse\"\nexport * from \"./initStreamingResponse\"\nexport * from \"./initTranscriptionRequest\"\nexport * from \"./initTranscriptionRequestCustomMetadata\"\nexport * from \"./languageConfig\"\nexport * from \"./listHistoryResponse\"\nexport * from \"./listHistoryResponseItemsItem\"\nexport * from \"./listPreRecordedResponse\"\nexport * from \"./listStreamingResponse\"\nexport * from \"./listTranscriptionResponse\"\nexport * from \"./listTranscriptionResponseItemsItem\"\nexport * from \"./liveEventPayload\"\nexport * from \"./messagesConfig\"\nexport * from \"./moderationDTO\"\nexport * from \"./moderationDTOError\"\nexport * from \"./namedEntityRecognitionData\"\nexport * from \"./namedEntityRecognitionDTO\"\nexport * from 
\"./namedEntityRecognitionDTOError\"\nexport * from \"./namedEntityRecognitionMessage\"\nexport * from \"./namedEntityRecognitionMessageData\"\nexport * from \"./namedEntityRecognitionMessageError\"\nexport * from \"./namedEntityRecognitionMessageType\"\nexport * from \"./namedEntityRecognitionResult\"\nexport * from \"./namesConsistencyDTO\"\nexport * from \"./namesConsistencyDTOError\"\nexport * from \"./notFoundErrorResponse\"\nexport * from \"./patchRequestParamsDTO\"\nexport * from \"./payloadTooLargeErrorResponse\"\nexport * from \"./postChapterizationMessage\"\nexport * from \"./postChapterizationMessageData\"\nexport * from \"./postChapterizationMessageDataProperty\"\nexport * from \"./postChapterizationMessageError\"\nexport * from \"./postChapterizationMessageType\"\nexport * from \"./postChapterizationResult\"\nexport * from \"./postFinalTranscriptMessage\"\nexport * from \"./postFinalTranscriptMessageType\"\nexport * from \"./postProcessingConfig\"\nexport * from \"./postSummarizationMessage\"\nexport * from \"./postSummarizationMessageData\"\nexport * from \"./postSummarizationMessageDataProperty\"\nexport * from \"./postSummarizationMessageError\"\nexport * from \"./postSummarizationMessageType\"\nexport * from \"./postTranscriptMessage\"\nexport * from \"./postTranscriptMessageType\"\nexport * from \"./preProcessingConfig\"\nexport * from \"./preRecordedControllerGetPreRecordedJobsV2Params\"\nexport * from \"./preRecordedControllerGetPreRecordedJobsV2StatusItem\"\nexport * from \"./preRecordedEventPayload\"\nexport * from \"./preRecordedRequestParamsResponse\"\nexport * from \"./preRecordedResponse\"\nexport * from \"./preRecordedResponseCustomMetadata\"\nexport * from \"./preRecordedResponseFile\"\nexport * from \"./preRecordedResponseKind\"\nexport * from \"./preRecordedResponsePostSessionMetadata\"\nexport * from \"./preRecordedResponseRequestParams\"\nexport * from \"./preRecordedResponseResult\"\nexport * from \"./preRecordedResponseStatus\"\nexport * from \"./realtimeProcessingConfig\"\nexport * from \"./sentencesDTO\"\nexport * from \"./sentencesDTOError\"\nexport * from \"./sentimentAnalysisData\"\nexport * from \"./sentimentAnalysisDTO\"\nexport * from \"./sentimentAnalysisDTOError\"\nexport * from \"./sentimentAnalysisMessage\"\nexport * from \"./sentimentAnalysisMessageData\"\nexport * from \"./sentimentAnalysisMessageError\"\nexport * from \"./sentimentAnalysisMessageType\"\nexport * from \"./sentimentAnalysisResult\"\nexport * from \"./speakerReidentificationDTO\"\nexport * from \"./speakerReidentificationDTOError\"\nexport * from \"./speechEndMessage\"\nexport * from \"./speechEndMessageType\"\nexport * from \"./speechMessageData\"\nexport * from \"./speechStartMessage\"\nexport * from \"./speechStartMessageType\"\nexport * from \"./startRecordingMessage\"\nexport * from \"./startRecordingMessageType\"\nexport * from \"./startSessionMessage\"\nexport * from \"./startSessionMessageType\"\nexport * from \"./stopRecordingAckData\"\nexport * from \"./stopRecordingAckMessage\"\nexport * from \"./stopRecordingAckMessageData\"\nexport * from \"./stopRecordingAckMessageError\"\nexport * from \"./stopRecordingAckMessageType\"\nexport * from \"./stopRecordingAction\"\nexport * from \"./stopRecordingActionType\"\nexport * from \"./streamingControllerGetStreamingJobsV2Params\"\nexport * from \"./streamingControllerGetStreamingJobsV2StatusItem\"\nexport * from \"./streamingControllerInitStreamingSessionV2Params\"\nexport * from \"./streamingRequest\"\nexport * from 
\"./streamingRequestCustomMetadata\"\nexport * from \"./streamingRequestParamsResponse\"\nexport * from \"./streamingResponse\"\nexport * from \"./streamingResponseCustomMetadata\"\nexport * from \"./streamingResponseFile\"\nexport * from \"./streamingResponseKind\"\nexport * from \"./streamingResponsePostSessionMetadata\"\nexport * from \"./streamingResponseRequestParams\"\nexport * from \"./streamingResponseResult\"\nexport * from \"./streamingResponseStatus\"\nexport * from \"./streamingSupportedBitDepthEnum\"\nexport * from \"./streamingSupportedEncodingEnum\"\nexport * from \"./streamingSupportedModels\"\nexport * from \"./streamingSupportedRegions\"\nexport * from \"./streamingSupportedSampleRateEnum\"\nexport * from \"./streamingTranscriptionResultDTO\"\nexport * from \"./streamingTranscriptionResultWithMessagesDTO\"\nexport * from \"./structuredDataExtractionConfigDTO\"\nexport * from \"./structuredDataExtractionDTO\"\nexport * from \"./structuredDataExtractionDTOError\"\nexport * from \"./subtitleDTO\"\nexport * from \"./subtitlesConfigDTO\"\nexport * from \"./subtitlesFormatEnum\"\nexport * from \"./subtitlesStyleEnum\"\nexport * from \"./summarizationConfigDTO\"\nexport * from \"./summarizationDTO\"\nexport * from \"./summarizationDTOError\"\nexport * from \"./summaryTypesEnum\"\nexport * from \"./transcriptionControllerGetTranscriptV2200\"\nexport * from \"./transcriptionControllerListV2KindItem\"\nexport * from \"./transcriptionControllerListV2Params\"\nexport * from \"./transcriptionControllerListV2StatusItem\"\nexport * from \"./transcriptionDTO\"\nexport * from \"./transcriptionLanguageCodeEnum\"\nexport * from \"./transcriptionMetadataDTO\"\nexport * from \"./transcriptionResultDTO\"\nexport * from \"./transcriptMessage\"\nexport * from \"./transcriptMessageData\"\nexport * from \"./transcriptMessageType\"\nexport * from \"./translationConfigDTO\"\nexport * from \"./translationData\"\nexport * from \"./translationDTO\"\nexport * from \"./translationDTOError\"\nexport * from \"./translationLanguageCodeEnum\"\nexport * from \"./translationMessage\"\nexport * from \"./translationMessageData\"\nexport * from \"./translationMessageError\"\nexport * from \"./translationMessageType\"\nexport * from \"./translationModelEnum\"\nexport * from \"./translationResultDTO\"\nexport * from \"./translationResultDTOError\"\nexport * from \"./unauthorizedErrorResponse\"\nexport * from \"./unprocessableEntityErrorResponse\"\nexport * from \"./uploadBody\"\nexport * from \"./utteranceDTO\"\nexport * from \"./videoToTextControllerVideoTranscriptionBody\"\nexport * from \"./videoToTextControllerVideoTranscriptionBodyLanguage\"\nexport * from \"./videoToTextControllerVideoTranscriptionBodyLanguageBehaviour\"\nexport * from \"./videoToTextControllerVideoTranscriptionBodyOutputFormat\"\nexport * from \"./videoToTextControllerVideoTranscriptionBodyTargetTranslationLanguage\"\nexport * from \"./webhookLiveEndRecordingPayload\"\nexport * from \"./webhookLiveEndRecordingPayloadEvent\"\nexport * from \"./webhookLiveEndSessionPayload\"\nexport * from \"./webhookLiveEndSessionPayloadEvent\"\nexport * from \"./webhookLiveStartRecordingPayload\"\nexport * from \"./webhookLiveStartRecordingPayloadEvent\"\nexport * from \"./webhookLiveStartSessionPayload\"\nexport * from \"./webhookLiveStartSessionPayloadEvent\"\nexport * from \"./webhookTranscriptionCreatedPayload\"\nexport * from \"./webhookTranscriptionCreatedPayloadEvent\"\nexport * from \"./webhookTranscriptionErrorPayload\"\nexport * from 
\"./webhookTranscriptionErrorPayloadEvent\"\nexport * from \"./webhookTranscriptionSuccessPayload\"\nexport * from \"./webhookTranscriptionSuccessPayloadEvent\"\nexport * from \"./wordDTO\"\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type AudioChunkAckMessageType =\n (typeof AudioChunkAckMessageType)[keyof typeof AudioChunkAckMessageType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const AudioChunkAckMessageType = {\n audio_chunk: \"audio_chunk\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type AudioChunkActionType = (typeof AudioChunkActionType)[keyof typeof AudioChunkActionType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const AudioChunkActionType = {\n audio_chunk: \"audio_chunk\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type AudioToTextControllerAudioTranscriptionBodyLanguage =\n (typeof AudioToTextControllerAudioTranscriptionBodyLanguage)[keyof typeof AudioToTextControllerAudioTranscriptionBodyLanguage]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const AudioToTextControllerAudioTranscriptionBodyLanguage = {\n afrikaans: \"afrikaans\",\n albanian: \"albanian\",\n amharic: \"amharic\",\n arabic: \"arabic\",\n armenian: \"armenian\",\n assamese: \"assamese\",\n azerbaijani: \"azerbaijani\",\n bashkir: \"bashkir\",\n basque: \"basque\",\n belarusian: \"belarusian\",\n bengali: \"bengali\",\n bosnian: \"bosnian\",\n breton: \"breton\",\n bulgarian: \"bulgarian\",\n catalan: \"catalan\",\n chinese: \"chinese\",\n croatian: \"croatian\",\n czech: \"czech\",\n danish: \"danish\",\n dutch: \"dutch\",\n english: \"english\",\n estonian: \"estonian\",\n faroese: \"faroese\",\n finnish: \"finnish\",\n french: \"french\",\n galician: \"galician\",\n georgian: \"georgian\",\n german: \"german\",\n greek: \"greek\",\n gujarati: \"gujarati\",\n haitian_creole: \"haitian creole\",\n hausa: \"hausa\",\n hawaiian: \"hawaiian\",\n hebrew: \"hebrew\",\n hindi: \"hindi\",\n hungarian: \"hungarian\",\n icelandic: \"icelandic\",\n indonesian: \"indonesian\",\n italian: \"italian\",\n japanese: \"japanese\",\n javanese: \"javanese\",\n kannada: \"kannada\",\n kazakh: \"kazakh\",\n khmer: \"khmer\",\n korean: \"korean\",\n lao: \"lao\",\n latin: \"latin\",\n latvian: \"latvian\",\n lingala: \"lingala\",\n lithuanian: \"lithuanian\",\n luxembourgish: \"luxembourgish\",\n macedonian: \"macedonian\",\n malagasy: \"malagasy\",\n malay: \"malay\",\n malayalam: \"malayalam\",\n maltese: \"maltese\",\n maori: \"maori\",\n marathi: \"marathi\",\n mongolian: \"mongolian\",\n myanmar: \"myanmar\",\n nepali: \"nepali\",\n norwegian: \"norwegian\",\n nynorsk: \"nynorsk\",\n occitan: \"occitan\",\n pashto: \"pashto\",\n persian: \"persian\",\n polish: \"polish\",\n portuguese: \"portuguese\",\n punjabi: \"punjabi\",\n romanian: \"romanian\",\n russian: \"russian\",\n sanskrit: \"sanskrit\",\n serbian: \"serbian\",\n shona: \"shona\",\n sindhi: \"sindhi\",\n sinhala: \"sinhala\",\n slovak: \"slovak\",\n slovenian: \"slovenian\",\n somali: \"somali\",\n spanish: \"spanish\",\n sundanese: \"sundanese\",\n swahili: \"swahili\",\n swedish: \"swedish\",\n tagalog: \"tagalog\",\n tajik: \"tajik\",\n tamil: \"tamil\",\n tatar: \"tatar\",\n telugu: 
\"telugu\",\n thai: \"thai\",\n tibetan: \"tibetan\",\n turkish: \"turkish\",\n turkmen: \"turkmen\",\n ukrainian: \"ukrainian\",\n urdu: \"urdu\",\n uzbek: \"uzbek\",\n vietnamese: \"vietnamese\",\n welsh: \"welsh\",\n yiddish: \"yiddish\",\n yoruba: \"yoruba\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type AudioToTextControllerAudioTranscriptionBodyLanguageBehaviour =\n (typeof AudioToTextControllerAudioTranscriptionBodyLanguageBehaviour)[keyof typeof AudioToTextControllerAudioTranscriptionBodyLanguageBehaviour]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const AudioToTextControllerAudioTranscriptionBodyLanguageBehaviour = {\n automatic_single_language: \"automatic single language\",\n automatic_multiple_languages: \"automatic multiple languages\",\n manual: \"manual\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type AudioToTextControllerAudioTranscriptionBodyOutputFormat =\n (typeof AudioToTextControllerAudioTranscriptionBodyOutputFormat)[keyof typeof AudioToTextControllerAudioTranscriptionBodyOutputFormat]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const AudioToTextControllerAudioTranscriptionBodyOutputFormat = {\n json: \"json\",\n srt: \"srt\",\n vtt: \"vtt\",\n plain: \"plain\",\n txt: \"txt\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type AudioToTextControllerAudioTranscriptionBodyTargetTranslationLanguage =\n (typeof AudioToTextControllerAudioTranscriptionBodyTargetTranslationLanguage)[keyof typeof AudioToTextControllerAudioTranscriptionBodyTargetTranslationLanguage]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const AudioToTextControllerAudioTranscriptionBodyTargetTranslationLanguage = {\n afrikaans: \"afrikaans\",\n albanian: \"albanian\",\n amharic: \"amharic\",\n arabic: \"arabic\",\n armenian: \"armenian\",\n assamese: \"assamese\",\n azerbaijani: \"azerbaijani\",\n bashkir: \"bashkir\",\n basque: \"basque\",\n belarusian: \"belarusian\",\n bengali: \"bengali\",\n bosnian: \"bosnian\",\n breton: \"breton\",\n bulgarian: \"bulgarian\",\n catalan: \"catalan\",\n chinese: \"chinese\",\n croatian: \"croatian\",\n czech: \"czech\",\n danish: \"danish\",\n dutch: \"dutch\",\n english: \"english\",\n estonian: \"estonian\",\n faroese: \"faroese\",\n finnish: \"finnish\",\n french: \"french\",\n galician: \"galician\",\n georgian: \"georgian\",\n german: \"german\",\n greek: \"greek\",\n gujarati: \"gujarati\",\n haitian_creole: \"haitian creole\",\n hausa: \"hausa\",\n hawaiian: \"hawaiian\",\n hebrew: \"hebrew\",\n hindi: \"hindi\",\n hungarian: \"hungarian\",\n icelandic: \"icelandic\",\n indonesian: \"indonesian\",\n italian: \"italian\",\n japanese: \"japanese\",\n javanese: \"javanese\",\n kannada: \"kannada\",\n kazakh: \"kazakh\",\n khmer: \"khmer\",\n korean: \"korean\",\n lao: \"lao\",\n latin: \"latin\",\n latvian: \"latvian\",\n lingala: \"lingala\",\n lithuanian: \"lithuanian\",\n luxembourgish: \"luxembourgish\",\n macedonian: \"macedonian\",\n malagasy: \"malagasy\",\n malay: \"malay\",\n malayalam: \"malayalam\",\n maltese: \"maltese\",\n maori: \"maori\",\n marathi: \"marathi\",\n mongolian: \"mongolian\",\n myanmar: \"myanmar\",\n nepali: \"nepali\",\n norwegian: \"norwegian\",\n 
nynorsk: \"nynorsk\",\n occitan: \"occitan\",\n pashto: \"pashto\",\n persian: \"persian\",\n polish: \"polish\",\n portuguese: \"portuguese\",\n punjabi: \"punjabi\",\n romanian: \"romanian\",\n russian: \"russian\",\n sanskrit: \"sanskrit\",\n serbian: \"serbian\",\n shona: \"shona\",\n sindhi: \"sindhi\",\n sinhala: \"sinhala\",\n slovak: \"slovak\",\n slovenian: \"slovenian\",\n somali: \"somali\",\n spanish: \"spanish\",\n sundanese: \"sundanese\",\n swahili: \"swahili\",\n swedish: \"swedish\",\n tagalog: \"tagalog\",\n tajik: \"tajik\",\n tamil: \"tamil\",\n tatar: \"tatar\",\n telugu: \"telugu\",\n thai: \"thai\",\n tibetan: \"tibetan\",\n turkish: \"turkish\",\n turkmen: \"turkmen\",\n ukrainian: \"ukrainian\",\n urdu: \"urdu\",\n uzbek: \"uzbek\",\n vietnamese: \"vietnamese\",\n welsh: \"welsh\",\n wolof: \"wolof\",\n yiddish: \"yiddish\",\n yoruba: \"yoruba\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type CallbackLiveAudioChunkAckMessageEvent =\n (typeof CallbackLiveAudioChunkAckMessageEvent)[keyof typeof CallbackLiveAudioChunkAckMessageEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const CallbackLiveAudioChunkAckMessageEvent = {\n liveaudio_chunk: \"live.audio_chunk\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type CallbackLiveEndRecordingMessageEvent =\n (typeof CallbackLiveEndRecordingMessageEvent)[keyof typeof CallbackLiveEndRecordingMessageEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const CallbackLiveEndRecordingMessageEvent = {\n liveend_recording: \"live.end_recording\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type CallbackLiveEndSessionMessageEvent =\n (typeof CallbackLiveEndSessionMessageEvent)[keyof typeof CallbackLiveEndSessionMessageEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const CallbackLiveEndSessionMessageEvent = {\n liveend_session: \"live.end_session\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type CallbackLiveNamedEntityRecognitionMessageEvent =\n (typeof CallbackLiveNamedEntityRecognitionMessageEvent)[keyof typeof CallbackLiveNamedEntityRecognitionMessageEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const CallbackLiveNamedEntityRecognitionMessageEvent = {\n livenamed_entity_recognition: \"live.named_entity_recognition\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type CallbackLivePostChapterizationMessageEvent =\n (typeof CallbackLivePostChapterizationMessageEvent)[keyof typeof CallbackLivePostChapterizationMessageEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const CallbackLivePostChapterizationMessageEvent = {\n livepost_chapterization: \"live.post_chapterization\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type CallbackLivePostFinalTranscriptMessageEvent =\n (typeof CallbackLivePostFinalTranscriptMessageEvent)[keyof typeof CallbackLivePostFinalTranscriptMessageEvent]\n\n// 
eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const CallbackLivePostFinalTranscriptMessageEvent = {\n livepost_final_transcript: \"live.post_final_transcript\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type CallbackLivePostSummarizationMessageEvent =\n (typeof CallbackLivePostSummarizationMessageEvent)[keyof typeof CallbackLivePostSummarizationMessageEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const CallbackLivePostSummarizationMessageEvent = {\n livepost_summarization: \"live.post_summarization\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type CallbackLivePostTranscriptMessageEvent =\n (typeof CallbackLivePostTranscriptMessageEvent)[keyof typeof CallbackLivePostTranscriptMessageEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const CallbackLivePostTranscriptMessageEvent = {\n livepost_transcript: \"live.post_transcript\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type CallbackLiveSentimentAnalysisMessageEvent =\n (typeof CallbackLiveSentimentAnalysisMessageEvent)[keyof typeof CallbackLiveSentimentAnalysisMessageEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const CallbackLiveSentimentAnalysisMessageEvent = {\n livesentiment_analysis: \"live.sentiment_analysis\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type CallbackLiveSpeechEndMessageEvent =\n (typeof CallbackLiveSpeechEndMessageEvent)[keyof typeof CallbackLiveSpeechEndMessageEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const CallbackLiveSpeechEndMessageEvent = {\n livespeech_end: \"live.speech_end\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type CallbackLiveSpeechStartMessageEvent =\n (typeof CallbackLiveSpeechStartMessageEvent)[keyof typeof CallbackLiveSpeechStartMessageEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const CallbackLiveSpeechStartMessageEvent = {\n livespeech_start: \"live.speech_start\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type CallbackLiveStartRecordingMessageEvent =\n (typeof CallbackLiveStartRecordingMessageEvent)[keyof typeof CallbackLiveStartRecordingMessageEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const CallbackLiveStartRecordingMessageEvent = {\n livestart_recording: \"live.start_recording\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type CallbackLiveStartSessionMessageEvent =\n (typeof CallbackLiveStartSessionMessageEvent)[keyof typeof CallbackLiveStartSessionMessageEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const CallbackLiveStartSessionMessageEvent = {\n livestart_session: \"live.start_session\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type 
CallbackLiveStopRecordingAckMessageEvent =\n (typeof CallbackLiveStopRecordingAckMessageEvent)[keyof typeof CallbackLiveStopRecordingAckMessageEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const CallbackLiveStopRecordingAckMessageEvent = {\n livestop_recording: \"live.stop_recording\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type CallbackLiveTranscriptMessageEvent =\n (typeof CallbackLiveTranscriptMessageEvent)[keyof typeof CallbackLiveTranscriptMessageEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const CallbackLiveTranscriptMessageEvent = {\n livetranscript: \"live.transcript\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type CallbackLiveTranslationMessageEvent =\n (typeof CallbackLiveTranslationMessageEvent)[keyof typeof CallbackLiveTranslationMessageEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const CallbackLiveTranslationMessageEvent = {\n livetranslation: \"live.translation\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\n/**\n * The HTTP method to be used. Allowed values are `POST` or `PUT` (default: `POST`)\n */\nexport type CallbackMethodEnum = (typeof CallbackMethodEnum)[keyof typeof CallbackMethodEnum]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const CallbackMethodEnum = {\n POST: \"POST\",\n PUT: \"PUT\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\n/**\n * Type of event\n */\nexport type CallbackTranscriptionErrorPayloadEvent =\n (typeof CallbackTranscriptionErrorPayloadEvent)[keyof typeof CallbackTranscriptionErrorPayloadEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const CallbackTranscriptionErrorPayloadEvent = {\n transcriptionerror: \"transcription.error\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\n/**\n * Type of event\n */\nexport type CallbackTranscriptionSuccessPayloadEvent =\n (typeof CallbackTranscriptionSuccessPayloadEvent)[keyof typeof CallbackTranscriptionSuccessPayloadEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const CallbackTranscriptionSuccessPayloadEvent = {\n transcriptionsuccess: \"transcription.success\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type EndRecordingMessageType =\n (typeof EndRecordingMessageType)[keyof typeof EndRecordingMessageType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const EndRecordingMessageType = {\n end_recording: \"end_recording\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type EndSessionMessageType =\n (typeof EndSessionMessageType)[keyof typeof EndSessionMessageType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const EndSessionMessageType = {\n end_session: \"end_session\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n 
*/\n\nexport type HistoryControllerGetListV1KindItem =\n (typeof HistoryControllerGetListV1KindItem)[keyof typeof HistoryControllerGetListV1KindItem]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const HistoryControllerGetListV1KindItem = {\n \"pre-recorded\": \"pre-recorded\",\n live: \"live\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type HistoryControllerGetListV1StatusItem =\n (typeof HistoryControllerGetListV1StatusItem)[keyof typeof HistoryControllerGetListV1StatusItem]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const HistoryControllerGetListV1StatusItem = {\n queued: \"queued\",\n processing: \"processing\",\n done: \"done\",\n error: \"error\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type NamedEntityRecognitionMessageType =\n (typeof NamedEntityRecognitionMessageType)[keyof typeof NamedEntityRecognitionMessageType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const NamedEntityRecognitionMessageType = {\n named_entity_recognition: \"named_entity_recognition\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type PostChapterizationMessageType =\n (typeof PostChapterizationMessageType)[keyof typeof PostChapterizationMessageType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const PostChapterizationMessageType = {\n post_chapterization: \"post_chapterization\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type PostFinalTranscriptMessageType =\n (typeof PostFinalTranscriptMessageType)[keyof typeof PostFinalTranscriptMessageType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const PostFinalTranscriptMessageType = {\n post_final_transcript: \"post_final_transcript\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type PostSummarizationMessageType =\n (typeof PostSummarizationMessageType)[keyof typeof PostSummarizationMessageType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const PostSummarizationMessageType = {\n post_summarization: \"post_summarization\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type PostTranscriptMessageType =\n (typeof PostTranscriptMessageType)[keyof typeof PostTranscriptMessageType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const PostTranscriptMessageType = {\n post_transcript: \"post_transcript\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type PreRecordedControllerGetPreRecordedJobsV2StatusItem =\n (typeof PreRecordedControllerGetPreRecordedJobsV2StatusItem)[keyof typeof PreRecordedControllerGetPreRecordedJobsV2StatusItem]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const PreRecordedControllerGetPreRecordedJobsV2StatusItem = {\n queued: \"queued\",\n processing: \"processing\",\n done: \"done\",\n error: \"error\"\n} as const\n","/**\n * Generated by orval 
v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type PreRecordedResponseKind =\n (typeof PreRecordedResponseKind)[keyof typeof PreRecordedResponseKind]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const PreRecordedResponseKind = {\n \"pre-recorded\": \"pre-recorded\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\n/**\n * \"queued\": the job has been queued. \"processing\": the job is being processed. \"done\": the job has been processed and the result is available. \"error\": an error occurred during the job's processing.\n */\nexport type PreRecordedResponseStatus =\n (typeof PreRecordedResponseStatus)[keyof typeof PreRecordedResponseStatus]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const PreRecordedResponseStatus = {\n queued: \"queued\",\n processing: \"processing\",\n done: \"done\",\n error: \"error\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type SentimentAnalysisMessageType =\n (typeof SentimentAnalysisMessageType)[keyof typeof SentimentAnalysisMessageType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const SentimentAnalysisMessageType = {\n sentiment_analysis: \"sentiment_analysis\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type SpeechEndMessageType = (typeof SpeechEndMessageType)[keyof typeof SpeechEndMessageType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const SpeechEndMessageType = {\n speech_end: \"speech_end\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type SpeechStartMessageType =\n (typeof SpeechStartMessageType)[keyof typeof SpeechStartMessageType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const SpeechStartMessageType = {\n speech_start: \"speech_start\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type StartRecordingMessageType =\n (typeof StartRecordingMessageType)[keyof typeof StartRecordingMessageType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const StartRecordingMessageType = {\n start_recording: \"start_recording\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type StartSessionMessageType =\n (typeof StartSessionMessageType)[keyof typeof StartSessionMessageType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const StartSessionMessageType = {\n start_session: \"start_session\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type StopRecordingAckMessageType =\n (typeof StopRecordingAckMessageType)[keyof typeof StopRecordingAckMessageType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const StopRecordingAckMessageType = {\n stop_recording: \"stop_recording\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type 
StopRecordingActionType =\n (typeof StopRecordingActionType)[keyof typeof StopRecordingActionType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const StopRecordingActionType = {\n stop_recording: \"stop_recording\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type StreamingControllerGetStreamingJobsV2StatusItem =\n (typeof StreamingControllerGetStreamingJobsV2StatusItem)[keyof typeof StreamingControllerGetStreamingJobsV2StatusItem]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const StreamingControllerGetStreamingJobsV2StatusItem = {\n queued: \"queued\",\n processing: \"processing\",\n done: \"done\",\n error: \"error\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type StreamingResponseKind =\n (typeof StreamingResponseKind)[keyof typeof StreamingResponseKind]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const StreamingResponseKind = {\n live: \"live\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\n/**\n * \"queued\": the job has been queued. \"processing\": the job is being processed. \"done\": the job has been processed and the result is available. \"error\": an error occurred during the job's processing.\n */\nexport type StreamingResponseStatus =\n (typeof StreamingResponseStatus)[keyof typeof StreamingResponseStatus]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const StreamingResponseStatus = {\n queued: \"queued\",\n processing: \"processing\",\n done: \"done\",\n error: \"error\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\n/**\n * The model used to process the audio. \"solaria-1\" is used by default.\n */\nexport type StreamingSupportedModels =\n (typeof StreamingSupportedModels)[keyof typeof StreamingSupportedModels]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const StreamingSupportedModels = {\n \"solaria-1\": \"solaria-1\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type StreamingSupportedRegions =\n (typeof StreamingSupportedRegions)[keyof typeof StreamingSupportedRegions]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const StreamingSupportedRegions = {\n \"us-west\": \"us-west\",\n \"eu-west\": \"eu-west\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\n/**\n * Subtitles formats you want your transcription to be formatted to\n */\nexport type SubtitlesFormatEnum = (typeof SubtitlesFormatEnum)[keyof typeof SubtitlesFormatEnum]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const SubtitlesFormatEnum = {\n srt: \"srt\",\n vtt: \"vtt\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\n/**\n * Style of the subtitles. 
Compliance mode refers to : https://loc.gov/preservation/digital/formats//fdd/fdd000569.shtml#:~:text=SRT%20files%20are%20basic%20text,alongside%2C%20example%3A%20%22MyVideo123\n */\nexport type SubtitlesStyleEnum = (typeof SubtitlesStyleEnum)[keyof typeof SubtitlesStyleEnum]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const SubtitlesStyleEnum = {\n default: \"default\",\n compliance: \"compliance\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\n/**\n * The type of summarization to apply\n */\nexport type SummaryTypesEnum = (typeof SummaryTypesEnum)[keyof typeof SummaryTypesEnum]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const SummaryTypesEnum = {\n general: \"general\",\n bullet_points: \"bullet_points\",\n concise: \"concise\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type TranscriptionControllerListV2KindItem =\n (typeof TranscriptionControllerListV2KindItem)[keyof typeof TranscriptionControllerListV2KindItem]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TranscriptionControllerListV2KindItem = {\n \"pre-recorded\": \"pre-recorded\",\n live: \"live\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type TranscriptionControllerListV2StatusItem =\n (typeof TranscriptionControllerListV2StatusItem)[keyof typeof TranscriptionControllerListV2StatusItem]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TranscriptionControllerListV2StatusItem = {\n queued: \"queued\",\n processing: \"processing\",\n done: \"done\",\n error: \"error\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\n/**\n * Specify the language in which it will be pronounced when sound comparison occurs. 
Default to transcription language.\n */\nexport type TranscriptionLanguageCodeEnum =\n (typeof TranscriptionLanguageCodeEnum)[keyof typeof TranscriptionLanguageCodeEnum]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TranscriptionLanguageCodeEnum = {\n af: \"af\",\n am: \"am\",\n ar: \"ar\",\n as: \"as\",\n az: \"az\",\n ba: \"ba\",\n be: \"be\",\n bg: \"bg\",\n bn: \"bn\",\n bo: \"bo\",\n br: \"br\",\n bs: \"bs\",\n ca: \"ca\",\n cs: \"cs\",\n cy: \"cy\",\n da: \"da\",\n de: \"de\",\n el: \"el\",\n en: \"en\",\n es: \"es\",\n et: \"et\",\n eu: \"eu\",\n fa: \"fa\",\n fi: \"fi\",\n fo: \"fo\",\n fr: \"fr\",\n gl: \"gl\",\n gu: \"gu\",\n ha: \"ha\",\n haw: \"haw\",\n he: \"he\",\n hi: \"hi\",\n hr: \"hr\",\n ht: \"ht\",\n hu: \"hu\",\n hy: \"hy\",\n id: \"id\",\n is: \"is\",\n it: \"it\",\n ja: \"ja\",\n jw: \"jw\",\n ka: \"ka\",\n kk: \"kk\",\n km: \"km\",\n kn: \"kn\",\n ko: \"ko\",\n la: \"la\",\n lb: \"lb\",\n ln: \"ln\",\n lo: \"lo\",\n lt: \"lt\",\n lv: \"lv\",\n mg: \"mg\",\n mi: \"mi\",\n mk: \"mk\",\n ml: \"ml\",\n mn: \"mn\",\n mr: \"mr\",\n ms: \"ms\",\n mt: \"mt\",\n my: \"my\",\n ne: \"ne\",\n nl: \"nl\",\n nn: \"nn\",\n no: \"no\",\n oc: \"oc\",\n pa: \"pa\",\n pl: \"pl\",\n ps: \"ps\",\n pt: \"pt\",\n ro: \"ro\",\n ru: \"ru\",\n sa: \"sa\",\n sd: \"sd\",\n si: \"si\",\n sk: \"sk\",\n sl: \"sl\",\n sn: \"sn\",\n so: \"so\",\n sq: \"sq\",\n sr: \"sr\",\n su: \"su\",\n sv: \"sv\",\n sw: \"sw\",\n ta: \"ta\",\n te: \"te\",\n tg: \"tg\",\n th: \"th\",\n tk: \"tk\",\n tl: \"tl\",\n tr: \"tr\",\n tt: \"tt\",\n uk: \"uk\",\n ur: \"ur\",\n uz: \"uz\",\n vi: \"vi\",\n yi: \"yi\",\n yo: \"yo\",\n zh: \"zh\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type TranscriptMessageType =\n (typeof TranscriptMessageType)[keyof typeof TranscriptMessageType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TranscriptMessageType = {\n transcript: \"transcript\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\n/**\n * Target language in `iso639-1` format you want the transcription translated to\n */\nexport type TranslationLanguageCodeEnum =\n (typeof TranslationLanguageCodeEnum)[keyof typeof TranslationLanguageCodeEnum]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TranslationLanguageCodeEnum = {\n af: \"af\",\n am: \"am\",\n ar: \"ar\",\n as: \"as\",\n az: \"az\",\n ba: \"ba\",\n be: \"be\",\n bg: \"bg\",\n bn: \"bn\",\n bo: \"bo\",\n br: \"br\",\n bs: \"bs\",\n ca: \"ca\",\n cs: \"cs\",\n cy: \"cy\",\n da: \"da\",\n de: \"de\",\n el: \"el\",\n en: \"en\",\n es: \"es\",\n et: \"et\",\n eu: \"eu\",\n fa: \"fa\",\n fi: \"fi\",\n fo: \"fo\",\n fr: \"fr\",\n gl: \"gl\",\n gu: \"gu\",\n ha: \"ha\",\n haw: \"haw\",\n he: \"he\",\n hi: \"hi\",\n hr: \"hr\",\n ht: \"ht\",\n hu: \"hu\",\n hy: \"hy\",\n id: \"id\",\n is: \"is\",\n it: \"it\",\n ja: \"ja\",\n jw: \"jw\",\n ka: \"ka\",\n kk: \"kk\",\n km: \"km\",\n kn: \"kn\",\n ko: \"ko\",\n la: \"la\",\n lb: \"lb\",\n ln: \"ln\",\n lo: \"lo\",\n lt: \"lt\",\n lv: \"lv\",\n mg: \"mg\",\n mi: \"mi\",\n mk: \"mk\",\n ml: \"ml\",\n mn: \"mn\",\n mr: \"mr\",\n ms: \"ms\",\n mt: \"mt\",\n my: \"my\",\n ne: \"ne\",\n nl: \"nl\",\n nn: \"nn\",\n no: \"no\",\n oc: \"oc\",\n pa: \"pa\",\n pl: \"pl\",\n ps: \"ps\",\n pt: \"pt\",\n ro: \"ro\",\n ru: \"ru\",\n sa: \"sa\",\n sd: \"sd\",\n si: 
\"si\",\n sk: \"sk\",\n sl: \"sl\",\n sn: \"sn\",\n so: \"so\",\n sq: \"sq\",\n sr: \"sr\",\n su: \"su\",\n sv: \"sv\",\n sw: \"sw\",\n ta: \"ta\",\n te: \"te\",\n tg: \"tg\",\n th: \"th\",\n tk: \"tk\",\n tl: \"tl\",\n tr: \"tr\",\n tt: \"tt\",\n uk: \"uk\",\n ur: \"ur\",\n uz: \"uz\",\n vi: \"vi\",\n wo: \"wo\",\n yi: \"yi\",\n yo: \"yo\",\n zh: \"zh\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type TranslationMessageType =\n (typeof TranslationMessageType)[keyof typeof TranslationMessageType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TranslationMessageType = {\n translation: \"translation\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\n/**\n * Model you want the translation model to use to translate\n */\nexport type TranslationModelEnum = (typeof TranslationModelEnum)[keyof typeof TranslationModelEnum]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TranslationModelEnum = {\n base: \"base\",\n enhanced: \"enhanced\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type VideoToTextControllerVideoTranscriptionBodyLanguage =\n (typeof VideoToTextControllerVideoTranscriptionBodyLanguage)[keyof typeof VideoToTextControllerVideoTranscriptionBodyLanguage]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const VideoToTextControllerVideoTranscriptionBodyLanguage = {\n afrikaans: \"afrikaans\",\n albanian: \"albanian\",\n amharic: \"amharic\",\n arabic: \"arabic\",\n armenian: \"armenian\",\n assamese: \"assamese\",\n azerbaijani: \"azerbaijani\",\n bashkir: \"bashkir\",\n basque: \"basque\",\n belarusian: \"belarusian\",\n bengali: \"bengali\",\n bosnian: \"bosnian\",\n breton: \"breton\",\n bulgarian: \"bulgarian\",\n catalan: \"catalan\",\n chinese: \"chinese\",\n croatian: \"croatian\",\n czech: \"czech\",\n danish: \"danish\",\n dutch: \"dutch\",\n english: \"english\",\n estonian: \"estonian\",\n faroese: \"faroese\",\n finnish: \"finnish\",\n french: \"french\",\n galician: \"galician\",\n georgian: \"georgian\",\n german: \"german\",\n greek: \"greek\",\n gujarati: \"gujarati\",\n haitian_creole: \"haitian creole\",\n hausa: \"hausa\",\n hawaiian: \"hawaiian\",\n hebrew: \"hebrew\",\n hindi: \"hindi\",\n hungarian: \"hungarian\",\n icelandic: \"icelandic\",\n indonesian: \"indonesian\",\n italian: \"italian\",\n japanese: \"japanese\",\n javanese: \"javanese\",\n kannada: \"kannada\",\n kazakh: \"kazakh\",\n khmer: \"khmer\",\n korean: \"korean\",\n lao: \"lao\",\n latin: \"latin\",\n latvian: \"latvian\",\n lingala: \"lingala\",\n lithuanian: \"lithuanian\",\n luxembourgish: \"luxembourgish\",\n macedonian: \"macedonian\",\n malagasy: \"malagasy\",\n malay: \"malay\",\n malayalam: \"malayalam\",\n maltese: \"maltese\",\n maori: \"maori\",\n marathi: \"marathi\",\n mongolian: \"mongolian\",\n myanmar: \"myanmar\",\n nepali: \"nepali\",\n norwegian: \"norwegian\",\n nynorsk: \"nynorsk\",\n occitan: \"occitan\",\n pashto: \"pashto\",\n persian: \"persian\",\n polish: \"polish\",\n portuguese: \"portuguese\",\n punjabi: \"punjabi\",\n romanian: \"romanian\",\n russian: \"russian\",\n sanskrit: \"sanskrit\",\n serbian: \"serbian\",\n shona: \"shona\",\n sindhi: \"sindhi\",\n sinhala: \"sinhala\",\n slovak: \"slovak\",\n 
slovenian: \"slovenian\",\n somali: \"somali\",\n spanish: \"spanish\",\n sundanese: \"sundanese\",\n swahili: \"swahili\",\n swedish: \"swedish\",\n tagalog: \"tagalog\",\n tajik: \"tajik\",\n tamil: \"tamil\",\n tatar: \"tatar\",\n telugu: \"telugu\",\n thai: \"thai\",\n tibetan: \"tibetan\",\n turkish: \"turkish\",\n turkmen: \"turkmen\",\n ukrainian: \"ukrainian\",\n urdu: \"urdu\",\n uzbek: \"uzbek\",\n vietnamese: \"vietnamese\",\n welsh: \"welsh\",\n yiddish: \"yiddish\",\n yoruba: \"yoruba\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type VideoToTextControllerVideoTranscriptionBodyLanguageBehaviour =\n (typeof VideoToTextControllerVideoTranscriptionBodyLanguageBehaviour)[keyof typeof VideoToTextControllerVideoTranscriptionBodyLanguageBehaviour]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const VideoToTextControllerVideoTranscriptionBodyLanguageBehaviour = {\n automatic_single_language: \"automatic single language\",\n automatic_multiple_languages: \"automatic multiple languages\",\n manual: \"manual\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type VideoToTextControllerVideoTranscriptionBodyOutputFormat =\n (typeof VideoToTextControllerVideoTranscriptionBodyOutputFormat)[keyof typeof VideoToTextControllerVideoTranscriptionBodyOutputFormat]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const VideoToTextControllerVideoTranscriptionBodyOutputFormat = {\n json: \"json\",\n srt: \"srt\",\n vtt: \"vtt\",\n plain: \"plain\",\n txt: \"txt\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type VideoToTextControllerVideoTranscriptionBodyTargetTranslationLanguage =\n (typeof VideoToTextControllerVideoTranscriptionBodyTargetTranslationLanguage)[keyof typeof VideoToTextControllerVideoTranscriptionBodyTargetTranslationLanguage]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const VideoToTextControllerVideoTranscriptionBodyTargetTranslationLanguage = {\n afrikaans: \"afrikaans\",\n albanian: \"albanian\",\n amharic: \"amharic\",\n arabic: \"arabic\",\n armenian: \"armenian\",\n assamese: \"assamese\",\n azerbaijani: \"azerbaijani\",\n bashkir: \"bashkir\",\n basque: \"basque\",\n belarusian: \"belarusian\",\n bengali: \"bengali\",\n bosnian: \"bosnian\",\n breton: \"breton\",\n bulgarian: \"bulgarian\",\n catalan: \"catalan\",\n chinese: \"chinese\",\n croatian: \"croatian\",\n czech: \"czech\",\n danish: \"danish\",\n dutch: \"dutch\",\n english: \"english\",\n estonian: \"estonian\",\n faroese: \"faroese\",\n finnish: \"finnish\",\n french: \"french\",\n galician: \"galician\",\n georgian: \"georgian\",\n german: \"german\",\n greek: \"greek\",\n gujarati: \"gujarati\",\n haitian_creole: \"haitian creole\",\n hausa: \"hausa\",\n hawaiian: \"hawaiian\",\n hebrew: \"hebrew\",\n hindi: \"hindi\",\n hungarian: \"hungarian\",\n icelandic: \"icelandic\",\n indonesian: \"indonesian\",\n italian: \"italian\",\n japanese: \"japanese\",\n javanese: \"javanese\",\n kannada: \"kannada\",\n kazakh: \"kazakh\",\n khmer: \"khmer\",\n korean: \"korean\",\n lao: \"lao\",\n latin: \"latin\",\n latvian: \"latvian\",\n lingala: \"lingala\",\n lithuanian: \"lithuanian\",\n luxembourgish: \"luxembourgish\",\n macedonian: \"macedonian\",\n 
malagasy: \"malagasy\",\n malay: \"malay\",\n malayalam: \"malayalam\",\n maltese: \"maltese\",\n maori: \"maori\",\n marathi: \"marathi\",\n mongolian: \"mongolian\",\n myanmar: \"myanmar\",\n nepali: \"nepali\",\n norwegian: \"norwegian\",\n nynorsk: \"nynorsk\",\n occitan: \"occitan\",\n pashto: \"pashto\",\n persian: \"persian\",\n polish: \"polish\",\n portuguese: \"portuguese\",\n punjabi: \"punjabi\",\n romanian: \"romanian\",\n russian: \"russian\",\n sanskrit: \"sanskrit\",\n serbian: \"serbian\",\n shona: \"shona\",\n sindhi: \"sindhi\",\n sinhala: \"sinhala\",\n slovak: \"slovak\",\n slovenian: \"slovenian\",\n somali: \"somali\",\n spanish: \"spanish\",\n sundanese: \"sundanese\",\n swahili: \"swahili\",\n swedish: \"swedish\",\n tagalog: \"tagalog\",\n tajik: \"tajik\",\n tamil: \"tamil\",\n tatar: \"tatar\",\n telugu: \"telugu\",\n thai: \"thai\",\n tibetan: \"tibetan\",\n turkish: \"turkish\",\n turkmen: \"turkmen\",\n ukrainian: \"ukrainian\",\n urdu: \"urdu\",\n uzbek: \"uzbek\",\n vietnamese: \"vietnamese\",\n welsh: \"welsh\",\n wolof: \"wolof\",\n yiddish: \"yiddish\",\n yoruba: \"yoruba\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type WebhookLiveEndRecordingPayloadEvent =\n (typeof WebhookLiveEndRecordingPayloadEvent)[keyof typeof WebhookLiveEndRecordingPayloadEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const WebhookLiveEndRecordingPayloadEvent = {\n liveend_recording: \"live.end_recording\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type WebhookLiveEndSessionPayloadEvent =\n (typeof WebhookLiveEndSessionPayloadEvent)[keyof typeof WebhookLiveEndSessionPayloadEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const WebhookLiveEndSessionPayloadEvent = {\n liveend_session: \"live.end_session\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type WebhookLiveStartRecordingPayloadEvent =\n (typeof WebhookLiveStartRecordingPayloadEvent)[keyof typeof WebhookLiveStartRecordingPayloadEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const WebhookLiveStartRecordingPayloadEvent = {\n livestart_recording: \"live.start_recording\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type WebhookLiveStartSessionPayloadEvent =\n (typeof WebhookLiveStartSessionPayloadEvent)[keyof typeof WebhookLiveStartSessionPayloadEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const WebhookLiveStartSessionPayloadEvent = {\n livestart_session: \"live.start_session\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type WebhookTranscriptionCreatedPayloadEvent =\n (typeof WebhookTranscriptionCreatedPayloadEvent)[keyof typeof WebhookTranscriptionCreatedPayloadEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const WebhookTranscriptionCreatedPayloadEvent = {\n transcriptioncreated: \"transcription.created\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type 
WebhookTranscriptionErrorPayloadEvent =\n (typeof WebhookTranscriptionErrorPayloadEvent)[keyof typeof WebhookTranscriptionErrorPayloadEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const WebhookTranscriptionErrorPayloadEvent = {\n transcriptionerror: \"transcription.error\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Gladia Control API\n * OpenAPI spec version: 1.0\n */\n\nexport type WebhookTranscriptionSuccessPayloadEvent =\n (typeof WebhookTranscriptionSuccessPayloadEvent)[keyof typeof WebhookTranscriptionSuccessPayloadEvent]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const WebhookTranscriptionSuccessPayloadEvent = {\n transcriptionsuccess: \"transcription.success\"\n} as const\n","/**\n * AssemblyAI transcription provider adapter\n * Documentation: https://www.assemblyai.com/docs\n */\n\nimport axios from \"axios\"\nimport WebSocket from \"ws\"\nimport type {\n AudioChunk,\n AudioInput,\n ProviderCapabilities,\n StreamingCallbacks,\n StreamingOptions,\n StreamingSession,\n TranscribeOptions,\n UnifiedTranscriptResponse\n} from \"../router/types\"\nimport { BaseAdapter, type ProviderConfig } from \"./base-adapter\"\nimport { mapEncodingToProvider } from \"../router/audio-encoding-types\"\n\n// Import generated API client functions - FULL TYPE SAFETY!\nimport {\n createTranscript,\n getTranscript as getTranscriptAPI,\n createTemporaryToken\n} from \"../generated/assemblyai/api/assemblyAIAPI\"\n\n// Import AssemblyAI generated types\nimport type { Transcript } from \"../generated/assemblyai/schema/transcript\"\nimport type { TranscriptParams } from \"../generated/assemblyai/schema/transcriptParams\"\nimport type { TranscriptStatus } from \"../generated/assemblyai/schema/transcriptStatus\"\nimport type { TranscriptWord } from \"../generated/assemblyai/schema/transcriptWord\"\nimport type { TranscriptUtterance } from \"../generated/assemblyai/schema/transcriptUtterance\"\n\n// Import AssemblyAI v3 Streaming types (auto-synced from SDK)\nimport type {\n BeginEvent,\n TurnEvent,\n TerminationEvent,\n ErrorEvent,\n StreamingEventMessage,\n StreamingWord\n} from \"../generated/assemblyai/streaming-types\"\n\n/**\n * AssemblyAI transcription provider adapter\n *\n * Implements transcription for the AssemblyAI API with support for:\n * - Synchronous and asynchronous transcription\n * - Speaker diarization (speaker labels)\n * - Multi-language detection and transcription\n * - Summarization and sentiment analysis\n * - Entity detection and content moderation\n * - Custom vocabulary and spelling\n * - Word-level timestamps\n * - PII redaction\n *\n * @see https://www.assemblyai.com/docs AssemblyAI API Documentation\n *\n * @example Basic transcription\n * ```typescript\n * import { AssemblyAIAdapter } from '@meeting-baas/sdk';\n *\n * const adapter = new AssemblyAIAdapter();\n * adapter.initialize({\n * apiKey: process.env.ASSEMBLYAI_API_KEY\n * });\n *\n * const result = await adapter.transcribe({\n * type: 'url',\n * url: 'https://example.com/audio.mp3'\n * }, {\n * language: 'en',\n * diarization: true\n * });\n *\n * console.log(result.data.text);\n * console.log(result.data.speakers);\n * ```\n *\n * @example With advanced features\n * ```typescript\n * const result = await adapter.transcribe(audio, {\n * language: 'en_us',\n * diarization: true,\n * summarization: true,\n * sentimentAnalysis: true,\n * entityDetection: true,\n * piiRedaction: true\n * });\n *\n * console.log('Summary:', 
result.data.summary);\n * console.log('Entities:', result.data.metadata?.entities);\n * ```\n */\nexport class AssemblyAIAdapter extends BaseAdapter {\n readonly name = \"assemblyai\" as const\n readonly capabilities: ProviderCapabilities = {\n streaming: true,\n diarization: true,\n wordTimestamps: true,\n languageDetection: true,\n customVocabulary: true,\n summarization: true,\n sentimentAnalysis: true,\n entityDetection: true,\n piiRedaction: true\n }\n\n protected baseUrl = \"https://api.assemblyai.com\" // Generated functions already include /v2 path\n private wsBaseUrl = \"wss://streaming.assemblyai.com/v3/ws\" // v3 Universal Streaming endpoint\n\n /**\n * Get axios config for generated API client functions\n * Configures headers and base URL using authorization header\n */\n protected getAxiosConfig() {\n return super.getAxiosConfig(\"authorization\")\n }\n\n /**\n * Submit audio for transcription\n *\n * Sends audio to AssemblyAI API for transcription. If a webhook URL is provided,\n * returns immediately with the job ID. Otherwise, polls until completion.\n *\n * @param audio - Audio input (currently only URL type supported)\n * @param options - Transcription options\n * @param options.language - Language code (e.g., 'en', 'en_us', 'es', 'fr')\n * @param options.languageDetection - Enable automatic language detection\n * @param options.diarization - Enable speaker identification (speaker_labels)\n * @param options.speakersExpected - Number of expected speakers\n * @param options.summarization - Generate text summary\n * @param options.sentimentAnalysis - Analyze sentiment of transcription\n * @param options.entityDetection - Detect named entities (people, places, etc.)\n * @param options.piiRedaction - Redact personally identifiable information\n * @param options.customVocabulary - Words to boost in recognition\n * @param options.webhookUrl - Callback URL for async results\n * @returns Normalized transcription response\n * @throws {Error} If audio type is not 'url' (file/stream not yet supported)\n *\n * @example Simple transcription\n * ```typescript\n * const result = await adapter.transcribe({\n * type: 'url',\n * url: 'https://example.com/meeting.mp3'\n * });\n * ```\n *\n * @example With advanced features\n * ```typescript\n * const result = await adapter.transcribe({\n * type: 'url',\n * url: 'https://example.com/meeting.mp3'\n * }, {\n * language: 'en_us',\n * diarization: true,\n * speakersExpected: 3,\n * summarization: true,\n * sentimentAnalysis: true,\n * entityDetection: true,\n * customVocabulary: ['API', 'TypeScript', 'JavaScript']\n * });\n * ```\n *\n * @example With webhook (returns transcript ID immediately for polling)\n * ```typescript\n * // Submit transcription with webhook\n * const result = await adapter.transcribe({\n * type: 'url',\n * url: 'https://example.com/meeting.mp3'\n * }, {\n * webhookUrl: 'https://myapp.com/webhook/transcription',\n * language: 'en_us'\n * });\n *\n * // Get transcript ID for polling\n * const transcriptId = result.data?.id;\n * console.log('Transcript ID:', transcriptId); // Use this to poll for status\n *\n * // Later: Poll for completion (if webhook fails or you want to check)\n * const status = await adapter.getTranscript(transcriptId);\n * if (status.data?.status === 'completed') {\n * console.log('Transcript:', status.data.text);\n * }\n * ```\n */\n async transcribe(\n audio: AudioInput,\n options?: TranscribeOptions\n ): Promise<UnifiedTranscriptResponse> {\n this.validateConfig()\n\n try {\n // Build typed request 
using generated types\n const request = this.buildTranscriptionRequest(audio, options)\n\n // Use generated API client function - FULLY TYPED!\n const response = await createTranscript(request, this.getAxiosConfig())\n\n const transcriptId = response.data.id\n\n // If webhook is provided, return immediately with job ID\n if (options?.webhookUrl) {\n return {\n success: true,\n provider: this.name,\n data: {\n id: transcriptId,\n text: \"\",\n status: \"queued\"\n },\n raw: response.data\n }\n }\n\n // Otherwise, poll for results\n return await this.pollForCompletion(transcriptId)\n } catch (error) {\n return this.createErrorResponse(error)\n }\n }\n\n /**\n * Get transcription result by ID\n */\n async getTranscript(transcriptId: string): Promise<UnifiedTranscriptResponse> {\n this.validateConfig()\n\n try {\n // Use generated API client function - FULLY TYPED!\n const response = await getTranscriptAPI(transcriptId, this.getAxiosConfig())\n\n return this.normalizeResponse(response.data)\n } catch (error) {\n return this.createErrorResponse(error)\n }\n }\n\n /**\n * Build AssemblyAI transcription request from unified options\n */\n private buildTranscriptionRequest(\n audio: AudioInput,\n options?: TranscribeOptions\n ): TranscriptParams {\n // Get audio URL\n let audioUrl: string\n if (audio.type === \"url\") {\n audioUrl = audio.url\n } else {\n throw new Error(\n \"AssemblyAI adapter currently only supports URL-based audio input. Use audio.type='url'\"\n )\n }\n\n const request: TranscriptParams = {\n audio_url: audioUrl\n }\n\n // Map options to AssemblyAI format\n if (options) {\n // Language configuration\n if (options.language) {\n // Convert ISO codes to AssemblyAI format (e.g., 'en' -> 'en_us')\n const languageCode = options.language.includes(\"_\")\n ? 
options.language\n : `${options.language}_us`\n request.language_code = languageCode\n }\n\n if (options.languageDetection) {\n request.language_detection = true\n }\n\n // Speaker diarization\n if (options.diarization) {\n request.speaker_labels = true\n if (options.speakersExpected) {\n request.speakers_expected = options.speakersExpected\n }\n }\n\n // Custom vocabulary (word boost)\n if (options.customVocabulary && options.customVocabulary.length > 0) {\n request.word_boost = options.customVocabulary\n request.boost_param = \"high\" // default to high boost\n }\n\n // Summarization\n if (options.summarization) {\n request.summarization = true\n request.summary_model = \"informative\"\n request.summary_type = \"bullets\"\n }\n\n // Sentiment analysis\n if (options.sentimentAnalysis) {\n request.sentiment_analysis = true\n }\n\n // Entity detection\n if (options.entityDetection) {\n request.entity_detection = true\n }\n\n // PII redaction\n if (options.piiRedaction) {\n request.redact_pii = true\n }\n\n // Webhook callback\n if (options.webhookUrl) {\n request.webhook_url = options.webhookUrl\n }\n\n // Enable word timestamps by default (AssemblyAI includes them automatically)\n // Enable punctuation and formatting for better results\n request.punctuate = true\n request.format_text = true\n }\n\n return request\n }\n\n /**\n * Normalize AssemblyAI response to unified format\n */\n private normalizeResponse(response: Transcript): UnifiedTranscriptResponse {\n // Map AssemblyAI status to unified status\n let status: \"queued\" | \"processing\" | \"completed\" | \"error\"\n switch (response.status) {\n case \"queued\":\n status = \"queued\"\n break\n case \"processing\":\n status = \"processing\"\n break\n case \"completed\":\n status = \"completed\"\n break\n case \"error\":\n status = \"error\"\n break\n default:\n status = \"queued\"\n }\n\n // Handle error state\n if (response.status === \"error\") {\n return {\n success: false,\n provider: this.name,\n error: {\n code: \"TRANSCRIPTION_ERROR\",\n message: response.error || \"Transcription failed\"\n },\n raw: response\n }\n }\n\n return {\n success: true,\n provider: this.name,\n data: {\n id: response.id,\n text: response.text || \"\",\n confidence: response.confidence !== null ? response.confidence : undefined,\n status,\n language: response.language_code,\n duration: response.audio_duration ? 
response.audio_duration / 1000 : undefined, // Convert ms to seconds\n speakers: this.extractSpeakers(response),\n words: this.extractWords(response),\n utterances: this.extractUtterances(response),\n summary: response.summary || undefined,\n metadata: {\n audioUrl: response.audio_url,\n entities: response.entities,\n sentimentAnalysis: response.sentiment_analysis_results,\n contentModeration: response.content_safety_labels\n }\n },\n raw: response\n }\n }\n\n /**\n * Extract speaker information from AssemblyAI response\n */\n private extractSpeakers(transcript: Transcript) {\n if (!transcript.utterances || transcript.utterances.length === 0) {\n return undefined\n }\n\n // Extract unique speakers from utterances\n const speakerSet = new Set<string>()\n transcript.utterances.forEach((utterance: TranscriptUtterance) => {\n if (utterance.speaker) {\n speakerSet.add(utterance.speaker)\n }\n })\n\n if (speakerSet.size === 0) {\n return undefined\n }\n\n return Array.from(speakerSet).map((speakerId) => ({\n id: speakerId,\n label: speakerId // AssemblyAI uses format like \"A\", \"B\", \"C\"\n }))\n }\n\n /**\n * Extract word timestamps from AssemblyAI response\n */\n private extractWords(transcript: Transcript) {\n if (!transcript.words || transcript.words.length === 0) {\n return undefined\n }\n\n return transcript.words.map((word: TranscriptWord) => ({\n text: word.text,\n start: word.start / 1000, // Convert ms to seconds\n end: word.end / 1000, // Convert ms to seconds\n confidence: word.confidence,\n speaker: word.speaker || undefined\n }))\n }\n\n /**\n * Extract utterances from AssemblyAI response\n */\n private extractUtterances(transcript: Transcript) {\n if (!transcript.utterances || transcript.utterances.length === 0) {\n return undefined\n }\n\n return transcript.utterances.map((utterance: TranscriptUtterance) => ({\n text: utterance.text,\n start: utterance.start / 1000, // Convert ms to seconds\n end: utterance.end / 1000, // Convert ms to seconds\n speaker: utterance.speaker || undefined,\n confidence: utterance.confidence,\n words: utterance.words.map((word: TranscriptWord) => ({\n text: word.text,\n start: word.start / 1000,\n end: word.end / 1000,\n confidence: word.confidence\n }))\n }))\n }\n\n /**\n * Stream audio for real-time transcription\n *\n * Creates a WebSocket connection to AssemblyAI for streaming transcription.\n * First obtains a temporary token, then connects and streams audio chunks.\n *\n * @param options - Streaming configuration options\n * @param callbacks - Event callbacks for transcription results\n * @returns Promise that resolves with a StreamingSession\n *\n * @example Real-time streaming\n * ```typescript\n * const session = await adapter.transcribeStream({\n * encoding: 'pcm_s16le',\n * sampleRate: 16000,\n * language: 'en',\n * interimResults: true\n * }, {\n * onOpen: () => console.log('Connected'),\n * onTranscript: (event) => {\n * if (event.isFinal) {\n * console.log('Final:', event.text);\n * } else {\n * console.log('Interim:', event.text);\n * }\n * },\n * onError: (error) => console.error('Error:', error),\n * onClose: () => console.log('Disconnected')\n * });\n *\n * // Send audio chunks\n * const audioChunk = getAudioChunk(); // Your audio source\n * await session.sendAudio({ data: audioChunk });\n *\n * // Close when done\n * await session.close();\n * ```\n */\n async transcribeStream(\n options?: StreamingOptions,\n callbacks?: StreamingCallbacks\n ): Promise<StreamingSession> {\n this.validateConfig()\n\n if (!this.config?.apiKey) {\n 
throw new Error(\"API key is required for streaming\")\n }\n\n // Step 1: Build WebSocket URL with parameters\n // v3 supports authentication via API key header (no token needed)\n const sampleRate = options?.sampleRate || 16000\n // Map unified encoding format to AssemblyAI-specific format\n const encoding = options?.encoding\n ? mapEncodingToProvider(options.encoding, \"assemblyai\")\n : \"pcm_s16le\"\n const wsUrl = `${this.wsBaseUrl}?sample_rate=${sampleRate}&encoding=${encoding}`\n\n // Step 2: Create WebSocket connection with API key in headers\n const ws = new WebSocket(wsUrl, {\n headers: {\n Authorization: this.config.apiKey\n }\n })\n\n let sessionStatus: \"connecting\" | \"open\" | \"closing\" | \"closed\" = \"connecting\"\n const sessionId = `assemblyai-${Date.now()}-${Math.random().toString(36).substring(7)}`\n\n // Handle WebSocket events\n ws.on(\"open\", () => {\n sessionStatus = \"open\"\n callbacks?.onOpen?.()\n })\n\n ws.on(\"message\", (data: Buffer) => {\n try {\n const message = JSON.parse(data.toString()) as StreamingEventMessage\n\n // Handle different message types from AssemblyAI v3 - TYPE SAFE!\n // Check for error first (it doesn't have a 'type' field)\n if (\"error\" in message) {\n // Type narrowed to ErrorEvent\n callbacks?.onError?.({\n code: \"API_ERROR\",\n message: (message as ErrorEvent).error\n })\n return\n }\n\n // Now we know it has a 'type' field\n if ((message as BeginEvent | TurnEvent | TerminationEvent).type === \"Begin\") {\n // Type narrowed to BeginEvent\n const beginMsg = message as BeginEvent\n callbacks?.onMetadata?.({\n sessionId: beginMsg.id,\n expiresAt: new Date(beginMsg.expires_at).toISOString()\n })\n } else if ((message as BeginEvent | TurnEvent | TerminationEvent).type === \"Turn\") {\n // Type narrowed to TurnEvent\n const turnMsg = message as TurnEvent\n // v3 uses a single \"Turn\" event with end_of_turn flag instead of PartialTranscript/FinalTranscript\n callbacks?.onTranscript?.({\n type: \"transcript\",\n text: turnMsg.transcript,\n isFinal: turnMsg.end_of_turn,\n confidence: turnMsg.end_of_turn_confidence,\n words: turnMsg.words.map((word: StreamingWord) => ({\n text: word.text,\n start: word.start / 1000, // Convert ms to seconds\n end: word.end / 1000,\n confidence: word.confidence\n })),\n data: turnMsg\n })\n } else if ((message as BeginEvent | TurnEvent | TerminationEvent).type === \"Termination\") {\n // Type narrowed to TerminationEvent\n const termMsg = message as TerminationEvent\n callbacks?.onMetadata?.({\n terminated: true,\n audioDurationSeconds: termMsg.audio_duration_seconds,\n sessionDurationSeconds: termMsg.session_duration_seconds\n })\n }\n } catch (error) {\n callbacks?.onError?.({\n code: \"PARSE_ERROR\",\n message: \"Failed to parse WebSocket message\",\n details: error\n })\n }\n })\n\n ws.on(\"error\", (error: Error) => {\n callbacks?.onError?.({\n code: \"WEBSOCKET_ERROR\",\n message: error.message,\n details: error\n })\n })\n\n ws.on(\"close\", (code: number, reason: Buffer) => {\n sessionStatus = \"closed\"\n callbacks?.onClose?.(code, reason.toString())\n })\n\n // Wait for connection to open\n await new Promise<void>((resolve, reject) => {\n const timeout = setTimeout(() => {\n reject(new Error(\"WebSocket connection timeout\"))\n }, 10000)\n\n ws.once(\"open\", () => {\n clearTimeout(timeout)\n resolve()\n })\n\n ws.once(\"error\", (error) => {\n clearTimeout(timeout)\n reject(error)\n })\n })\n\n // Return StreamingSession interface\n return {\n id: sessionId,\n provider: this.name,\n 
createdAt: new Date(),\n getStatus: () => sessionStatus,\n sendAudio: async (chunk: AudioChunk) => {\n if (sessionStatus !== \"open\") {\n throw new Error(`Cannot send audio: session is ${sessionStatus}`)\n }\n\n if (ws.readyState !== WebSocket.OPEN) {\n throw new Error(\"WebSocket is not open\")\n }\n\n // AssemblyAI expects base64-encoded audio data\n const base64Audio = chunk.data.toString(\"base64\")\n\n // Send audio data as JSON message\n ws.send(\n JSON.stringify({\n audio_data: base64Audio\n })\n )\n\n // Send termination message if this is the last chunk\n if (chunk.isLast) {\n ws.send(\n JSON.stringify({\n terminate_session: true\n })\n )\n }\n },\n close: async () => {\n if (sessionStatus === \"closed\" || sessionStatus === \"closing\") {\n return\n }\n\n sessionStatus = \"closing\"\n\n // Send termination message before closing\n if (ws.readyState === WebSocket.OPEN) {\n ws.send(\n JSON.stringify({\n terminate_session: true\n })\n )\n }\n\n // Close WebSocket\n return new Promise<void>((resolve) => {\n const timeout = setTimeout(() => {\n ws.terminate()\n resolve()\n }, 5000)\n\n ws.close()\n\n ws.once(\"close\", () => {\n clearTimeout(timeout)\n sessionStatus = \"closed\"\n resolve()\n })\n })\n }\n }\n }\n}\n\n/**\n * Factory function to create an AssemblyAI adapter\n */\nexport function createAssemblyAIAdapter(config: ProviderConfig): AssemblyAIAdapter {\n const adapter = new AssemblyAIAdapter()\n adapter.initialize(config)\n return adapter\n}\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * AssemblyAI API\n * AssemblyAI API\n * OpenAPI spec version: 1.3.4\n */\n\nimport { faker } from \"@faker-js/faker\"\nimport type { AxiosRequestConfig, AxiosResponse } from \"axios\"\nimport axios from \"axios\"\nimport { delay, HttpResponse, http } from \"msw\"\nimport type {\n AutoHighlightsResult,\n ContentSafetyLabelsResult,\n CreateRealtimeTemporaryTokenParams,\n GetSubtitlesParams,\n LemurQuestionAnswerParams,\n LemurQuestionAnswerResponse,\n LemurResponse,\n LemurStringResponse,\n LemurSummaryParams,\n LemurSummaryResponse,\n LemurTaskParams,\n LemurTaskResponse,\n ListTranscriptsParams,\n ParagraphsResponse,\n PurgeLemurRequestDataResponse,\n RealtimeTemporaryTokenResponse,\n RedactedAudioResponse,\n SentencesResponse,\n SubtitleFormat,\n TopicDetectionModelResult,\n Transcript,\n TranscriptList,\n TranscriptParams,\n UploadedFile,\n WordSearchParams,\n WordSearchResponse\n} from \"../schema\"\nimport {\n AudioIntelligenceModelStatus,\n RedactedAudioStatus,\n RedactPiiAudioQuality,\n SpeechModel,\n SubstitutionPolicy,\n TranscriptLanguageCode,\n TranscriptStatus\n} from \"../schema\"\n\n/**\n * <Note>To upload a media file to our EU server, replace `api.assemblyai.com` with `api.eu.assemblyai.com`.</Note>\nUpload a media file to AssemblyAI's servers.\n\n * @summary Upload a media file\n */\nexport const uploadFile = <TData = AxiosResponse<UploadedFile>>(\n uploadFileBody: Blob,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(\"/v2/upload\", uploadFileBody, options)\n}\n\n/**\n * <Note>To use our EU server for transcription, replace `api.assemblyai.com` with `api.eu.assemblyai.com`.</Note>\nCreate a transcript from a media file that is accessible via a URL.\n\n * @summary Transcribe audio\n */\nexport const createTranscript = <TData = AxiosResponse<Transcript>>(\n transcriptParams: TranscriptParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(\"/v2/transcript\", transcriptParams, options)\n}\n\n/**\n * 
<Note>To retrieve your transcriptions on our EU server, replace `api.assemblyai.com` with `api.eu.assemblyai.com`.</Note>\nRetrieve a list of transcripts you created. \nTranscripts are sorted from newest to oldest and can be retrieved for the last 90 days of usage. The previous URL always points to a page with older transcripts.\n\nIf you need to retrieve transcripts from more than 90 days ago please reach out to our Support team at support@assemblyai.com.\n\n * @summary List transcripts\n */\nexport const listTranscripts = <TData = AxiosResponse<TranscriptList>>(\n params?: ListTranscriptsParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/v2/transcript\", {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * <Note>To retrieve your transcriptions on our EU server, replace `api.assemblyai.com` with `api.eu.assemblyai.com`.</Note>\nGet the transcript resource. The transcript is ready when the \"status\" is \"completed\".\n\n * @summary Get transcript\n */\nexport const getTranscript = <TData = AxiosResponse<Transcript>>(\n transcriptId: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/v2/transcript/${transcriptId}`, options)\n}\n\n/**\n * <Note>To delete your transcriptions on our EU server, replace `api.assemblyai.com` with `api.eu.assemblyai.com`.</Note>\nRemove the data from the transcript and mark it as deleted.\n\n * @summary Delete transcript\n */\nexport const deleteTranscript = <TData = AxiosResponse<Transcript>>(\n transcriptId: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.delete(`/v2/transcript/${transcriptId}`, options)\n}\n\n/**\n * <Note>To retrieve your transcriptions on our EU server, replace `api.assemblyai.com` with `api.eu.assemblyai.com`.</Note>\nExport your transcript in SRT or VTT format to use with a video player for subtitles and closed captions.\n\n * @summary Get subtitles for transcript\n */\nexport const getSubtitles = <TData = AxiosResponse<string>>(\n transcriptId: string,\n subtitleFormat: SubtitleFormat,\n params?: GetSubtitlesParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/v2/transcript/${transcriptId}/${subtitleFormat}`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * <Note>To retrieve your transcriptions on our EU server, replace `api.assemblyai.com` with `api.eu.assemblyai.com`.</Note>\nGet the transcript split by sentences. The API will attempt to semantically segment the transcript into sentences to create more reader-friendly transcripts.\n\n * @summary Get sentences in transcript\n */\nexport const getTranscriptSentences = <TData = AxiosResponse<SentencesResponse>>(\n transcriptId: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/v2/transcript/${transcriptId}/sentences`, options)\n}\n\n/**\n * <Note>To retrieve your transcriptions on our EU server, replace `api.assemblyai.com` with `api.eu.assemblyai.com`.</Note>\nGet the transcript split by paragraphs. 
The API will attempt to semantically segment your transcript into paragraphs to create more reader-friendly transcripts.\n\n * @summary Get paragraphs in transcript\n */\nexport const getTranscriptParagraphs = <TData = AxiosResponse<ParagraphsResponse>>(\n transcriptId: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/v2/transcript/${transcriptId}/paragraphs`, options)\n}\n\n/**\n * <Note>To search through a transcription created on our EU server, replace `api.assemblyai.com` with `api.eu.assemblyai.com`.</Note>\nSearch through the transcript for keywords. You can search for individual words, numbers, or phrases containing up to five words or numbers.\n\n * @summary Search words in transcript\n */\nexport const wordSearch = <TData = AxiosResponse<WordSearchResponse>>(\n transcriptId: string,\n params: WordSearchParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/v2/transcript/${transcriptId}/word-search`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * <Note>To retrieve your transcriptions on our EU server, replace `api.assemblyai.com` with `api.eu.assemblyai.com`.</Note>\nRetrieve the redacted audio object containing the status and URL to the redacted audio.\n\n * @summary Get redacted audio\n */\nexport const getRedactedAudio = <TData = AxiosResponse<RedactedAudioResponse>>(\n transcriptId: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/v2/transcript/${transcriptId}/redacted-audio`, options)\n}\n\n/**\n * <Warning>Streaming Speech-to-Text is currently not available on the EU endpoint.</Warning>\n<Note>Any usage associated with a temporary token will be attributed to the API key that generated it.</Note>\nCreate a temporary authentication token for Streaming Speech-to-Text\n\n * @summary Create temporary authentication token for Streaming STT\n */\nexport const createTemporaryToken = <TData = AxiosResponse<RealtimeTemporaryTokenResponse>>(\n createRealtimeTemporaryTokenParams: CreateRealtimeTemporaryTokenParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(\"/v2/realtime/token\", createRealtimeTemporaryTokenParams, options)\n}\n\n/**\n * Use the LeMUR task endpoint to input your own LLM prompt.\n\n * @summary Run a task using LeMUR\n */\nexport const lemurTask = <TData = AxiosResponse<LemurTaskResponse>>(\n lemurTaskParams: LemurTaskParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(\"/lemur/v3/generate/task\", lemurTaskParams, options)\n}\n\n/**\n * Custom Summary allows you to distill a piece of audio into a few impactful sentences.\nYou can give the model context to obtain more targeted results while outputting the results in a variety of formats described in human language.\n\n * @summary Summarize a transcript using LeMUR\n */\nexport const lemurSummary = <TData = AxiosResponse<LemurSummaryResponse>>(\n lemurSummaryParams: LemurSummaryParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(\"/lemur/v3/generate/summary\", lemurSummaryParams, options)\n}\n\n/**\n * Question & Answer allows you to ask free-form questions about a single transcript or a group of transcripts.\nThe questions can be any whose answers you find useful, such as judging whether a caller is likely to become a customer or whether all items on a meeting's agenda were covered.\n\n * @summary Ask questions using LeMUR\n */\nexport const lemurQuestionAnswer = <TData = 
AxiosResponse<LemurQuestionAnswerResponse>>(\n lemurQuestionAnswerParams: LemurQuestionAnswerParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(\"/lemur/v3/generate/question-answer\", lemurQuestionAnswerParams, options)\n}\n\n/**\n * Retrieve a LeMUR response that was previously generated.\n\n * @summary Retrieve LeMUR response\n */\nexport const getLemurResponse = <TData = AxiosResponse<LemurResponse>>(\n requestId: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/lemur/v3/${requestId}`, options)\n}\n\n/**\n * Delete the data for a previously submitted LeMUR request.\nThe LLM response data, as well as any context provided in the original request will be removed.\n\n * @summary Purge LeMUR request data\n */\nexport const purgeLemurRequestData = <TData = AxiosResponse<PurgeLemurRequestDataResponse>>(\n requestId: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.delete(`/lemur/v3/${requestId}`, options)\n}\n\nexport type UploadFileResult = AxiosResponse<UploadedFile>\nexport type CreateTranscriptResult = AxiosResponse<Transcript>\nexport type ListTranscriptsResult = AxiosResponse<TranscriptList>\nexport type GetTranscriptResult = AxiosResponse<Transcript>\nexport type DeleteTranscriptResult = AxiosResponse<Transcript>\nexport type GetSubtitlesResult = AxiosResponse<string>\nexport type GetTranscriptSentencesResult = AxiosResponse<SentencesResponse>\nexport type GetTranscriptParagraphsResult = AxiosResponse<ParagraphsResponse>\nexport type WordSearchResult = AxiosResponse<WordSearchResponse>\nexport type GetRedactedAudioResult = AxiosResponse<RedactedAudioResponse>\nexport type CreateTemporaryTokenResult = AxiosResponse<RealtimeTemporaryTokenResponse>\nexport type LemurTaskResult = AxiosResponse<LemurTaskResponse>\nexport type LemurSummaryResult = AxiosResponse<LemurSummaryResponse>\nexport type LemurQuestionAnswerResult = AxiosResponse<LemurQuestionAnswerResponse>\nexport type GetLemurResponseResult = AxiosResponse<LemurResponse>\nexport type PurgeLemurRequestDataResult = AxiosResponse<PurgeLemurRequestDataResponse>\n\nexport const getUploadFileResponseMock = (\n overrideResponse: Partial<UploadedFile> = {}\n): UploadedFile => ({ upload_url: faker.internet.url(), ...overrideResponse })\n\nexport const getCreateTranscriptResponseAutoHighlightsResultMock = (\n overrideResponse: Partial<AutoHighlightsResult> = {}\n): AutoHighlightsResult => ({\n ...{\n status: faker.helpers.arrayElement(Object.values(AudioIntelligenceModelStatus)),\n results: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n count: faker.number.int({ min: undefined, max: undefined }),\n rank: faker.number.float(),\n text: faker.string.alpha(20),\n timestamps: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined })\n }))\n })\n )\n },\n ...overrideResponse\n})\n\nexport const getCreateTranscriptResponseContentSafetyLabelsResultMock = (\n overrideResponse: Partial<ContentSafetyLabelsResult> = {}\n): ContentSafetyLabelsResult => ({\n ...{\n status: faker.helpers.arrayElement(Object.values(AudioIntelligenceModelStatus)),\n results: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n text: faker.string.alpha(20),\n labels: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, 
(_, i) => i + 1).map(\n () => ({\n label: faker.string.alpha(20),\n confidence: faker.number.float(),\n severity: faker.number.float()\n })\n ),\n sentences_idx_start: faker.number.int({ min: undefined, max: undefined }),\n sentences_idx_end: faker.number.int({ min: undefined, max: undefined }),\n timestamp: {\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined })\n }\n })\n ),\n summary: {\n [faker.string.alphanumeric(5)]: faker.number.float()\n },\n severity_score_summary: {\n [faker.string.alphanumeric(5)]: {\n low: faker.number.float(),\n medium: faker.number.float(),\n high: faker.number.float()\n }\n }\n },\n ...overrideResponse\n})\n\nexport const getCreateTranscriptResponseTopicDetectionModelResultMock = (\n overrideResponse: Partial<TopicDetectionModelResult> = {}\n): TopicDetectionModelResult => ({\n ...{\n status: faker.helpers.arrayElement(Object.values(AudioIntelligenceModelStatus)),\n results: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n text: faker.string.alpha(20),\n labels: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({ relevance: faker.number.float(), label: faker.string.alpha(20) })\n ),\n undefined\n ]),\n timestamp: faker.helpers.arrayElement([\n {\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined })\n },\n undefined\n ])\n })\n ),\n summary: {\n [faker.string.alphanumeric(5)]: faker.number.float()\n }\n },\n ...overrideResponse\n})\n\nexport const getCreateTranscriptResponseMock = (\n overrideResponse: Partial<Transcript> = {}\n): Transcript => ({\n id: faker.string.uuid(),\n audio_url: faker.internet.url(),\n status: faker.helpers.arrayElement(Object.values(TranscriptStatus)),\n language_code: faker.helpers.arrayElement([\n faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranscriptLanguageCode)),\n faker.string.alpha(20)\n ]),\n undefined\n ]),\n language_detection: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n language_confidence_threshold: faker.number.float(),\n language_confidence: faker.number.float(),\n speech_model: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SpeechModel)),\n null\n ]),\n text: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n words: faker.helpers.arrayElement([faker.helpers.arrayElement([[], null]), undefined]),\n utterances: faker.helpers.arrayElement([faker.helpers.arrayElement([[], null]), undefined]),\n confidence: faker.helpers.arrayElement([faker.number.float(), undefined]),\n audio_duration: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: undefined, max: undefined }), null]),\n undefined\n ]),\n punctuate: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n format_text: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n disfluencies: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n multichannel: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n audio_channels: faker.helpers.arrayElement([\n faker.number.int({ min: 
undefined, max: undefined }),\n undefined\n ]),\n webhook_url: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n webhook_status_code: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: undefined, max: undefined }), null]),\n undefined\n ]),\n webhook_auth: faker.datatype.boolean(),\n webhook_auth_header_name: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n speed_boost: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n auto_highlights: faker.datatype.boolean(),\n auto_highlights_result: faker.helpers.arrayElement([\n faker.helpers.arrayElement([\n { ...getCreateTranscriptResponseAutoHighlightsResultMock() },\n null\n ]),\n undefined\n ]),\n audio_start_from: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: undefined, max: undefined }), null]),\n undefined\n ]),\n audio_end_at: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: undefined, max: undefined }), null]),\n undefined\n ]),\n word_boost: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ]),\n boost_param: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n filter_profanity: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n redact_pii: faker.datatype.boolean(),\n redact_pii_audio: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n redact_pii_audio_quality: faker.helpers.arrayElement([\n faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(RedactPiiAudioQuality)),\n null\n ]),\n undefined\n ]),\n redact_pii_policies: faker.helpers.arrayElement([\n faker.helpers.arrayElement([[], null]),\n undefined\n ]),\n redact_pii_sub: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SubstitutionPolicy)),\n undefined\n ]),\n speaker_labels: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n speakers_expected: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: undefined, max: undefined }), null]),\n undefined\n ]),\n content_safety: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n content_safety_labels: faker.helpers.arrayElement([\n faker.helpers.arrayElement([\n { ...getCreateTranscriptResponseContentSafetyLabelsResultMock() },\n null\n ]),\n undefined\n ]),\n iab_categories: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n iab_categories_result: faker.helpers.arrayElement([\n faker.helpers.arrayElement([\n { ...getCreateTranscriptResponseTopicDetectionModelResultMock() },\n null\n ]),\n undefined\n ]),\n custom_spelling: faker.helpers.arrayElement([faker.helpers.arrayElement([[], null]), undefined]),\n keyterms_prompt: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ]),\n prompt: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n auto_chapters: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), 
null]),\n undefined\n ]),\n chapters: faker.helpers.arrayElement([faker.helpers.arrayElement([[], null]), undefined]),\n summarization: faker.datatype.boolean(),\n summary_type: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n summary_model: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n summary: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n custom_topics: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n topics: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n sentiment_analysis_results: faker.helpers.arrayElement([\n faker.helpers.arrayElement([[], null]),\n undefined\n ]),\n entity_detection: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n entities: faker.helpers.arrayElement([faker.helpers.arrayElement([[], null]), undefined]),\n speech_threshold: faker.helpers.arrayElement([faker.number.float(), undefined]),\n throttled: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n error: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n language_model: faker.string.alpha(20),\n acoustic_model: faker.string.alpha(20),\n ...overrideResponse\n})\n\nexport const getListTranscriptsResponseMock = (\n overrideResponse: Partial<TranscriptList> = {}\n): TranscriptList => ({\n page_details: {\n limit: faker.number.int({ min: undefined, max: undefined }),\n result_count: faker.number.int({ min: undefined, max: undefined }),\n current_url: faker.internet.url(),\n prev_url: faker.internet.url(),\n next_url: faker.internet.url()\n },\n transcripts: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n id: faker.string.uuid(),\n resource_url: faker.internet.url(),\n status: faker.helpers.arrayElement(Object.values(TranscriptStatus)),\n created: faker.helpers.fromRegExp(\"^(?:(\\d{4}-\\d{2}-\\d{2})T(\\d{2}:\\d{2}:\\d{2}(?:\\.\\d+)?))$\"),\n completed: faker.helpers.arrayElement([faker.string.alpha(20), null]),\n audio_url: faker.internet.url(),\n error: faker.helpers.arrayElement([faker.string.alpha(20), null])\n })\n ),\n ...overrideResponse\n})\n\nexport const getGetTranscriptResponseAutoHighlightsResultMock = (\n overrideResponse: Partial<AutoHighlightsResult> = {}\n): AutoHighlightsResult => ({\n ...{\n status: faker.helpers.arrayElement(Object.values(AudioIntelligenceModelStatus)),\n results: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n count: faker.number.int({ min: undefined, max: undefined }),\n rank: faker.number.float(),\n text: faker.string.alpha(20),\n timestamps: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined })\n }))\n })\n )\n },\n ...overrideResponse\n})\n\nexport const getGetTranscriptResponseContentSafetyLabelsResultMock = (\n overrideResponse: Partial<ContentSafetyLabelsResult> = {}\n): 
ContentSafetyLabelsResult => ({\n ...{\n status: faker.helpers.arrayElement(Object.values(AudioIntelligenceModelStatus)),\n results: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n text: faker.string.alpha(20),\n labels: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n label: faker.string.alpha(20),\n confidence: faker.number.float(),\n severity: faker.number.float()\n })\n ),\n sentences_idx_start: faker.number.int({ min: undefined, max: undefined }),\n sentences_idx_end: faker.number.int({ min: undefined, max: undefined }),\n timestamp: {\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined })\n }\n })\n ),\n summary: {\n [faker.string.alphanumeric(5)]: faker.number.float()\n },\n severity_score_summary: {\n [faker.string.alphanumeric(5)]: {\n low: faker.number.float(),\n medium: faker.number.float(),\n high: faker.number.float()\n }\n }\n },\n ...overrideResponse\n})\n\nexport const getGetTranscriptResponseTopicDetectionModelResultMock = (\n overrideResponse: Partial<TopicDetectionModelResult> = {}\n): TopicDetectionModelResult => ({\n ...{\n status: faker.helpers.arrayElement(Object.values(AudioIntelligenceModelStatus)),\n results: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n text: faker.string.alpha(20),\n labels: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({ relevance: faker.number.float(), label: faker.string.alpha(20) })\n ),\n undefined\n ]),\n timestamp: faker.helpers.arrayElement([\n {\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined })\n },\n undefined\n ])\n })\n ),\n summary: {\n [faker.string.alphanumeric(5)]: faker.number.float()\n }\n },\n ...overrideResponse\n})\n\nexport const getGetTranscriptResponseMock = (\n overrideResponse: Partial<Transcript> = {}\n): Transcript => ({\n id: faker.string.uuid(),\n audio_url: faker.internet.url(),\n status: faker.helpers.arrayElement(Object.values(TranscriptStatus)),\n language_code: faker.helpers.arrayElement([\n faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranscriptLanguageCode)),\n faker.string.alpha(20)\n ]),\n undefined\n ]),\n language_detection: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n language_confidence_threshold: faker.number.float(),\n language_confidence: faker.number.float(),\n speech_model: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SpeechModel)),\n null\n ]),\n text: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n words: faker.helpers.arrayElement([faker.helpers.arrayElement([[], null]), undefined]),\n utterances: faker.helpers.arrayElement([faker.helpers.arrayElement([[], null]), undefined]),\n confidence: faker.helpers.arrayElement([faker.number.float(), undefined]),\n audio_duration: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: undefined, max: undefined }), null]),\n undefined\n ]),\n punctuate: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n format_text: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n 
disfluencies: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n multichannel: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n audio_channels: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n webhook_url: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n webhook_status_code: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: undefined, max: undefined }), null]),\n undefined\n ]),\n webhook_auth: faker.datatype.boolean(),\n webhook_auth_header_name: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n speed_boost: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n auto_highlights: faker.datatype.boolean(),\n auto_highlights_result: faker.helpers.arrayElement([\n faker.helpers.arrayElement([{ ...getGetTranscriptResponseAutoHighlightsResultMock() }, null]),\n undefined\n ]),\n audio_start_from: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: undefined, max: undefined }), null]),\n undefined\n ]),\n audio_end_at: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: undefined, max: undefined }), null]),\n undefined\n ]),\n word_boost: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ]),\n boost_param: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n filter_profanity: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n redact_pii: faker.datatype.boolean(),\n redact_pii_audio: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n redact_pii_audio_quality: faker.helpers.arrayElement([\n faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(RedactPiiAudioQuality)),\n null\n ]),\n undefined\n ]),\n redact_pii_policies: faker.helpers.arrayElement([\n faker.helpers.arrayElement([[], null]),\n undefined\n ]),\n redact_pii_sub: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SubstitutionPolicy)),\n undefined\n ]),\n speaker_labels: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n speakers_expected: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: undefined, max: undefined }), null]),\n undefined\n ]),\n content_safety: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n content_safety_labels: faker.helpers.arrayElement([\n faker.helpers.arrayElement([\n { ...getGetTranscriptResponseContentSafetyLabelsResultMock() },\n null\n ]),\n undefined\n ]),\n iab_categories: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n iab_categories_result: faker.helpers.arrayElement([\n faker.helpers.arrayElement([\n { ...getGetTranscriptResponseTopicDetectionModelResultMock() },\n null\n ]),\n undefined\n ]),\n custom_spelling: faker.helpers.arrayElement([faker.helpers.arrayElement([[], null]), undefined]),\n keyterms_prompt: faker.helpers.arrayElement([\n 
Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ]),\n prompt: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n auto_chapters: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n chapters: faker.helpers.arrayElement([faker.helpers.arrayElement([[], null]), undefined]),\n summarization: faker.datatype.boolean(),\n summary_type: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n summary_model: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n summary: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n custom_topics: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n topics: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n sentiment_analysis_results: faker.helpers.arrayElement([\n faker.helpers.arrayElement([[], null]),\n undefined\n ]),\n entity_detection: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n entities: faker.helpers.arrayElement([faker.helpers.arrayElement([[], null]), undefined]),\n speech_threshold: faker.helpers.arrayElement([faker.number.float(), undefined]),\n throttled: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n error: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n language_model: faker.string.alpha(20),\n acoustic_model: faker.string.alpha(20),\n ...overrideResponse\n})\n\nexport const getDeleteTranscriptResponseAutoHighlightsResultMock = (\n overrideResponse: Partial<AutoHighlightsResult> = {}\n): AutoHighlightsResult => ({\n ...{\n status: faker.helpers.arrayElement(Object.values(AudioIntelligenceModelStatus)),\n results: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n count: faker.number.int({ min: undefined, max: undefined }),\n rank: faker.number.float(),\n text: faker.string.alpha(20),\n timestamps: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => ({\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined })\n }))\n })\n )\n },\n ...overrideResponse\n})\n\nexport const getDeleteTranscriptResponseContentSafetyLabelsResultMock = (\n overrideResponse: Partial<ContentSafetyLabelsResult> = {}\n): ContentSafetyLabelsResult => ({\n ...{\n status: faker.helpers.arrayElement(Object.values(AudioIntelligenceModelStatus)),\n results: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n text: faker.string.alpha(20),\n labels: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n label: faker.string.alpha(20),\n confidence: faker.number.float(),\n severity: faker.number.float()\n })\n ),\n sentences_idx_start: faker.number.int({ min: undefined, max: undefined }),\n sentences_idx_end: faker.number.int({ min: undefined, max: undefined }),\n 
timestamp: {\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined })\n }\n })\n ),\n summary: {\n [faker.string.alphanumeric(5)]: faker.number.float()\n },\n severity_score_summary: {\n [faker.string.alphanumeric(5)]: {\n low: faker.number.float(),\n medium: faker.number.float(),\n high: faker.number.float()\n }\n }\n },\n ...overrideResponse\n})\n\nexport const getDeleteTranscriptResponseTopicDetectionModelResultMock = (\n overrideResponse: Partial<TopicDetectionModelResult> = {}\n): TopicDetectionModelResult => ({\n ...{\n status: faker.helpers.arrayElement(Object.values(AudioIntelligenceModelStatus)),\n results: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n text: faker.string.alpha(20),\n labels: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({ relevance: faker.number.float(), label: faker.string.alpha(20) })\n ),\n undefined\n ]),\n timestamp: faker.helpers.arrayElement([\n {\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined })\n },\n undefined\n ])\n })\n ),\n summary: {\n [faker.string.alphanumeric(5)]: faker.number.float()\n }\n },\n ...overrideResponse\n})\n\nexport const getDeleteTranscriptResponseMock = (\n overrideResponse: Partial<Transcript> = {}\n): Transcript => ({\n id: faker.string.uuid(),\n audio_url: faker.internet.url(),\n status: faker.helpers.arrayElement(Object.values(TranscriptStatus)),\n language_code: faker.helpers.arrayElement([\n faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(TranscriptLanguageCode)),\n faker.string.alpha(20)\n ]),\n undefined\n ]),\n language_detection: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n language_confidence_threshold: faker.number.float(),\n language_confidence: faker.number.float(),\n speech_model: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SpeechModel)),\n null\n ]),\n text: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n words: faker.helpers.arrayElement([faker.helpers.arrayElement([[], null]), undefined]),\n utterances: faker.helpers.arrayElement([faker.helpers.arrayElement([[], null]), undefined]),\n confidence: faker.helpers.arrayElement([faker.number.float(), undefined]),\n audio_duration: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: undefined, max: undefined }), null]),\n undefined\n ]),\n punctuate: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n format_text: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n disfluencies: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n multichannel: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n audio_channels: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n webhook_url: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n webhook_status_code: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: undefined, max: undefined }), null]),\n undefined\n ]),\n 
webhook_auth: faker.datatype.boolean(),\n webhook_auth_header_name: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n speed_boost: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n auto_highlights: faker.datatype.boolean(),\n auto_highlights_result: faker.helpers.arrayElement([\n faker.helpers.arrayElement([\n { ...getDeleteTranscriptResponseAutoHighlightsResultMock() },\n null\n ]),\n undefined\n ]),\n audio_start_from: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: undefined, max: undefined }), null]),\n undefined\n ]),\n audio_end_at: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: undefined, max: undefined }), null]),\n undefined\n ]),\n word_boost: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ]),\n boost_param: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n filter_profanity: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n redact_pii: faker.datatype.boolean(),\n redact_pii_audio: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n redact_pii_audio_quality: faker.helpers.arrayElement([\n faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(RedactPiiAudioQuality)),\n null\n ]),\n undefined\n ]),\n redact_pii_policies: faker.helpers.arrayElement([\n faker.helpers.arrayElement([[], null]),\n undefined\n ]),\n redact_pii_sub: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(SubstitutionPolicy)),\n undefined\n ]),\n speaker_labels: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n speakers_expected: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.number.int({ min: undefined, max: undefined }), null]),\n undefined\n ]),\n content_safety: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n content_safety_labels: faker.helpers.arrayElement([\n faker.helpers.arrayElement([\n { ...getDeleteTranscriptResponseContentSafetyLabelsResultMock() },\n null\n ]),\n undefined\n ]),\n iab_categories: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n iab_categories_result: faker.helpers.arrayElement([\n faker.helpers.arrayElement([\n { ...getDeleteTranscriptResponseTopicDetectionModelResultMock() },\n null\n ]),\n undefined\n ]),\n custom_spelling: faker.helpers.arrayElement([faker.helpers.arrayElement([[], null]), undefined]),\n keyterms_prompt: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ]),\n prompt: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n auto_chapters: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n chapters: faker.helpers.arrayElement([faker.helpers.arrayElement([[], null]), undefined]),\n summarization: faker.datatype.boolean(),\n summary_type: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n 
summary_model: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n summary: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n custom_topics: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n topics: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.string.alpha(20)\n ),\n undefined\n ]),\n sentiment_analysis: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n sentiment_analysis_results: faker.helpers.arrayElement([\n faker.helpers.arrayElement([[], null]),\n undefined\n ]),\n entity_detection: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n entities: faker.helpers.arrayElement([faker.helpers.arrayElement([[], null]), undefined]),\n speech_threshold: faker.helpers.arrayElement([faker.number.float(), undefined]),\n throttled: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.datatype.boolean(), null]),\n undefined\n ]),\n error: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n language_model: faker.string.alpha(20),\n acoustic_model: faker.string.alpha(20),\n ...overrideResponse\n})\n\nexport const getGetSubtitlesResponseMock = (): string => faker.word.sample()\n\nexport const getGetTranscriptSentencesResponseMock = (\n overrideResponse: Partial<SentencesResponse> = {}\n): SentencesResponse => ({\n id: faker.string.uuid(),\n confidence: faker.number.float(),\n audio_duration: faker.number.int({ min: undefined, max: undefined }),\n sentences: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n text: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.float(),\n words: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n confidence: faker.number.float(),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n text: faker.string.alpha(20),\n channel: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n speaker: faker.helpers.arrayElement([faker.string.alpha(20), null])\n })\n ),\n channel: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n speaker: faker.helpers.arrayElement([faker.string.alpha(20), null])\n })\n ),\n ...overrideResponse\n})\n\nexport const getGetTranscriptParagraphsResponseMock = (\n overrideResponse: Partial<ParagraphsResponse> = {}\n): ParagraphsResponse => ({\n id: faker.string.uuid(),\n confidence: faker.number.float(),\n audio_duration: faker.number.int({ min: undefined, max: undefined }),\n paragraphs: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n text: faker.string.alpha(20),\n start: faker.number.int({ min: undefined, max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n confidence: faker.number.float(),\n words: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n confidence: faker.number.float(),\n start: faker.number.int({ min: undefined, 
max: undefined }),\n end: faker.number.int({ min: undefined, max: undefined }),\n text: faker.string.alpha(20),\n channel: faker.helpers.arrayElement([\n faker.helpers.arrayElement([faker.string.alpha(20), null]),\n undefined\n ]),\n speaker: faker.helpers.arrayElement([faker.string.alpha(20), null])\n })\n )\n })\n ),\n ...overrideResponse\n})\n\nexport const getWordSearchResponseMock = (\n overrideResponse: Partial<WordSearchResponse> = {}\n): WordSearchResponse => ({\n id: faker.string.uuid(),\n total_count: faker.number.int({ min: undefined, max: undefined }),\n matches: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n text: faker.string.alpha(20),\n count: faker.number.int({ min: undefined, max: undefined }),\n timestamps: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() =>\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.number.int({ min: undefined, max: undefined })\n )\n ),\n indexes: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => faker.number.int({ min: undefined, max: undefined })\n )\n })\n ),\n ...overrideResponse\n})\n\nexport const getGetRedactedAudioResponseMock = (\n overrideResponse: Partial<RedactedAudioResponse> = {}\n): RedactedAudioResponse => ({\n status: faker.helpers.arrayElement(Object.values(RedactedAudioStatus)),\n redacted_audio_url: faker.internet.url(),\n ...overrideResponse\n})\n\nexport const getCreateTemporaryTokenResponseMock = (\n overrideResponse: Partial<RealtimeTemporaryTokenResponse> = {}\n): RealtimeTemporaryTokenResponse => ({ token: faker.string.alpha(20), ...overrideResponse })\n\nexport const getLemurTaskResponseMock = (): LemurTaskResponse => ({\n ...{\n ...{ response: faker.string.alpha(20) },\n ...{\n request_id: faker.string.uuid(),\n usage: {\n input_tokens: faker.number.int({ min: 0, max: undefined }),\n output_tokens: faker.number.int({ min: 0, max: undefined })\n }\n }\n }\n})\n\nexport const getLemurSummaryResponseMock = (): LemurSummaryResponse => ({\n ...{\n ...{ response: faker.string.alpha(20) },\n ...{\n request_id: faker.string.uuid(),\n usage: {\n input_tokens: faker.number.int({ min: 0, max: undefined }),\n output_tokens: faker.number.int({ min: 0, max: undefined })\n }\n }\n }\n})\n\nexport const getLemurQuestionAnswerResponseMock = (): LemurQuestionAnswerResponse => ({\n ...{\n request_id: faker.string.uuid(),\n usage: {\n input_tokens: faker.number.int({ min: 0, max: undefined }),\n output_tokens: faker.number.int({ min: 0, max: undefined })\n }\n },\n ...{\n response: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({ question: faker.string.alpha(20), answer: faker.string.alpha(20) })\n )\n }\n})\n\nexport const getGetLemurResponseResponseLemurStringResponseMock = (\n overrideResponse: Partial<LemurStringResponse> = {}\n): LemurStringResponse => ({\n ...{\n ...{ response: faker.string.alpha(20) },\n ...{\n request_id: faker.string.uuid(),\n usage: {\n input_tokens: faker.number.int({ min: 0, max: undefined }),\n output_tokens: faker.number.int({ min: 0, max: undefined })\n }\n }\n },\n ...overrideResponse\n})\n\nexport const getGetLemurResponseResponseLemurQuestionAnswerResponseMock = (\n overrideResponse: Partial<LemurQuestionAnswerResponse> = {}\n): LemurQuestionAnswerResponse => ({\n ...{\n ...{\n request_id: faker.string.uuid(),\n usage: {\n input_tokens: faker.number.int({ min: 0, max: 
undefined }),\n output_tokens: faker.number.int({ min: 0, max: undefined })\n }\n },\n ...{\n response: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({ question: faker.string.alpha(20), answer: faker.string.alpha(20) })\n )\n }\n },\n ...overrideResponse\n})\n\nexport const getGetLemurResponseResponseMock = (): LemurResponse =>\n faker.helpers.arrayElement([\n { ...getGetLemurResponseResponseLemurStringResponseMock() },\n { ...getGetLemurResponseResponseLemurQuestionAnswerResponseMock() }\n ])\n\nexport const getPurgeLemurRequestDataResponseMock = (\n overrideResponse: Partial<PurgeLemurRequestDataResponse> = {}\n): PurgeLemurRequestDataResponse => ({\n request_id: faker.string.uuid(),\n request_id_to_purge: faker.string.uuid(),\n deleted: faker.datatype.boolean(),\n ...overrideResponse\n})\n\nexport const getUploadFileMockHandler = (\n overrideResponse?:\n | UploadedFile\n | ((\n info: Parameters<Parameters<typeof http.post>[1]>[0]\n ) => Promise<UploadedFile> | UploadedFile)\n) => {\n return http.post(\"https://api.assemblyai.com/v2/upload\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getUploadFileResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getCreateTranscriptMockHandler = (\n overrideResponse?:\n | Transcript\n | ((info: Parameters<Parameters<typeof http.post>[1]>[0]) => Promise<Transcript> | Transcript)\n) => {\n return http.post(\"https://api.assemblyai.com/v2/transcript\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getCreateTranscriptResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getListTranscriptsMockHandler = (\n overrideResponse?:\n | TranscriptList\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<TranscriptList> | TranscriptList)\n) => {\n return http.get(\"https://api.assemblyai.com/v2/transcript\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getListTranscriptsResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getGetTranscriptMockHandler = (\n overrideResponse?:\n | Transcript\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<Transcript> | Transcript)\n) => {\n return http.get(\"https://api.assemblyai.com/v2/transcript/:transcriptId\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getGetTranscriptResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getDeleteTranscriptMockHandler = (\n overrideResponse?:\n | Transcript\n | ((info: Parameters<Parameters<typeof http.delete>[1]>[0]) => Promise<Transcript> | Transcript)\n) => {\n return http.delete(\"https://api.assemblyai.com/v2/transcript/:transcriptId\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getDeleteTranscriptResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getGetSubtitlesMockHandler = (\n overrideResponse?:\n | string\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<string> | string)\n) => {\n return http.get(\n \"https://api.assemblyai.com/v2/transcript/:transcriptId/:subtitleFormat\",\n async (info) => {\n await delay(1000)\n\n return new HttpResponse(getGetSubtitlesResponseMock(), {\n status: 200,\n headers: { \"Content-Type\": \"text/plain\" }\n })\n }\n )\n}\n\nexport const getGetTranscriptSentencesMockHandler = (\n overrideResponse?:\n | SentencesResponse\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<SentencesResponse> | SentencesResponse)\n) => {\n return http.get(\n \"https://api.assemblyai.com/v2/transcript/:transcriptId/sentences\",\n async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getGetTranscriptSentencesResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n }\n )\n}\n\nexport const getGetTranscriptParagraphsMockHandler = (\n overrideResponse?:\n | ParagraphsResponse\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<ParagraphsResponse> | ParagraphsResponse)\n) => {\n return http.get(\n \"https://api.assemblyai.com/v2/transcript/:transcriptId/paragraphs\",\n async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getGetTranscriptParagraphsResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n }\n )\n}\n\nexport const getWordSearchMockHandler = (\n overrideResponse?:\n | WordSearchResponse\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<WordSearchResponse> | WordSearchResponse)\n) => {\n return http.get(\n \"https://api.assemblyai.com/v2/transcript/:transcriptId/word-search\",\n async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getWordSearchResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n }\n )\n}\n\nexport const getGetRedactedAudioMockHandler = (\n overrideResponse?:\n | RedactedAudioResponse\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<RedactedAudioResponse> | RedactedAudioResponse)\n) => {\n return http.get(\n \"https://api.assemblyai.com/v2/transcript/:transcriptId/redacted-audio\",\n async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getGetRedactedAudioResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n }\n )\n}\n\nexport const getCreateTemporaryTokenMockHandler = (\n overrideResponse?:\n | RealtimeTemporaryTokenResponse\n | ((\n info: Parameters<Parameters<typeof http.post>[1]>[0]\n ) => Promise<RealtimeTemporaryTokenResponse> | RealtimeTemporaryTokenResponse)\n) => {\n return http.post(\"https://api.assemblyai.com/v2/realtime/token\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getCreateTemporaryTokenResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getLemurTaskMockHandler = (\n overrideResponse?:\n | LemurTaskResponse\n | ((\n info: Parameters<Parameters<typeof http.post>[1]>[0]\n ) => Promise<LemurTaskResponse> | LemurTaskResponse)\n) => {\n return http.post(\"https://api.assemblyai.com/lemur/v3/generate/task\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getLemurTaskResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getLemurSummaryMockHandler = (\n overrideResponse?:\n | LemurSummaryResponse\n | ((\n info: Parameters<Parameters<typeof http.post>[1]>[0]\n ) => Promise<LemurSummaryResponse> | LemurSummaryResponse)\n) => {\n return http.post(\"https://api.assemblyai.com/lemur/v3/generate/summary\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getLemurSummaryResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getLemurQuestionAnswerMockHandler = (\n overrideResponse?:\n | LemurQuestionAnswerResponse\n | ((\n info: Parameters<Parameters<typeof http.post>[1]>[0]\n ) => Promise<LemurQuestionAnswerResponse> | LemurQuestionAnswerResponse)\n) => {\n return http.post(\"https://api.assemblyai.com/lemur/v3/generate/question-answer\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getLemurQuestionAnswerResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getGetLemurResponseMockHandler = (\n overrideResponse?:\n | LemurResponse\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<LemurResponse> | LemurResponse)\n) => {\n return http.get(\"https://api.assemblyai.com/lemur/v3/:requestId\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getGetLemurResponseResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getPurgeLemurRequestDataMockHandler = (\n overrideResponse?:\n | PurgeLemurRequestDataResponse\n | ((\n info: Parameters<Parameters<typeof http.delete>[1]>[0]\n ) => Promise<PurgeLemurRequestDataResponse> | PurgeLemurRequestDataResponse)\n) => {\n return http.delete(\"https://api.assemblyai.com/lemur/v3/:requestId\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getPurgeLemurRequestDataResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\nexport const getAssemblyAIAPIMock = () => [\n getUploadFileMockHandler(),\n getCreateTranscriptMockHandler(),\n getListTranscriptsMockHandler(),\n getGetTranscriptMockHandler(),\n getDeleteTranscriptMockHandler(),\n getGetSubtitlesMockHandler(),\n getGetTranscriptSentencesMockHandler(),\n getGetTranscriptParagraphsMockHandler(),\n getWordSearchMockHandler(),\n getGetRedactedAudioMockHandler(),\n getCreateTemporaryTokenMockHandler(),\n getLemurTaskMockHandler(),\n getLemurSummaryMockHandler(),\n getLemurQuestionAnswerMockHandler(),\n getGetLemurResponseMockHandler(),\n getPurgeLemurRequestDataMockHandler()\n]\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * AssemblyAI API\n * AssemblyAI API\n * OpenAPI spec version: 1.3.4\n */\n\nexport * from \"./audioIntelligenceModelStatus\"\nexport * from \"./autoHighlightResult\"\nexport * from \"./autoHighlightsResult\"\nexport * from \"./badRequestResponse\"\nexport * from \"./cannotAccessUploadedFileResponse\"\nexport * from \"./chapter\"\nexport * from \"./contentSafetyLabel\"\nexport * from \"./contentSafetyLabelResult\"\nexport * from \"./contentSafetyLabelsResult\"\nexport * from \"./contentSafetyLabelsResultSeverityScoreSummary\"\nexport * from \"./contentSafetyLabelsResultSummary\"\nexport * from \"./createRealtimeTemporaryTokenParams\"\nexport * from \"./entity\"\nexport * from \"./entityType\"\nexport * from \"./error\"\nexport * from \"./gatewayTimeoutResponse\"\nexport * from \"./getSubtitlesParams\"\nexport * from \"./internalServerErrorResponse\"\nexport * from \"./lemurActionItemsParams\"\nexport * from \"./lemurActionItemsParamsAllOf\"\nexport * from \"./lemurActionItemsResponse\"\nexport * from \"./lemurBaseParams\"\nexport * from \"./lemurBaseParamsContext\"\nexport * from \"./lemurBaseParamsContextOneOf\"\nexport * from \"./lemurBaseParamsFinalModel\"\nexport * from \"./lemurBaseResponse\"\nexport * from \"./lemurModel\"\nexport * from \"./lemurQuestion\"\nexport * from \"./lemurQuestionAnswer\"\nexport * from \"./lemurQuestionAnswerParams\"\nexport * from 
\"./lemurQuestionAnswerParamsAllOf\"\nexport * from \"./lemurQuestionAnswerResponse\"\nexport * from \"./lemurQuestionAnswerResponseAllOf\"\nexport * from \"./lemurQuestionContext\"\nexport * from \"./lemurQuestionContextOneOf\"\nexport * from \"./lemurResponse\"\nexport * from \"./lemurStringResponse\"\nexport * from \"./lemurStringResponseAllOf\"\nexport * from \"./lemurSummaryParams\"\nexport * from \"./lemurSummaryParamsAllOf\"\nexport * from \"./lemurSummaryResponse\"\nexport * from \"./lemurTaskParams\"\nexport * from \"./lemurTaskParamsAllOf\"\nexport * from \"./lemurTaskResponse\"\nexport * from \"./lemurUsage\"\nexport * from \"./listTranscriptParams\"\nexport * from \"./listTranscriptsParams\"\nexport * from \"./notFoundResponse\"\nexport * from \"./pageDetails\"\nexport * from \"./pageDetailsNextUrl\"\nexport * from \"./pageDetailsPrevUrl\"\nexport * from \"./paragraphsResponse\"\nexport * from \"./piiPolicy\"\nexport * from \"./purgeLemurRequestDataResponse\"\nexport * from \"./realtimeTemporaryTokenResponse\"\nexport * from \"./redactedAudioNotification\"\nexport * from \"./redactedAudioResponse\"\nexport * from \"./redactedAudioStatus\"\nexport * from \"./redactPiiAudioQuality\"\nexport * from \"./sentencesResponse\"\nexport * from \"./sentiment\"\nexport * from \"./sentimentAnalysisResult\"\nexport * from \"./sentimentAnalysisResultChannel\"\nexport * from \"./sentimentAnalysisResultSpeaker\"\nexport * from \"./serviceUnavailableResponse\"\nexport * from \"./severityScoreSummary\"\nexport * from \"./speechModel\"\nexport * from \"./substitutionPolicy\"\nexport * from \"./subtitleFormat\"\nexport * from \"./summaryModel\"\nexport * from \"./summaryType\"\nexport * from \"./timestamp\"\nexport * from \"./tooManyRequestsResponse\"\nexport * from \"./topicDetectionModelResult\"\nexport * from \"./topicDetectionModelResultSummary\"\nexport * from \"./topicDetectionResult\"\nexport * from \"./topicDetectionResultLabelsItem\"\nexport * from \"./transcript\"\nexport * from \"./transcriptAudioDuration\"\nexport * from \"./transcriptAudioEndAt\"\nexport * from \"./transcriptAudioStartFrom\"\nexport * from \"./transcriptAutoChapters\"\nexport * from \"./transcriptAutoHighlightsResult\"\nexport * from \"./transcriptBoostParam\"\nexport * from \"./transcriptBoostParamProperty\"\nexport * from \"./transcriptChapters\"\nexport * from \"./transcriptConfidence\"\nexport * from \"./transcriptContentSafety\"\nexport * from \"./transcriptContentSafetyLabels\"\nexport * from \"./transcriptCustomSpelling\"\nexport * from \"./transcriptCustomSpellingProperty\"\nexport * from \"./transcriptCustomTopics\"\nexport * from \"./transcriptDisfluencies\"\nexport * from \"./transcriptEntities\"\nexport * from \"./transcriptEntityDetection\"\nexport * from \"./transcriptFilterProfanity\"\nexport * from \"./transcriptFormatText\"\nexport * from \"./transcriptIabCategories\"\nexport * from \"./transcriptIabCategoriesResult\"\nexport * from \"./transcriptLanguageCode\"\nexport * from \"./transcriptLanguageCodeProperty\"\nexport * from \"./transcriptLanguageConfidence\"\nexport * from \"./transcriptLanguageConfidenceThreshold\"\nexport * from \"./transcriptLanguageDetection\"\nexport * from \"./transcriptList\"\nexport * from \"./transcriptListItem\"\nexport * from \"./transcriptListItemCompleted\"\nexport * from \"./transcriptListItemError\"\nexport * from \"./transcriptMultichannel\"\nexport * from \"./transcriptOptionalParams\"\nexport * from \"./transcriptOptionalParamsLanguageCode\"\nexport * from 
\"./transcriptOptionalParamsLanguageCodeOneOf\"\nexport * from \"./transcriptOptionalParamsRedactPiiSub\"\nexport * from \"./transcriptOptionalParamsSpeakersExpected\"\nexport * from \"./transcriptOptionalParamsSpeechModel\"\nexport * from \"./transcriptOptionalParamsSpeechThreshold\"\nexport * from \"./transcriptOptionalParamsWebhookAuthHeaderName\"\nexport * from \"./transcriptOptionalParamsWebhookAuthHeaderValue\"\nexport * from \"./transcriptParagraph\"\nexport * from \"./transcriptParams\"\nexport * from \"./transcriptParamsAllOf\"\nexport * from \"./transcriptPunctuate\"\nexport * from \"./transcriptReadyNotification\"\nexport * from \"./transcriptReadyStatus\"\nexport * from \"./transcriptRedactPiiAudio\"\nexport * from \"./transcriptRedactPiiAudioQuality\"\nexport * from \"./transcriptRedactPiiPolicies\"\nexport * from \"./transcriptSentence\"\nexport * from \"./transcriptSentenceChannel\"\nexport * from \"./transcriptSentenceSpeaker\"\nexport * from \"./transcriptSentimentAnalysis\"\nexport * from \"./transcriptSentimentAnalysisResults\"\nexport * from \"./transcriptSpeakerLabels\"\nexport * from \"./transcriptSpeakersExpected\"\nexport * from \"./transcriptSpeechModel\"\nexport * from \"./transcriptSpeechThreshold\"\nexport * from \"./transcriptSpeedBoost\"\nexport * from \"./transcriptStatus\"\nexport * from \"./transcriptSummary\"\nexport * from \"./transcriptSummaryModel\"\nexport * from \"./transcriptSummaryType\"\nexport * from \"./transcriptText\"\nexport * from \"./transcriptThrottled\"\nexport * from \"./transcriptUtterance\"\nexport * from \"./transcriptUtteranceChannel\"\nexport * from \"./transcriptUtterances\"\nexport * from \"./transcriptWebhookAuthHeaderName\"\nexport * from \"./transcriptWebhookNotification\"\nexport * from \"./transcriptWebhookStatusCode\"\nexport * from \"./transcriptWebhookUrl\"\nexport * from \"./transcriptWord\"\nexport * from \"./transcriptWordChannel\"\nexport * from \"./transcriptWordSpeaker\"\nexport * from \"./transcriptWords\"\nexport * from \"./unauthorizedResponse\"\nexport * from \"./uploadedFile\"\nexport * from \"./wordSearchMatch\"\nexport * from \"./wordSearchParams\"\nexport * from \"./wordSearchResponse\"\nexport * from \"./wordSearchTimestamp\"\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * AssemblyAI API\n * AssemblyAI API\n * OpenAPI spec version: 1.3.4\n */\n\n/**\n * Either success, or unavailable in the rare case that the model failed\n */\nexport type AudioIntelligenceModelStatus =\n (typeof AudioIntelligenceModelStatus)[keyof typeof AudioIntelligenceModelStatus]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const AudioIntelligenceModelStatus = {\n success: \"success\",\n unavailable: \"unavailable\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * AssemblyAI API\n * AssemblyAI API\n * OpenAPI spec version: 1.3.4\n */\n\n/**\n * The type of entity for the detected entity\n */\nexport type EntityType = (typeof EntityType)[keyof typeof EntityType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const EntityType = {\n account_number: \"account_number\",\n banking_information: \"banking_information\",\n blood_type: \"blood_type\",\n credit_card_cvv: \"credit_card_cvv\",\n credit_card_expiration: \"credit_card_expiration\",\n credit_card_number: \"credit_card_number\",\n date: \"date\",\n date_interval: \"date_interval\",\n date_of_birth: \"date_of_birth\",\n drivers_license: \"drivers_license\",\n drug: \"drug\",\n 
duration: \"duration\",\n email_address: \"email_address\",\n event: \"event\",\n filename: \"filename\",\n gender_sexuality: \"gender_sexuality\",\n healthcare_number: \"healthcare_number\",\n injury: \"injury\",\n ip_address: \"ip_address\",\n language: \"language\",\n location: \"location\",\n marital_status: \"marital_status\",\n medical_condition: \"medical_condition\",\n medical_process: \"medical_process\",\n money_amount: \"money_amount\",\n nationality: \"nationality\",\n number_sequence: \"number_sequence\",\n occupation: \"occupation\",\n organization: \"organization\",\n passport_number: \"passport_number\",\n password: \"password\",\n person_age: \"person_age\",\n person_name: \"person_name\",\n phone_number: \"phone_number\",\n physical_attribute: \"physical_attribute\",\n political_affiliation: \"political_affiliation\",\n religion: \"religion\",\n statistics: \"statistics\",\n time: \"time\",\n url: \"url\",\n us_social_security_number: \"us_social_security_number\",\n username: \"username\",\n vehicle_id: \"vehicle_id\",\n zodiac_sign: \"zodiac_sign\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * AssemblyAI API\n * AssemblyAI API\n * OpenAPI spec version: 1.3.4\n */\n\n/**\n * The model that is used for the final prompt after compression is performed.\n\n */\nexport type LemurModel = (typeof LemurModel)[keyof typeof LemurModel]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const LemurModel = {\n \"anthropic/claude-3-5-sonnet\": \"anthropic/claude-3-5-sonnet\",\n \"anthropic/claude-3-opus\": \"anthropic/claude-3-opus\",\n \"anthropic/claude-3-haiku\": \"anthropic/claude-3-haiku\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * AssemblyAI API\n * AssemblyAI API\n * OpenAPI spec version: 1.3.4\n */\n\n/**\n * The type of PII to redact\n */\nexport type PiiPolicy = (typeof PiiPolicy)[keyof typeof PiiPolicy]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const PiiPolicy = {\n account_number: \"account_number\",\n banking_information: \"banking_information\",\n blood_type: \"blood_type\",\n credit_card_cvv: \"credit_card_cvv\",\n credit_card_expiration: \"credit_card_expiration\",\n credit_card_number: \"credit_card_number\",\n date: \"date\",\n date_interval: \"date_interval\",\n date_of_birth: \"date_of_birth\",\n drivers_license: \"drivers_license\",\n drug: \"drug\",\n duration: \"duration\",\n email_address: \"email_address\",\n event: \"event\",\n filename: \"filename\",\n gender_sexuality: \"gender_sexuality\",\n healthcare_number: \"healthcare_number\",\n injury: \"injury\",\n ip_address: \"ip_address\",\n language: \"language\",\n location: \"location\",\n marital_status: \"marital_status\",\n medical_condition: \"medical_condition\",\n medical_process: \"medical_process\",\n money_amount: \"money_amount\",\n nationality: \"nationality\",\n number_sequence: \"number_sequence\",\n occupation: \"occupation\",\n organization: \"organization\",\n passport_number: \"passport_number\",\n password: \"password\",\n person_age: \"person_age\",\n person_name: \"person_name\",\n phone_number: \"phone_number\",\n physical_attribute: \"physical_attribute\",\n political_affiliation: \"political_affiliation\",\n religion: \"religion\",\n statistics: \"statistics\",\n time: \"time\",\n url: \"url\",\n us_social_security_number: \"us_social_security_number\",\n username: \"username\",\n vehicle_id: \"vehicle_id\",\n zodiac_sign: \"zodiac_sign\"\n} as const\n","/**\n * 
Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * AssemblyAI API\n * AssemblyAI API\n * OpenAPI spec version: 1.3.4\n */\n\n/**\n * The status of the redacted audio\n */\nexport type RedactedAudioStatus = (typeof RedactedAudioStatus)[keyof typeof RedactedAudioStatus]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const RedactedAudioStatus = {\n redacted_audio_ready: \"redacted_audio_ready\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * AssemblyAI API\n * AssemblyAI API\n * OpenAPI spec version: 1.3.4\n */\n\n/**\n * Controls the filetype of the audio created by redact_pii_audio. Currently supports mp3 (default) and wav. See [PII redaction](https://www.assemblyai.com/docs/models/pii-redaction) for more details.\n */\nexport type RedactPiiAudioQuality =\n (typeof RedactPiiAudioQuality)[keyof typeof RedactPiiAudioQuality]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const RedactPiiAudioQuality = {\n mp3: \"mp3\",\n wav: \"wav\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * AssemblyAI API\n * AssemblyAI API\n * OpenAPI spec version: 1.3.4\n */\n\nexport type Sentiment = (typeof Sentiment)[keyof typeof Sentiment]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const Sentiment = {\n POSITIVE: \"POSITIVE\",\n NEUTRAL: \"NEUTRAL\",\n NEGATIVE: \"NEGATIVE\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * AssemblyAI API\n * AssemblyAI API\n * OpenAPI spec version: 1.3.4\n */\n\n/**\n * The speech model to use for the transcription.\n */\nexport type SpeechModel = (typeof SpeechModel)[keyof typeof SpeechModel]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const SpeechModel = {\n best: \"best\",\n \"slam-1\": \"slam-1\",\n universal: \"universal\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * AssemblyAI API\n * AssemblyAI API\n * OpenAPI spec version: 1.3.4\n */\n\n/**\n * The replacement logic for detected PII, can be \"entity_name\" or \"hash\". 
See [PII redaction](https://www.assemblyai.com/docs/models/pii-redaction) for more details.\n */\nexport type SubstitutionPolicy = (typeof SubstitutionPolicy)[keyof typeof SubstitutionPolicy]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const SubstitutionPolicy = {\n entity_name: \"entity_name\",\n hash: \"hash\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * AssemblyAI API\n * AssemblyAI API\n * OpenAPI spec version: 1.3.4\n */\n\n/**\n * Format of the subtitles\n */\nexport type SubtitleFormat = (typeof SubtitleFormat)[keyof typeof SubtitleFormat]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const SubtitleFormat = {\n srt: \"srt\",\n vtt: \"vtt\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * AssemblyAI API\n * AssemblyAI API\n * OpenAPI spec version: 1.3.4\n */\n\n/**\n * The model to summarize the transcript\n */\nexport type SummaryModel = (typeof SummaryModel)[keyof typeof SummaryModel]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const SummaryModel = {\n informative: \"informative\",\n conversational: \"conversational\",\n catchy: \"catchy\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * AssemblyAI API\n * AssemblyAI API\n * OpenAPI spec version: 1.3.4\n */\n\n/**\n * The type of summary\n */\nexport type SummaryType = (typeof SummaryType)[keyof typeof SummaryType]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const SummaryType = {\n bullets: \"bullets\",\n bullets_verbose: \"bullets_verbose\",\n gist: \"gist\",\n headline: \"headline\",\n paragraph: \"paragraph\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * AssemblyAI API\n * AssemblyAI API\n * OpenAPI spec version: 1.3.4\n */\n\n/**\n * How much to boost specified words\n */\nexport type TranscriptBoostParam = (typeof TranscriptBoostParam)[keyof typeof TranscriptBoostParam]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TranscriptBoostParam = {\n low: \"low\",\n default: \"default\",\n high: \"high\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * AssemblyAI API\n * AssemblyAI API\n * OpenAPI spec version: 1.3.4\n */\n\n/**\n * The language of your audio file. 
Possible values are found in [Supported Languages](https://www.assemblyai.com/docs/concepts/supported-languages).\nThe default value is 'en_us'.\n\n */\nexport type TranscriptLanguageCode =\n (typeof TranscriptLanguageCode)[keyof typeof TranscriptLanguageCode]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TranscriptLanguageCode = {\n en: \"en\",\n en_au: \"en_au\",\n en_uk: \"en_uk\",\n en_us: \"en_us\",\n es: \"es\",\n fr: \"fr\",\n de: \"de\",\n it: \"it\",\n pt: \"pt\",\n nl: \"nl\",\n af: \"af\",\n sq: \"sq\",\n am: \"am\",\n ar: \"ar\",\n hy: \"hy\",\n as: \"as\",\n az: \"az\",\n ba: \"ba\",\n eu: \"eu\",\n be: \"be\",\n bn: \"bn\",\n bs: \"bs\",\n br: \"br\",\n bg: \"bg\",\n my: \"my\",\n ca: \"ca\",\n zh: \"zh\",\n hr: \"hr\",\n cs: \"cs\",\n da: \"da\",\n et: \"et\",\n fo: \"fo\",\n fi: \"fi\",\n gl: \"gl\",\n ka: \"ka\",\n el: \"el\",\n gu: \"gu\",\n ht: \"ht\",\n ha: \"ha\",\n haw: \"haw\",\n he: \"he\",\n hi: \"hi\",\n hu: \"hu\",\n is: \"is\",\n id: \"id\",\n ja: \"ja\",\n jw: \"jw\",\n kn: \"kn\",\n kk: \"kk\",\n km: \"km\",\n ko: \"ko\",\n lo: \"lo\",\n la: \"la\",\n lv: \"lv\",\n ln: \"ln\",\n lt: \"lt\",\n lb: \"lb\",\n mk: \"mk\",\n mg: \"mg\",\n ms: \"ms\",\n ml: \"ml\",\n mt: \"mt\",\n mi: \"mi\",\n mr: \"mr\",\n mn: \"mn\",\n ne: \"ne\",\n no: \"no\",\n nn: \"nn\",\n oc: \"oc\",\n pa: \"pa\",\n ps: \"ps\",\n fa: \"fa\",\n pl: \"pl\",\n ro: \"ro\",\n ru: \"ru\",\n sa: \"sa\",\n sr: \"sr\",\n sn: \"sn\",\n sd: \"sd\",\n si: \"si\",\n sk: \"sk\",\n sl: \"sl\",\n so: \"so\",\n su: \"su\",\n sw: \"sw\",\n sv: \"sv\",\n tl: \"tl\",\n tg: \"tg\",\n ta: \"ta\",\n tt: \"tt\",\n te: \"te\",\n th: \"th\",\n bo: \"bo\",\n tr: \"tr\",\n tk: \"tk\",\n uk: \"uk\",\n ur: \"ur\",\n uz: \"uz\",\n vi: \"vi\",\n cy: \"cy\",\n yi: \"yi\",\n yo: \"yo\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * AssemblyAI API\n * AssemblyAI API\n * OpenAPI spec version: 1.3.4\n */\n\n/**\n * The status of the transcript. Either completed or error.\n */\nexport type TranscriptReadyStatus =\n (typeof TranscriptReadyStatus)[keyof typeof TranscriptReadyStatus]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TranscriptReadyStatus = {\n completed: \"completed\",\n error: \"error\"\n} as const\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * AssemblyAI API\n * AssemblyAI API\n * OpenAPI spec version: 1.3.4\n */\n\n/**\n * The status of your transcript. 
Possible values are queued, processing, completed, or error.\n */\nexport type TranscriptStatus = (typeof TranscriptStatus)[keyof typeof TranscriptStatus]\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const TranscriptStatus = {\n queued: \"queued\",\n processing: \"processing\",\n completed: \"completed\",\n error: \"error\"\n} as const\n","/**\n * Deepgram transcription provider adapter\n * Documentation: https://developers.deepgram.com/\n */\n\nimport axios, { type AxiosInstance } from \"axios\"\nimport WebSocket from \"ws\"\nimport type {\n AudioChunk,\n AudioInput,\n ProviderCapabilities,\n StreamingCallbacks,\n StreamingOptions,\n StreamingSession,\n TranscribeOptions,\n UnifiedTranscriptResponse\n} from \"../router/types\"\nimport { BaseAdapter, type ProviderConfig } from \"./base-adapter\"\n\n// Import Deepgram generated types\nimport type { ListenV1Response } from \"../generated/deepgram/schema/listenV1Response\"\nimport type { ListenV1MediaTranscribeParams } from \"../generated/deepgram/schema/listenV1MediaTranscribeParams\"\nimport type { ListenV1ResponseResultsChannelsItemAlternativesItem } from \"../generated/deepgram/schema/listenV1ResponseResultsChannelsItemAlternativesItem\"\nimport type { ListenV1ResponseResultsChannelsItemAlternativesItemWordsItem } from \"../generated/deepgram/schema/listenV1ResponseResultsChannelsItemAlternativesItemWordsItem\"\nimport type { ListenV1ResponseResultsUtterancesItem } from \"../generated/deepgram/schema/listenV1ResponseResultsUtterancesItem\"\n\n// WebSocket message types (not in OpenAPI spec, manually defined from Deepgram docs)\ninterface DeepgramResultsMessage {\n type: \"Results\"\n is_final: boolean\n channel: {\n alternatives: Array<{\n transcript: string\n confidence: number\n words?: Array<{\n word: string\n start: number\n end: number\n confidence: number\n }>\n }>\n }\n}\n\ninterface DeepgramUtteranceEndMessage {\n type: \"UtteranceEnd\"\n [key: string]: unknown\n}\n\ninterface DeepgramMetadataMessage {\n type: \"Metadata\"\n [key: string]: unknown\n}\n\ntype DeepgramRealtimeMessage =\n | DeepgramResultsMessage\n | DeepgramUtteranceEndMessage\n | DeepgramMetadataMessage\n\n/**\n * Deepgram transcription provider adapter\n *\n * Implements transcription for the Deepgram API with support for:\n * - Synchronous pre-recorded transcription\n * - Real-time streaming transcription (WebSocket)\n * - Speaker diarization (identifying different speakers)\n * - Multi-language detection and transcription\n * - Summarization and sentiment analysis\n * - Entity detection and intent recognition\n * - Custom vocabulary (keywords)\n * - Word-level timestamps with high precision\n * - PII redaction\n *\n * @see https://developers.deepgram.com/ Deepgram API Documentation\n *\n * @example Basic transcription\n * ```typescript\n * import { DeepgramAdapter } from '@meeting-baas/sdk';\n *\n * const adapter = new DeepgramAdapter();\n * adapter.initialize({\n * apiKey: process.env.DEEPGRAM_API_KEY\n * });\n *\n * const result = await adapter.transcribe({\n * type: 'url',\n * url: 'https://example.com/audio.mp3'\n * }, {\n * language: 'en',\n * diarization: true\n * });\n *\n * console.log(result.data.text);\n * console.log(result.data.speakers);\n * ```\n *\n * @example With advanced features\n * ```typescript\n * const result = await adapter.transcribe(audio, {\n * language: 'en',\n * diarization: true,\n * summarization: true,\n * sentimentAnalysis: true,\n * entityDetection: true,\n * customVocabulary: ['TypeScript', 
'JavaScript', 'API']\n * });\n *\n * console.log('Summary:', result.data.summary);\n * console.log('Sentiment:', result.data.metadata?.sentiment);\n * ```\n */\nexport class DeepgramAdapter extends BaseAdapter {\n readonly name = \"deepgram\" as const\n readonly capabilities: ProviderCapabilities = {\n streaming: true,\n diarization: true,\n wordTimestamps: true,\n languageDetection: true,\n customVocabulary: true,\n summarization: true,\n sentimentAnalysis: true,\n entityDetection: true,\n piiRedaction: true\n }\n\n private client?: AxiosInstance\n protected baseUrl = \"https://api.deepgram.com/v1\"\n private wsBaseUrl = \"wss://api.deepgram.com/v1/listen\"\n\n initialize(config: ProviderConfig): void {\n super.initialize(config)\n\n this.client = axios.create({\n baseURL: config.baseUrl || this.baseUrl,\n timeout: config.timeout || 60000,\n headers: {\n Authorization: `Token ${config.apiKey}`,\n \"Content-Type\": \"application/json\",\n ...config.headers\n }\n })\n }\n\n /**\n * Submit audio for transcription\n *\n * Sends audio to Deepgram API for transcription. Deepgram processes\n * synchronously and returns results immediately (no polling required).\n *\n * @param audio - Audio input (URL or file buffer)\n * @param options - Transcription options\n * @param options.language - Language code (e.g., 'en', 'es', 'fr')\n * @param options.languageDetection - Enable automatic language detection\n * @param options.diarization - Enable speaker identification (diarize)\n * @param options.speakersExpected - Expected number of speakers\n * @param options.summarization - Generate text summary\n * @param options.sentimentAnalysis - Analyze sentiment\n * @param options.entityDetection - Detect named entities\n * @param options.piiRedaction - Redact personally identifiable information\n * @param options.customVocabulary - Keywords to boost in recognition\n * @param options.webhookUrl - Callback URL for async processing\n * @returns Normalized transcription response\n *\n * @example Simple transcription\n * ```typescript\n * const result = await adapter.transcribe({\n * type: 'url',\n * url: 'https://example.com/meeting.mp3'\n * });\n * ```\n *\n * @example With advanced features\n * ```typescript\n * const result = await adapter.transcribe({\n * type: 'url',\n * url: 'https://example.com/meeting.mp3'\n * }, {\n * language: 'en',\n * diarization: true,\n * summarization: true,\n * sentimentAnalysis: true,\n * entityDetection: true,\n * customVocabulary: ['API', 'TypeScript', 'JavaScript']\n * });\n * ```\n */\n async transcribe(\n audio: AudioInput,\n options?: TranscribeOptions\n ): Promise<UnifiedTranscriptResponse> {\n this.validateConfig()\n\n try {\n // Build query parameters from options\n const params = this.buildTranscriptionParams(options)\n\n let response: ListenV1Response\n\n if (audio.type === \"url\") {\n // URL-based transcription\n response = await this.client!.post<ListenV1Response>(\n \"/listen\",\n { url: audio.url },\n { params }\n ).then((res) => res.data)\n } else if (audio.type === \"file\") {\n // File-based transcription\n response = await this.client!.post<ListenV1Response>(\"/listen\", audio.file, {\n params,\n headers: {\n \"Content-Type\": \"audio/*\"\n }\n }).then((res) => res.data)\n } else {\n throw new Error(\n \"Deepgram adapter does not support stream type for pre-recorded transcription. 
Use transcribeStream() for real-time streaming.\"\n )\n }\n\n // Deepgram returns results immediately (synchronous)\n return this.normalizeResponse(response)\n } catch (error) {\n return this.createErrorResponse(error)\n }\n }\n\n /**\n * Get transcription result by ID\n *\n * Note: Deepgram processes synchronously, so this method is primarily\n * for retrieving cached results if you've stored the request ID.\n * The initial transcribe() call already returns complete results.\n *\n * @param transcriptId - Request ID from Deepgram\n * @returns Normalized transcription response\n */\n async getTranscript(transcriptId: string): Promise<UnifiedTranscriptResponse> {\n this.validateConfig()\n\n // Deepgram doesn't have a \"get by ID\" endpoint for pre-recorded audio\n // Results are returned immediately on transcription\n return {\n success: false,\n provider: this.name,\n error: {\n code: \"NOT_SUPPORTED\",\n message:\n \"Deepgram returns transcription results immediately. Store the response from transcribe() instead of using getTranscript().\"\n }\n }\n }\n\n /**\n * Build Deepgram transcription parameters from unified options\n */\n private buildTranscriptionParams(options?: TranscribeOptions): ListenV1MediaTranscribeParams {\n const params: ListenV1MediaTranscribeParams = {}\n\n if (!options) {\n return params\n }\n\n // Language configuration\n if (options.language) {\n params.language = options.language\n }\n\n if (options.languageDetection) {\n params.detect_language = true\n }\n\n // Speaker diarization\n if (options.diarization) {\n params.diarize = true\n }\n\n // Custom vocabulary (keywords in Deepgram)\n if (options.customVocabulary && options.customVocabulary.length > 0) {\n params.keywords = options.customVocabulary\n }\n\n // Summarization\n if (options.summarization) {\n params.summarize = true\n }\n\n // Sentiment analysis\n if (options.sentimentAnalysis) {\n params.sentiment = true\n }\n\n // Entity detection\n if (options.entityDetection) {\n params.detect_entities = true\n }\n\n // PII redaction\n if (options.piiRedaction) {\n params.redact = [\"pci\", \"pii\"]\n }\n\n // Webhook callback\n if (options.webhookUrl) {\n params.callback = options.webhookUrl\n }\n\n // Enable features for better results\n params.punctuate = true\n params.utterances = true\n params.smart_format = true\n\n return params\n }\n\n /**\n * Normalize Deepgram response to unified format\n */\n private normalizeResponse(response: ListenV1Response): UnifiedTranscriptResponse {\n // Deepgram returns results immediately\n const channel = response.results.channels?.[0]\n const alternative = channel?.alternatives?.[0]\n\n if (!alternative) {\n return {\n success: false,\n provider: this.name,\n error: {\n code: \"NO_RESULTS\",\n message: \"No transcription results returned by Deepgram\"\n },\n raw: response\n }\n }\n\n return {\n success: true,\n provider: this.name,\n data: {\n id: response.metadata?.request_id || \"\",\n text: alternative.transcript || \"\",\n confidence: alternative.confidence,\n status: \"completed\", // Deepgram returns completed results immediately\n language: channel?.detected_language || undefined,\n duration: response.metadata?.duration,\n speakers: this.extractSpeakers(response),\n words: this.extractWords(alternative),\n utterances: this.extractUtterances(response),\n summary: this.extractSummary(alternative),\n metadata: {\n modelInfo: response.metadata?.model_info,\n channels: response.metadata?.channels,\n sentiment: response.results.sentiments,\n intents: 
response.results.intents,\n topics: response.results.topics\n }\n },\n raw: response\n }\n }\n\n /**\n * Extract speaker information from Deepgram response\n */\n private extractSpeakers(response: ListenV1Response) {\n const utterances = response.results.utterances\n\n if (!utterances || utterances.length === 0) {\n return undefined\n }\n\n // Extract unique speakers from utterances\n const speakerSet = new Set<number>()\n utterances.forEach((utterance: ListenV1ResponseResultsUtterancesItem) => {\n if (utterance.speaker !== undefined) {\n speakerSet.add(utterance.speaker)\n }\n })\n\n if (speakerSet.size === 0) {\n return undefined\n }\n\n return Array.from(speakerSet).map((speakerId) => ({\n id: speakerId.toString(),\n label: `Speaker ${speakerId}`\n }))\n }\n\n /**\n * Extract word timestamps from Deepgram response\n */\n private extractWords(alternative: ListenV1ResponseResultsChannelsItemAlternativesItem) {\n if (!alternative.words || alternative.words.length === 0) {\n return undefined\n }\n\n return alternative.words.map(\n (word: ListenV1ResponseResultsChannelsItemAlternativesItemWordsItem) => ({\n text: word.word || \"\",\n start: word.start || 0,\n end: word.end || 0,\n confidence: word.confidence,\n speaker: undefined // Speaker info is at utterance level, not word level\n })\n )\n }\n\n /**\n * Extract utterances from Deepgram response\n */\n private extractUtterances(response: ListenV1Response) {\n const utterances = response.results.utterances\n\n if (!utterances || utterances.length === 0) {\n return undefined\n }\n\n return utterances.map((utterance: ListenV1ResponseResultsUtterancesItem) => ({\n text: utterance.transcript || \"\",\n start: utterance.start || 0,\n end: utterance.end || 0,\n speaker: utterance.speaker?.toString(),\n confidence: utterance.confidence,\n words: utterance.words?.map((word) => ({\n text: word.word || \"\",\n start: word.start || 0,\n end: word.end || 0,\n confidence: word.confidence\n }))\n }))\n }\n\n /**\n * Extract summary from Deepgram response\n */\n private extractSummary(\n alternative: ListenV1ResponseResultsChannelsItemAlternativesItem\n ): string | undefined {\n if (!alternative.summaries || alternative.summaries.length === 0) {\n return undefined\n }\n\n // Combine all summaries into one\n return alternative.summaries\n .map((summary) => summary.summary)\n .filter(Boolean)\n .join(\" \")\n }\n\n /**\n * Stream audio for real-time transcription\n *\n * Creates a WebSocket connection to Deepgram for streaming transcription.\n * Send audio chunks via session.sendAudio() and receive results via callbacks.\n *\n * @param options - Streaming configuration options\n * @param callbacks - Event callbacks for transcription results\n * @returns Promise that resolves with a StreamingSession\n *\n * @example Real-time streaming\n * ```typescript\n * const session = await adapter.transcribeStream({\n * encoding: 'linear16',\n * sampleRate: 16000,\n * channels: 1,\n * language: 'en',\n * diarization: true,\n * interimResults: true\n * }, {\n * onOpen: () => console.log('Connected'),\n * onTranscript: (event) => {\n * if (event.isFinal) {\n * console.log('Final:', event.text);\n * } else {\n * console.log('Interim:', event.text);\n * }\n * },\n * onError: (error) => console.error('Error:', error),\n * onClose: () => console.log('Disconnected')\n * });\n *\n * // Send audio chunks\n * const audioChunk = getAudioChunk(); // Your audio source\n * await session.sendAudio({ data: audioChunk });\n *\n * // Close when done\n * await session.close();\n * ```\n 
*/\n async transcribeStream(\n options?: StreamingOptions,\n callbacks?: StreamingCallbacks\n ): Promise<StreamingSession> {\n this.validateConfig()\n\n // Build query parameters for WebSocket URL\n const params = new URLSearchParams()\n\n if (options?.encoding) params.append(\"encoding\", options.encoding)\n if (options?.sampleRate) params.append(\"sample_rate\", options.sampleRate.toString())\n if (options?.channels) params.append(\"channels\", options.channels.toString())\n if (options?.language) params.append(\"language\", options.language)\n if (options?.languageDetection) params.append(\"detect_language\", \"true\")\n if (options?.diarization) params.append(\"diarize\", \"true\")\n if (options?.interimResults) params.append(\"interim_results\", \"true\")\n if (options?.summarization) params.append(\"summarize\", \"true\")\n if (options?.sentimentAnalysis) params.append(\"sentiment\", \"true\")\n if (options?.entityDetection) params.append(\"detect_entities\", \"true\")\n if (options?.piiRedaction) params.append(\"redact\", \"pii\")\n if (options?.customVocabulary && options.customVocabulary.length > 0) {\n params.append(\"keywords\", options.customVocabulary.join(\",\"))\n }\n\n const wsUrl = `${this.wsBaseUrl}?${params.toString()}`\n\n // Create WebSocket connection\n const ws = new WebSocket(wsUrl, {\n headers: {\n Authorization: `Token ${this.config!.apiKey}`\n }\n })\n\n let sessionStatus: \"connecting\" | \"open\" | \"closing\" | \"closed\" = \"connecting\"\n const sessionId = `deepgram-${Date.now()}-${Math.random().toString(36).substring(7)}`\n\n // Handle WebSocket events\n ws.on(\"open\", () => {\n sessionStatus = \"open\"\n callbacks?.onOpen?.()\n })\n\n ws.on(\"message\", (data: Buffer) => {\n try {\n const message = JSON.parse(data.toString()) as DeepgramRealtimeMessage\n\n // Handle different message types from Deepgram - TYPE SAFE!\n if (message.type === \"Results\") {\n // Type narrowed to DeepgramResultsMessage\n const channel = message.channel.alternatives[0]\n\n if (channel) {\n const transcript = channel.transcript\n const isFinal = message.is_final\n const words = channel.words?.map((word) => ({\n text: word.word,\n start: word.start,\n end: word.end,\n confidence: word.confidence\n }))\n\n callbacks?.onTranscript?.({\n type: \"transcript\",\n text: transcript,\n isFinal,\n words,\n confidence: channel.confidence,\n data: message\n })\n }\n } else if (message.type === \"UtteranceEnd\") {\n // Type narrowed to DeepgramUtteranceEndMessage\n callbacks?.onMetadata?.(message)\n } else if (message.type === \"Metadata\") {\n // Type narrowed to DeepgramMetadataMessage\n callbacks?.onMetadata?.(message)\n }\n } catch (error) {\n callbacks?.onError?.({\n code: \"PARSE_ERROR\",\n message: \"Failed to parse WebSocket message\",\n details: error\n })\n }\n })\n\n ws.on(\"error\", (error: Error) => {\n callbacks?.onError?.({\n code: \"WEBSOCKET_ERROR\",\n message: error.message,\n details: error\n })\n })\n\n ws.on(\"close\", (code: number, reason: Buffer) => {\n sessionStatus = \"closed\"\n callbacks?.onClose?.(code, reason.toString())\n })\n\n // Wait for connection to open\n await new Promise<void>((resolve, reject) => {\n const timeout = setTimeout(() => {\n reject(new Error(\"WebSocket connection timeout\"))\n }, 10000)\n\n ws.once(\"open\", () => {\n clearTimeout(timeout)\n resolve()\n })\n\n ws.once(\"error\", (error) => {\n clearTimeout(timeout)\n reject(error)\n })\n })\n\n // Return StreamingSession interface\n return {\n id: sessionId,\n provider: this.name,\n 
createdAt: new Date(),\n getStatus: () => sessionStatus,\n sendAudio: async (chunk: AudioChunk) => {\n if (sessionStatus !== \"open\") {\n throw new Error(`Cannot send audio: session is ${sessionStatus}`)\n }\n\n if (ws.readyState !== WebSocket.OPEN) {\n throw new Error(\"WebSocket is not open\")\n }\n\n // Send audio data\n ws.send(chunk.data)\n\n // Send close message if this is the last chunk\n if (chunk.isLast) {\n ws.send(JSON.stringify({ type: \"CloseStream\" }))\n }\n },\n close: async () => {\n if (sessionStatus === \"closed\" || sessionStatus === \"closing\") {\n return\n }\n\n sessionStatus = \"closing\"\n\n // Send CloseStream message before closing\n if (ws.readyState === WebSocket.OPEN) {\n ws.send(JSON.stringify({ type: \"CloseStream\" }))\n }\n\n // Close WebSocket\n return new Promise<void>((resolve) => {\n const timeout = setTimeout(() => {\n ws.terminate()\n resolve()\n }, 5000)\n\n ws.close()\n\n ws.once(\"close\", () => {\n clearTimeout(timeout)\n sessionStatus = \"closed\"\n resolve()\n })\n })\n }\n }\n }\n}\n\n/**\n * Factory function to create a Deepgram adapter\n */\nexport function createDeepgramAdapter(config: ProviderConfig): DeepgramAdapter {\n const adapter = new DeepgramAdapter()\n adapter.initialize(config)\n return adapter\n}\n","/**\n * Azure Speech-to-Text transcription provider adapter\n * Documentation: https://learn.microsoft.com/azure/cognitive-services/speech-service/\n */\n\nimport axios from \"axios\"\nimport type {\n AudioInput,\n ProviderCapabilities,\n TranscribeOptions,\n UnifiedTranscriptResponse\n} from \"../router/types\"\nimport { BaseAdapter, type ProviderConfig } from \"./base-adapter\"\n\n// Import generated API client functions - FULL TYPE SAFETY!\nimport {\n transcriptionsCreate,\n transcriptionsGet,\n transcriptionsListFiles\n} from \"../generated/azure/api/speechServicesAPIV31\"\n\n// Import Azure generated types\nimport type { Transcription } from \"../generated/azure/schema/transcription\"\nimport type { TranscriptionProperties } from \"../generated/azure/schema/transcriptionProperties\"\n\n/**\n * Azure Speech-to-Text transcription provider adapter\n *\n * Implements transcription for Azure Cognitive Services Speech API with support for:\n * - Batch transcription (async processing)\n * - Speaker diarization (identifying different speakers)\n * - Multi-language support\n * - Custom models and acoustic models\n * - Word-level timestamps\n * - Profanity filtering\n * - Punctuation and capitalization\n *\n * @see https://learn.microsoft.com/azure/cognitive-services/speech-service/ Azure Speech Documentation\n *\n * @example Basic transcription\n * ```typescript\n * import { AzureSTTAdapter } from '@meeting-baas/sdk';\n *\n * const adapter = new AzureSTTAdapter();\n * adapter.initialize({\n * apiKey: process.env.AZURE_SPEECH_KEY,\n * region: 'eastus' // Your Azure region\n * });\n *\n * const result = await adapter.transcribe({\n * type: 'url',\n * url: 'https://example.com/audio.mp3'\n * }, {\n * language: 'en-US',\n * diarization: true\n * });\n *\n * console.log(result.data.text);\n * ```\n *\n * @example With custom model\n * ```typescript\n * const result = await adapter.transcribe(audio, {\n * language: 'en-US',\n * diarization: true,\n * metadata: {\n * modelId: 'custom-model-id'\n * }\n * });\n * ```\n *\n * @example Async transcription with polling (Azure always returns job ID)\n * ```typescript\n * // Submit transcription (Azure is always async)\n * const result = await adapter.transcribe({\n * type: 'url',\n * url: 
'https://example.com/audio.mp3'\n * }, {\n * language: 'en-US',\n * diarization: true\n * });\n *\n * // Get transcription ID for polling\n * const transcriptionId = result.data?.id;\n * console.log('Transcription ID:', transcriptionId);\n *\n * // Poll for completion\n * const poll = async () => {\n * const status = await adapter.getTranscript(transcriptionId);\n * if (status.data?.status === 'completed') {\n * console.log('Transcript:', status.data.text);\n * } else if (status.data?.status === 'processing') {\n * setTimeout(poll, 5000); // Poll every 5 seconds\n * }\n * };\n * await poll();\n * ```\n */\nexport class AzureSTTAdapter extends BaseAdapter {\n readonly name = \"azure-stt\" as const\n readonly capabilities: ProviderCapabilities = {\n streaming: false, // Batch transcription only\n diarization: true,\n wordTimestamps: true,\n languageDetection: false,\n customVocabulary: true,\n summarization: false,\n sentimentAnalysis: false,\n entityDetection: false,\n piiRedaction: false\n }\n\n private region?: string\n protected baseUrl = \"https://eastus.api.cognitive.microsoft.com/speechtotext/v3.1\" // Default, overridden in initialize()\n\n initialize(config: ProviderConfig & { region?: string }): void {\n super.initialize(config)\n\n this.region = config.region || \"eastus\"\n this.baseUrl =\n config.baseUrl || `https://${this.region}.api.cognitive.microsoft.com/speechtotext/v3.1`\n }\n\n /**\n * Get axios config for generated API client functions\n * Configures headers and base URL using Azure subscription key\n */\n protected getAxiosConfig() {\n return super.getAxiosConfig(\"Ocp-Apim-Subscription-Key\")\n }\n\n /**\n * Submit audio for transcription\n *\n * Azure Speech-to-Text uses batch transcription which processes asynchronously.\n * You need to poll getTranscript() to retrieve the completed transcription.\n *\n * @param audio - Audio input (URL only for batch transcription)\n * @param options - Transcription options\n * @returns Response with transcription ID for polling\n */\n async transcribe(\n audio: AudioInput,\n options?: TranscribeOptions\n ): Promise<UnifiedTranscriptResponse> {\n this.validateConfig()\n\n if (audio.type !== \"url\") {\n return {\n success: false,\n provider: this.name,\n error: {\n code: \"INVALID_INPUT\",\n message: \"Azure Speech-to-Text batch transcription only supports URL input\"\n }\n }\n }\n\n try {\n const transcriptionRequest: Partial<Transcription> = {\n displayName: (options?.metadata?.displayName as string) || \"SDK Transcription\",\n description: (options?.metadata?.description as string) || \"\",\n locale: options?.language || \"en-US\",\n contentUrls: [audio.url],\n properties: this.buildTranscriptionProperties(options)\n }\n\n // Use generated API client function - FULLY TYPED!\n const response = await transcriptionsCreate(\n transcriptionRequest as Transcription,\n this.getAxiosConfig()\n )\n\n const transcription = response.data\n\n return {\n success: true,\n provider: this.name,\n data: {\n id: transcription.self?.split(\"/\").pop() || \"\",\n text: \"\", // Will be populated after polling\n status: this.normalizeStatus(transcription.status),\n language: transcription.locale,\n createdAt: transcription.createdDateTime\n },\n raw: transcription\n }\n } catch (error) {\n return this.createErrorResponse(error)\n }\n }\n\n /**\n * Get transcription result by ID\n *\n * Poll this method to check transcription status and retrieve results.\n *\n * @param transcriptId - Transcription ID from Azure\n * @returns Transcription response with 
status and results\n */\n async getTranscript(transcriptId: string): Promise<UnifiedTranscriptResponse> {\n this.validateConfig()\n\n try {\n // Get transcription status using generated API\n const statusResponse = await transcriptionsGet(transcriptId, this.getAxiosConfig())\n\n const transcription = statusResponse.data\n const status = this.normalizeStatus(transcription.status)\n\n if (status !== \"completed\") {\n return {\n success: true,\n provider: this.name,\n data: {\n id: transcriptId,\n text: \"\",\n status,\n language: transcription.locale,\n createdAt: transcription.createdDateTime\n },\n raw: transcription\n }\n }\n\n // Get transcription files (results) using generated API\n if (!transcription.links?.files) {\n return {\n success: false,\n provider: this.name,\n error: {\n code: \"NO_RESULTS\",\n message: \"Transcription completed but no result files available\"\n },\n raw: transcription\n }\n }\n\n const filesResponse = await transcriptionsListFiles(\n transcriptId,\n undefined,\n this.getAxiosConfig()\n )\n const files = filesResponse.data?.values || []\n\n // Find the transcription result file\n const resultFile = files.find((file: any) => file.kind === \"Transcription\")\n\n if (!resultFile?.links?.contentUrl) {\n return {\n success: false,\n provider: this.name,\n error: {\n code: \"NO_RESULTS\",\n message: \"Transcription result file not found\"\n },\n raw: transcription\n }\n }\n\n // Fetch the actual transcription content (contentUrl is an external link, not part of API)\n const contentResponse = await axios.get(resultFile.links.contentUrl)\n const transcriptionData = contentResponse.data\n\n return this.normalizeResponse(transcription, transcriptionData)\n } catch (error) {\n return this.createErrorResponse(error)\n }\n }\n\n /**\n * Build Azure-specific transcription properties\n */\n private buildTranscriptionProperties(options?: TranscribeOptions): TranscriptionProperties {\n const properties: any = {\n wordLevelTimestampsEnabled: options?.wordTimestamps ?? 
true,\n punctuationMode: \"DictatedAndAutomatic\",\n profanityFilterMode: \"Masked\"\n }\n\n if (options?.diarization) {\n properties.diarizationEnabled = true\n if (options.speakersExpected) {\n properties.diarization = {\n speakers: {\n minCount: 1,\n maxCount: options.speakersExpected\n }\n }\n }\n }\n\n if (options?.customVocabulary && options.customVocabulary.length > 0) {\n properties.customProperties = {\n phrases: options.customVocabulary.join(\",\")\n }\n }\n\n return properties\n }\n\n /**\n * Normalize Azure status to unified status\n */\n private normalizeStatus(status: any): \"queued\" | \"processing\" | \"completed\" | \"error\" {\n const statusStr = status?.toString().toLowerCase() || \"\"\n\n if (statusStr.includes(\"succeeded\")) return \"completed\"\n if (statusStr.includes(\"running\")) return \"processing\"\n if (statusStr.includes(\"notstarted\")) return \"queued\"\n if (statusStr.includes(\"failed\")) return \"error\"\n\n return \"queued\"\n }\n\n /**\n * Normalize Azure transcription response to unified format\n */\n private normalizeResponse(\n transcription: Transcription,\n transcriptionData: any\n ): UnifiedTranscriptResponse {\n const combinedPhrases = transcriptionData.combinedRecognizedPhrases || []\n const recognizedPhrases = transcriptionData.recognizedPhrases || []\n\n // Get full text from combined phrases\n const fullText =\n combinedPhrases.map((phrase: any) => phrase.display || phrase.lexical).join(\" \") || \"\"\n\n // Extract words with timestamps\n const words = recognizedPhrases.flatMap((phrase: any) =>\n (phrase.nBest?.[0]?.words || []).map((word: any) => ({\n text: word.word,\n start: word.offsetInTicks / 10000000, // Convert ticks to seconds\n end: (word.offsetInTicks + word.durationInTicks) / 10000000,\n confidence: word.confidence,\n speaker: phrase.speaker !== undefined ? phrase.speaker.toString() : undefined\n }))\n )\n\n // Extract speakers if diarization was enabled\n const speakers =\n recognizedPhrases.length > 0 && recognizedPhrases[0].speaker !== undefined\n ? Array.from(\n new Set(\n recognizedPhrases.map((p: any) => p.speaker).filter((s: any) => s !== undefined)\n )\n ).map((speakerId: unknown) => ({\n id: String(speakerId),\n label: `Speaker ${speakerId}`\n }))\n : undefined\n\n return {\n success: true,\n provider: this.name,\n data: {\n id: transcription.self?.split(\"/\").pop() || \"\",\n text: fullText,\n confidence: recognizedPhrases[0]?.nBest?.[0]?.confidence,\n status: \"completed\",\n language: transcription.locale,\n duration: transcriptionData.duration ? transcriptionData.duration / 10000000 : undefined,\n speakers,\n words: words.length > 0 ? 
words : undefined,\n createdAt: transcription.createdDateTime,\n completedAt: transcription.lastActionDateTime\n },\n raw: {\n transcription,\n transcriptionData\n }\n }\n }\n}\n\n/**\n * Factory function to create an Azure STT adapter\n */\nexport function createAzureSTTAdapter(\n config: ProviderConfig & { region?: string }\n): AzureSTTAdapter {\n const adapter = new AzureSTTAdapter()\n adapter.initialize(config)\n return adapter\n}\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * Speech Services API v3.1\n * Speech Services API v3.1.\n * OpenAPI spec version: v3.1\n */\n\nimport { faker } from \"@faker-js/faker\"\nimport type { AxiosRequestConfig, AxiosResponse } from \"axios\"\nimport axios from \"axios\"\nimport { delay, HttpResponse, http } from \"msw\"\nimport type {\n BaseModel,\n CommitBlocksEntry,\n CustomModel,\n Dataset,\n DatasetLocales,\n DatasetsGetFileParams,\n DatasetsListFilesParams,\n DatasetsListParams,\n DatasetsUploadBlockParams,\n DatasetsUploadBody,\n DatasetUpdate,\n Endpoint,\n EndpointsDeleteBaseModelLogsParams,\n EndpointsDeleteLogsParams,\n EndpointsGetBaseModelLogParams,\n EndpointsGetLogParams,\n EndpointsListBaseModelLogsParams,\n EndpointsListLogsParams,\n EndpointsListParams,\n EndpointUpdate,\n Evaluation,\n EvaluationsGetFileParams,\n EvaluationsListFilesParams,\n EvaluationsListParams,\n EvaluationUpdate,\n File,\n ModelCopy,\n ModelManifest,\n ModelsGetBaseModelManifestParams,\n ModelsGetCustomModelManifestParams,\n ModelsGetFileParams,\n ModelsListBaseModelsParams,\n ModelsListCustomModelsParams,\n ModelsListFilesParams,\n ModelUpdate,\n PaginatedBaseModels,\n PaginatedCustomModels,\n PaginatedDatasets,\n PaginatedEndpoints,\n PaginatedEvaluations,\n PaginatedFiles,\n PaginatedProjects,\n PaginatedTranscriptions,\n PaginatedWebHooks,\n Project,\n ProjectsListDatasetsParams,\n ProjectsListEndpointsParams,\n ProjectsListEvaluationsParams,\n ProjectsListModelsParams,\n ProjectsListParams,\n ProjectsListTranscriptionsParams,\n ProjectUpdate,\n ServiceHealth,\n Transcription,\n TranscriptionsGetFileParams,\n TranscriptionsListFilesParams,\n TranscriptionsListParams,\n TranscriptionUpdate,\n UploadedBlocks,\n WebHook,\n WebHooksListParams,\n WebHookUpdate\n} from \"../schema\"\nimport {\n DatasetKind,\n FileKind,\n HealthStatus,\n ProfanityFilterMode,\n PunctuationMode,\n Status\n} from \"../schema\"\n\n// https://stackoverflow.com/questions/49579094/typescript-conditional-types-filter-out-readonly-properties-pick-only-requir/49579497#49579497\ntype IfEquals<X, Y, A = X, B = never> = (<T>() => T extends X ? 1 : 2) extends <T>() => T extends Y\n ? 1\n : 2\n ? A\n : B\n\ntype WritableKeys<T> = {\n [P in keyof T]-?: IfEquals<{ [Q in P]: T[P] }, { -readonly [Q in P]: T[P] }, P>\n}[keyof T]\n\ntype UnionToIntersection<U> = (U extends any ? (k: U) => void : never) extends (k: infer I) => void\n ? I\n : never\ntype DistributeReadOnlyOverUnions<T> = T extends any ? NonReadonly<T> : never\n\ntype Writable<T> = Pick<T, WritableKeys<T>>\ntype NonReadonly<T> = [T] extends [UnionToIntersection<T>]\n ? {\n [P in keyof Writable<T>]: T[P] extends object ? 
NonReadonly<NonNullable<T[P]>> : T[P]\n }\n : DistributeReadOnlyOverUnions<T>\n\n/**\n * @summary Gets a list of supported locales for datasets.\n */\nexport const datasetsListSupportedLocales = <TData = AxiosResponse<DatasetLocales>>(\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/datasets/locales\", options)\n}\n\n/**\n * @summary Gets a list of datasets for the authenticated subscription.\n */\nexport const datasetsList = <TData = AxiosResponse<PaginatedDatasets>>(\n params?: DatasetsListParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/datasets\", {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Uploads and creates a new dataset by getting the data from a specified URL or starts waiting for data blocks to be uploaded.\n */\nexport const datasetsCreate = <TData = AxiosResponse<Dataset>>(\n dataset: NonReadonly<Dataset>,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(\"/datasets\", dataset, options)\n}\n\n/**\n * @summary Gets the dataset identified by the given ID.\n */\nexport const datasetsGet = <TData = AxiosResponse<Dataset>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/datasets/${id}`, options)\n}\n\n/**\n * @summary Updates the mutable details of the dataset identified by its ID.\n */\nexport const datasetsUpdate = <TData = AxiosResponse<Dataset>>(\n id: string,\n datasetUpdate: DatasetUpdate,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.patch(`/datasets/${id}`, datasetUpdate, options)\n}\n\n/**\n * @summary Deletes the specified dataset.\n */\nexport const datasetsDelete = <TData = AxiosResponse<void>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.delete(`/datasets/${id}`, options)\n}\n\n/**\n * @summary Gets the list of uploaded blocks for this dataset.\n */\nexport const datasetsGetBlocks = <TData = AxiosResponse<UploadedBlocks>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/datasets/${id}/blocks`, options)\n}\n\n/**\n * @summary Upload a block of data for the dataset. 
The maximum size of the block is 8MiB.\n */\nexport const datasetsUploadBlock = <TData = AxiosResponse<void>>(\n id: string,\n datasetsUploadBlockBody: Blob,\n params: DatasetsUploadBlockParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.put(`/datasets/${id}/blocks`, datasetsUploadBlockBody, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @deprecated\n * @summary Uploads data and creates a new dataset.\n */\nexport const datasetsUpload = <TData = AxiosResponse<Dataset>>(\n datasetsUploadBody: DatasetsUploadBody,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n const formData = new FormData()\n if (datasetsUploadBody.project !== undefined) {\n formData.append(\"project\", datasetsUploadBody.project)\n }\n formData.append(\"displayName\", datasetsUploadBody.displayName)\n if (datasetsUploadBody.description !== undefined) {\n formData.append(\"description\", datasetsUploadBody.description)\n }\n formData.append(\"locale\", datasetsUploadBody.locale)\n formData.append(\"kind\", datasetsUploadBody.kind)\n if (datasetsUploadBody.customProperties !== undefined) {\n formData.append(\"customProperties\", datasetsUploadBody.customProperties)\n }\n if (datasetsUploadBody.data !== undefined) {\n formData.append(\"data\", datasetsUploadBody.data)\n }\n if (datasetsUploadBody.email !== undefined) {\n formData.append(\"email\", datasetsUploadBody.email)\n }\n\n return axios.post(\"/datasets/upload\", formData, options)\n}\n\n/**\n * @summary Commit block list to complete the upload of the dataset.\n */\nexport const datasetsCommitBlocks = <TData = AxiosResponse<void>>(\n id: string,\n commitBlocksEntry: CommitBlocksEntry[],\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(`/datasets/${id}/blocks:commit`, commitBlocksEntry, options)\n}\n\n/**\n * @summary Gets the files of the dataset identified by the given ID.\n */\nexport const datasetsListFiles = <TData = AxiosResponse<PaginatedFiles>>(\n id: string,\n params?: DatasetsListFilesParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/datasets/${id}/files`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Gets one specific file (identified with fileId) from a dataset (identified with id).\n */\nexport const datasetsGetFile = <TData = AxiosResponse<File>>(\n id: string,\n fileId: string,\n params?: DatasetsGetFileParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/datasets/${id}/files/${fileId}`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Gets a list of supported locales for endpoint creations.\n */\nexport const endpointsListSupportedLocales = <TData = AxiosResponse<string[]>>(\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/endpoints/locales\", options)\n}\n\n/**\n * @summary Gets the list of endpoints for the authenticated subscription.\n */\nexport const endpointsList = <TData = AxiosResponse<PaginatedEndpoints>>(\n params?: EndpointsListParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/endpoints\", {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Creates a new endpoint.\n */\nexport const endpointsCreate = <TData = AxiosResponse<Endpoint>>(\n endpoint: NonReadonly<Endpoint>,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(\"/endpoints\", endpoint, options)\n}\n\n/**\n * @summary Gets the 
endpoint identified by the given ID.\n */\nexport const endpointsGet = <TData = AxiosResponse<Endpoint>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/endpoints/${id}`, options)\n}\n\n/**\n * @summary Deletes the endpoint identified by the given ID.\n */\nexport const endpointsDelete = <TData = AxiosResponse<void>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.delete(`/endpoints/${id}`, options)\n}\n\n/**\n * @summary Updates the metadata of the endpoint identified by the given ID.\n */\nexport const endpointsUpdate = <TData = AxiosResponse<Endpoint>>(\n id: string,\n endpointUpdate: EndpointUpdate,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.patch(`/endpoints/${id}`, endpointUpdate, options)\n}\n\n/**\n * @summary Gets the list of audio and transcription logs that have been stored for a given endpoint.\n */\nexport const endpointsListLogs = <TData = AxiosResponse<PaginatedFiles>>(\n id: string,\n params?: EndpointsListLogsParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/endpoints/${id}/files/logs`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * The deletion process is done asynchronously and can take up to one day depending on the amount of log files.\n * @summary Deletes the specified audio and transcription logs that have been stored for a given endpoint. It deletes all logs before (and including) a specific day.\n */\nexport const endpointsDeleteLogs = <TData = AxiosResponse<void>>(\n id: string,\n params?: EndpointsDeleteLogsParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.delete(`/endpoints/${id}/files/logs`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Gets a specific audio or transcription log for a given endpoint.\n */\nexport const endpointsGetLog = <TData = AxiosResponse<File>>(\n id: string,\n logId: string,\n params?: EndpointsGetLogParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/endpoints/${id}/files/logs/${logId}`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Deletes one audio or transcription log that has been stored for a given endpoint.\n */\nexport const endpointsDeleteLog = <TData = AxiosResponse<void>>(\n id: string,\n logId: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.delete(`/endpoints/${id}/files/logs/${logId}`, options)\n}\n\n/**\n * @summary Gets the list of audio and transcription logs that have been stored when using the default base model of a given language.\n */\nexport const endpointsListBaseModelLogs = <TData = AxiosResponse<PaginatedFiles>>(\n locale: string,\n params?: EndpointsListBaseModelLogsParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/endpoints/base/${locale}/files/logs`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * Deletion process is done asynchronously and can take up to one day depending on the amount of log files.\n * @summary Deletes the specified audio and transcription logs that have been stored when using the default base model of a given language. 
It deletes all logs before (and including) a specific day.\n */\nexport const endpointsDeleteBaseModelLogs = <TData = AxiosResponse<void>>(\n locale: string,\n params?: EndpointsDeleteBaseModelLogsParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.delete(`/endpoints/base/${locale}/files/logs`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Gets a specific audio or transcription log for the default base model in a given language.\n */\nexport const endpointsGetBaseModelLog = <TData = AxiosResponse<File>>(\n locale: string,\n logId: string,\n params?: EndpointsGetBaseModelLogParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/endpoints/base/${locale}/files/logs/${logId}`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Deletes one audio or transcription log that has been stored when using the default base model of a given language.\n */\nexport const endpointsDeleteBaseModelLog = <TData = AxiosResponse<void>>(\n locale: string,\n logId: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.delete(`/endpoints/base/${locale}/files/logs/${logId}`, options)\n}\n\n/**\n * @summary Gets a list of supported locales for evaluations.\n */\nexport const evaluationsListSupportedLocales = <TData = AxiosResponse<string[]>>(\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/evaluations/locales\", options)\n}\n\n/**\n * @summary Gets the list of evaluations for the authenticated subscription.\n */\nexport const evaluationsList = <TData = AxiosResponse<PaginatedEvaluations>>(\n params?: EvaluationsListParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/evaluations\", {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Creates a new evaluation.\n */\nexport const evaluationsCreate = <TData = AxiosResponse<Evaluation>>(\n evaluation: NonReadonly<Evaluation>,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(\"/evaluations\", evaluation, options)\n}\n\n/**\n * @summary Gets the files of the evaluation identified by the given ID.\n */\nexport const evaluationsListFiles = <TData = AxiosResponse<PaginatedFiles>>(\n id: string,\n params?: EvaluationsListFilesParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/evaluations/${id}/files`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Gets one specific file (identified with fileId) from an evaluation (identified with id).\n */\nexport const evaluationsGetFile = <TData = AxiosResponse<File>>(\n id: string,\n fileId: string,\n params?: EvaluationsGetFileParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/evaluations/${id}/files/${fileId}`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Gets the evaluation identified by the given ID.\n */\nexport const evaluationsGet = <TData = AxiosResponse<Evaluation>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/evaluations/${id}`, options)\n}\n\n/**\n * @summary Updates the mutable details of the evaluation identified by its id.\n */\nexport const evaluationsUpdate = <TData = AxiosResponse<Evaluation>>(\n id: string,\n evaluationUpdate: EvaluationUpdate,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.patch(`/evaluations/${id}`, 
evaluationUpdate, options)\n}\n\n/**\n * @summary Deletes the evaluation identified by the given ID.\n */\nexport const evaluationsDelete = <TData = AxiosResponse<void>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.delete(`/evaluations/${id}`, options)\n}\n\n/**\n * @summary Gets a list of supported locales for model adaptation.\n */\nexport const modelsListSupportedLocales = <TData = AxiosResponse<string[]>>(\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/models/locales\", options)\n}\n\n/**\n * @summary Gets the list of custom models for the authenticated subscription.\n */\nexport const modelsListCustomModels = <TData = AxiosResponse<PaginatedCustomModels>>(\n params?: ModelsListCustomModelsParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/models\", {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Creates a new model.\n */\nexport const modelsCreate = <TData = AxiosResponse<CustomModel>>(\n customModel: NonReadonly<CustomModel>,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(\"/models\", customModel, options)\n}\n\n/**\n * @summary Gets the list of base models for the authenticated subscription.\n */\nexport const modelsListBaseModels = <TData = AxiosResponse<PaginatedBaseModels>>(\n params?: ModelsListBaseModelsParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/models/base\", {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Gets the model identified by the given ID.\n */\nexport const modelsGetCustomModel = <TData = AxiosResponse<CustomModel>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/models/${id}`, options)\n}\n\n/**\n * @summary Updates the metadata of the model identified by the given ID.\n */\nexport const modelsUpdate = <TData = AxiosResponse<CustomModel>>(\n id: string,\n modelUpdate: ModelUpdate,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.patch(`/models/${id}`, modelUpdate, options)\n}\n\n/**\n * @summary Deletes the model identified by the given ID.\n */\nexport const modelsDelete = <TData = AxiosResponse<void>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.delete(`/models/${id}`, options)\n}\n\n/**\n * @summary Gets the base model identified by the given ID.\n */\nexport const modelsGetBaseModel = <TData = AxiosResponse<BaseModel>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/models/base/${id}`, options)\n}\n\n/**\n * This method can be used to copy a model from one location to another. 
If the target subscription\nkey belongs to a subscription created for another location, the model will be copied to that location.\nOnly adapted models are allowed to copy to another subscription.\n * @summary Copies a model from one subscription to another.\n */\nexport const modelsCopyTo = <TData = AxiosResponse<CustomModel>>(\n id: string,\n modelCopy: ModelCopy,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(`/models/${id}:copyto`, modelCopy, options)\n}\n\n/**\n * @summary Returns a manifest for this model which can be used in an on-premise container.\n */\nexport const modelsGetCustomModelManifest = <TData = AxiosResponse<ModelManifest>>(\n id: string,\n params?: ModelsGetCustomModelManifestParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/models/${id}/manifest`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Returns a manifest for this base model which can be used in an on-premise container.\n */\nexport const modelsGetBaseModelManifest = <TData = AxiosResponse<ModelManifest>>(\n id: string,\n params?: ModelsGetBaseModelManifestParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/models/base/${id}/manifest`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Gets the files of the model identified by the given ID.\n */\nexport const modelsListFiles = <TData = AxiosResponse<PaginatedFiles>>(\n id: string,\n params?: ModelsListFilesParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/models/${id}/files`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Gets one specific file (identified with fileId) from a model (identified with id).\n */\nexport const modelsGetFile = <TData = AxiosResponse<File>>(\n id: string,\n fileId: string,\n params?: ModelsGetFileParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/models/${id}/files/${fileId}`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Gets the list of supported locales.\n */\nexport const projectsListSupportedLocales = <TData = AxiosResponse<string[]>>(\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/projects/locales\", options)\n}\n\n/**\n * @summary Gets the list of projects for the authenticated subscription.\n */\nexport const projectsList = <TData = AxiosResponse<PaginatedProjects>>(\n params?: ProjectsListParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/projects\", {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Creates a new project.\n */\nexport const projectsCreate = <TData = AxiosResponse<Project>>(\n project: NonReadonly<Project>,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(\"/projects\", project, options)\n}\n\n/**\n * @summary Gets the project identified by the given ID.\n */\nexport const projectsGet = <TData = AxiosResponse<Project>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/projects/${id}`, options)\n}\n\n/**\n * @summary Updates the project identified by the given ID.\n */\nexport const projectsUpdate = <TData = AxiosResponse<Project>>(\n id: string,\n projectUpdate: ProjectUpdate,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.patch(`/projects/${id}`, projectUpdate, options)\n}\n\n/**\n * @summary Deletes 
the project identified by the given ID.\n */\nexport const projectsDelete = <TData = AxiosResponse<void>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.delete(`/projects/${id}`, options)\n}\n\n/**\n * @summary Gets the list of evaluations for specified project.\n */\nexport const projectsListEvaluations = <TData = AxiosResponse<PaginatedEvaluations>>(\n id: string,\n params?: ProjectsListEvaluationsParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/projects/${id}/evaluations`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Gets the list of datasets for specified project.\n */\nexport const projectsListDatasets = <TData = AxiosResponse<PaginatedDatasets>>(\n id: string,\n params?: ProjectsListDatasetsParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/projects/${id}/datasets`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Gets the list of endpoints for specified project.\n */\nexport const projectsListEndpoints = <TData = AxiosResponse<PaginatedEndpoints>>(\n id: string,\n params?: ProjectsListEndpointsParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/projects/${id}/endpoints`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Gets the list of models for specified project.\n */\nexport const projectsListModels = <TData = AxiosResponse<PaginatedCustomModels>>(\n id: string,\n params?: ProjectsListModelsParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/projects/${id}/models`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Gets the list of transcriptions for specified project.\n */\nexport const projectsListTranscriptions = <TData = AxiosResponse<PaginatedTranscriptions>>(\n id: string,\n params?: ProjectsListTranscriptionsParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/projects/${id}/transcriptions`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Gets a list of supported locales for offline transcriptions.\n */\nexport const transcriptionsListSupportedLocales = <TData = AxiosResponse<string[]>>(\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/transcriptions/locales\", options)\n}\n\n/**\n * @summary Gets a list of transcriptions for the authenticated subscription.\n */\nexport const transcriptionsList = <TData = AxiosResponse<PaginatedTranscriptions>>(\n params?: TranscriptionsListParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/transcriptions\", {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Creates a new transcription.\n */\nexport const transcriptionsCreate = <TData = AxiosResponse<Transcription>>(\n transcription: NonReadonly<Transcription>,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(\"/transcriptions\", transcription, options)\n}\n\n/**\n * @summary Gets the transcription identified by the given ID.\n */\nexport const transcriptionsGet = <TData = AxiosResponse<Transcription>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/transcriptions/${id}`, options)\n}\n\n/**\n * @summary Updates the mutable details of the transcription identified by its ID.\n */\nexport const transcriptionsUpdate = <TData = 
AxiosResponse<Transcription>>(\n id: string,\n transcriptionUpdate: TranscriptionUpdate,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.patch(`/transcriptions/${id}`, transcriptionUpdate, options)\n}\n\n/**\n * @summary Deletes the specified transcription task.\n */\nexport const transcriptionsDelete = <TData = AxiosResponse<void>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.delete(`/transcriptions/${id}`, options)\n}\n\n/**\n * @summary Gets the files of the transcription identified by the given ID.\n */\nexport const transcriptionsListFiles = <TData = AxiosResponse<PaginatedFiles>>(\n id: string,\n params?: TranscriptionsListFilesParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/transcriptions/${id}/files`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Gets one specific file (identified with fileId) from a transcription (identified with id).\n */\nexport const transcriptionsGetFile = <TData = AxiosResponse<File>>(\n id: string,\n fileId: string,\n params?: TranscriptionsGetFileParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/transcriptions/${id}/files/${fileId}`, {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * @summary Gets the list of web hooks for the authenticated subscription.\n */\nexport const webHooksList = <TData = AxiosResponse<PaginatedWebHooks>>(\n params?: WebHooksListParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/webhooks\", {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * If the property secret in the configuration is present and contains a non-empty string, it will be used to create a SHA256 hash of the payload with\nthe secret as HMAC key. This hash will be set as X-MicrosoftSpeechServices-Signature header when calling back into the registered URL.\n \nWhen calling back into the registered URL, the request will contain a X-MicrosoftSpeechServices-Event header containing one of the registered event\ntypes. There will be one request per registered event type.\n \nAfter successfully registering the web hook, it will not be usable until a challenge/response is completed. To do this, a request with the event type\nchallenge will be made with a query parameter called validationToken. Respond to the challenge with a 200 OK containing the value of the validationToken\nquery parameter as the response body. When the challenge/response is successfully completed, the web hook will begin receiving events.\n * @summary Creates a new web hook.\n */\nexport const webHooksCreate = <TData = AxiosResponse<WebHook>>(\n webHook: NonReadonly<WebHook>,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(\"/webhooks\", webHook, options)\n}\n\n/**\n * @summary Gets the web hook identified by the given ID.\n */\nexport const webHooksGet = <TData = AxiosResponse<WebHook>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/webhooks/${id}`, options)\n}\n\n/**\n * If the property secret in the configuration is omitted or contains an empty string, future callbacks won't contain X-MicrosoftSpeechServices-Signature\nheaders. If the property contains a non-empty string, it will be used to create a SHA256 hash of the payload with the secret as HMAC key. 
This hash\nwill be set as X-MicrosoftSpeechServices-Signature header when calling back into the registered URL.\n \nIf the URL changes, the web hook will stop receiving events until a\nchallenge/response is completed. To do this, a request with the event type challenge will be made with a query parameter called validationToken.\nRespond to the challenge with a 200 OK containing the value of the validationToken query parameter as the response body. When the challenge/response\nis successfully completed, the web hook will begin receiving events.\n * @summary Updates the web hook identified by the given ID.\n */\nexport const webHooksUpdate = <TData = AxiosResponse<WebHook>>(\n id: string,\n webHookUpdate: WebHookUpdate,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.patch(`/webhooks/${id}`, webHookUpdate, options)\n}\n\n/**\n * @summary Deletes the web hook identified by the given ID.\n */\nexport const webHooksDelete = <TData = AxiosResponse<void>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.delete(`/webhooks/${id}`, options)\n}\n\n/**\n * The request body of the POST request sent to the registered web hook URL is of the same shape as in the GET request for a specific hook.\nThe Swagger Schema ID of the model is WebHookV3.\n \nThe request will contain a X-MicrosoftSpeechServices-Event header with the value ping. If the web hook was registered with\na secret it will contain a X-MicrosoftSpeechServices-Signature header with an SHA256 hash of the payload with\nthe secret as HMAC key. The hash is base64 encoded.\n * @summary Sends a ping event to the registered URL.\n */\nexport const webHooksPing = <TData = AxiosResponse<void>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(`/webhooks/${id}:ping`, undefined, options)\n}\n\n/**\n * The payload will be generated from the last entity that would have invoked the web hook. If no entity is present for any of the registered event types,\nthe POST will respond with 204. If a test request can be made, it will respond with 200.\nThe request will contain a X-MicrosoftSpeechServices-Event header with the respective registered event type.\nIf the web hook was registered with a secret it will contain a X-MicrosoftSpeechServices-Signature header with an SHA256 hash of the payload with\nthe secret as HMAC key. 
The hash is base64 encoded.\n * @summary Sends a request for each registered event type to the registered URL.\n */\nexport const webHooksTest = <TData = AxiosResponse<void>>(\n id: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(`/webhooks/${id}:test`, undefined, options)\n}\n\n/**\n * @summary Returns the overall health of the service and optionally of the different subcomponents.\n */\nexport const serviceHealthGet = <TData = AxiosResponse<ServiceHealth>>(\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/healthstatus\", options)\n}\n\nexport type DatasetsListSupportedLocalesResult = AxiosResponse<DatasetLocales>\nexport type DatasetsListResult = AxiosResponse<PaginatedDatasets>\nexport type DatasetsCreateResult = AxiosResponse<Dataset>\nexport type DatasetsGetResult = AxiosResponse<Dataset>\nexport type DatasetsUpdateResult = AxiosResponse<Dataset>\nexport type DatasetsDeleteResult = AxiosResponse<void>\nexport type DatasetsGetBlocksResult = AxiosResponse<UploadedBlocks>\nexport type DatasetsUploadBlockResult = AxiosResponse<void>\nexport type DatasetsUploadResult = AxiosResponse<Dataset>\nexport type DatasetsCommitBlocksResult = AxiosResponse<void>\nexport type DatasetsListFilesResult = AxiosResponse<PaginatedFiles>\nexport type DatasetsGetFileResult = AxiosResponse<File>\nexport type EndpointsListSupportedLocalesResult = AxiosResponse<string[]>\nexport type EndpointsListResult = AxiosResponse<PaginatedEndpoints>\nexport type EndpointsCreateResult = AxiosResponse<Endpoint>\nexport type EndpointsGetResult = AxiosResponse<Endpoint>\nexport type EndpointsDeleteResult = AxiosResponse<void>\nexport type EndpointsUpdateResult = AxiosResponse<Endpoint>\nexport type EndpointsListLogsResult = AxiosResponse<PaginatedFiles>\nexport type EndpointsDeleteLogsResult = AxiosResponse<void>\nexport type EndpointsGetLogResult = AxiosResponse<File>\nexport type EndpointsDeleteLogResult = AxiosResponse<void>\nexport type EndpointsListBaseModelLogsResult = AxiosResponse<PaginatedFiles>\nexport type EndpointsDeleteBaseModelLogsResult = AxiosResponse<void>\nexport type EndpointsGetBaseModelLogResult = AxiosResponse<File>\nexport type EndpointsDeleteBaseModelLogResult = AxiosResponse<void>\nexport type EvaluationsListSupportedLocalesResult = AxiosResponse<string[]>\nexport type EvaluationsListResult = AxiosResponse<PaginatedEvaluations>\nexport type EvaluationsCreateResult = AxiosResponse<Evaluation>\nexport type EvaluationsListFilesResult = AxiosResponse<PaginatedFiles>\nexport type EvaluationsGetFileResult = AxiosResponse<File>\nexport type EvaluationsGetResult = AxiosResponse<Evaluation>\nexport type EvaluationsUpdateResult = AxiosResponse<Evaluation>\nexport type EvaluationsDeleteResult = AxiosResponse<void>\nexport type ModelsListSupportedLocalesResult = AxiosResponse<string[]>\nexport type ModelsListCustomModelsResult = AxiosResponse<PaginatedCustomModels>\nexport type ModelsCreateResult = AxiosResponse<CustomModel>\nexport type ModelsListBaseModelsResult = AxiosResponse<PaginatedBaseModels>\nexport type ModelsGetCustomModelResult = AxiosResponse<CustomModel>\nexport type ModelsUpdateResult = AxiosResponse<CustomModel>\nexport type ModelsDeleteResult = AxiosResponse<void>\nexport type ModelsGetBaseModelResult = AxiosResponse<BaseModel>\nexport type ModelsCopyToResult = AxiosResponse<CustomModel>\nexport type ModelsGetCustomModelManifestResult = AxiosResponse<ModelManifest>\nexport type ModelsGetBaseModelManifestResult = 
AxiosResponse<ModelManifest>\nexport type ModelsListFilesResult = AxiosResponse<PaginatedFiles>\nexport type ModelsGetFileResult = AxiosResponse<File>\nexport type ProjectsListSupportedLocalesResult = AxiosResponse<string[]>\nexport type ProjectsListResult = AxiosResponse<PaginatedProjects>\nexport type ProjectsCreateResult = AxiosResponse<Project>\nexport type ProjectsGetResult = AxiosResponse<Project>\nexport type ProjectsUpdateResult = AxiosResponse<Project>\nexport type ProjectsDeleteResult = AxiosResponse<void>\nexport type ProjectsListEvaluationsResult = AxiosResponse<PaginatedEvaluations>\nexport type ProjectsListDatasetsResult = AxiosResponse<PaginatedDatasets>\nexport type ProjectsListEndpointsResult = AxiosResponse<PaginatedEndpoints>\nexport type ProjectsListModelsResult = AxiosResponse<PaginatedCustomModels>\nexport type ProjectsListTranscriptionsResult = AxiosResponse<PaginatedTranscriptions>\nexport type TranscriptionsListSupportedLocalesResult = AxiosResponse<string[]>\nexport type TranscriptionsListResult = AxiosResponse<PaginatedTranscriptions>\nexport type TranscriptionsCreateResult = AxiosResponse<Transcription>\nexport type TranscriptionsGetResult = AxiosResponse<Transcription>\nexport type TranscriptionsUpdateResult = AxiosResponse<Transcription>\nexport type TranscriptionsDeleteResult = AxiosResponse<void>\nexport type TranscriptionsListFilesResult = AxiosResponse<PaginatedFiles>\nexport type TranscriptionsGetFileResult = AxiosResponse<File>\nexport type WebHooksListResult = AxiosResponse<PaginatedWebHooks>\nexport type WebHooksCreateResult = AxiosResponse<WebHook>\nexport type WebHooksGetResult = AxiosResponse<WebHook>\nexport type WebHooksUpdateResult = AxiosResponse<WebHook>\nexport type WebHooksDeleteResult = AxiosResponse<void>\nexport type WebHooksPingResult = AxiosResponse<void>\nexport type WebHooksTestResult = AxiosResponse<void>\nexport type ServiceHealthGetResult = AxiosResponse<ServiceHealth>\n\nexport const getDatasetsListSupportedLocalesResponseMock = (): DatasetLocales => ({\n [faker.string.alphanumeric(5)]: faker.helpers.arrayElements(Object.values(DatasetKind))\n})\n\nexport const getDatasetsListResponseMock = (\n overrideResponse: Partial<PaginatedDatasets> = {}\n): PaginatedDatasets => ({\n values: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n links: faker.helpers.arrayElement([\n {\n files: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n commitBlocks: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n listBlocks: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n uploadBlocks: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n acceptedLineCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n rejectedLineCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n kind: faker.helpers.arrayElement(Object.values(DatasetKind)),\n self: 
faker.helpers.arrayElement([faker.internet.url(), undefined]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n locale: faker.string.alpha(20),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n })),\n undefined\n ]),\n \"@nextLink\": faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getDatasetsCreateResponseMock = (\n overrideResponse: Partial<Dataset> = {}\n): Dataset => ({\n links: faker.helpers.arrayElement([\n {\n files: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n commitBlocks: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n listBlocks: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n uploadBlocks: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n acceptedLineCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n rejectedLineCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n kind: faker.helpers.arrayElement(Object.values(DatasetKind)),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n locale: faker.string.alpha(20),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n ...overrideResponse\n})\n\nexport const getDatasetsGetResponseMock = (overrideResponse: Partial<Dataset> = {}): Dataset => ({\n links: faker.helpers.arrayElement([\n {\n files: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n commitBlocks: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n listBlocks: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n uploadBlocks: 
faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n acceptedLineCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n rejectedLineCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n kind: faker.helpers.arrayElement(Object.values(DatasetKind)),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n locale: faker.string.alpha(20),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n ...overrideResponse\n})\n\nexport const getDatasetsUpdateResponseMock = (\n overrideResponse: Partial<Dataset> = {}\n): Dataset => ({\n links: faker.helpers.arrayElement([\n {\n files: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n commitBlocks: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n listBlocks: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n uploadBlocks: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n acceptedLineCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n rejectedLineCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n kind: faker.helpers.arrayElement(Object.values(DatasetKind)),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n locale: faker.string.alpha(20),\n lastActionDateTime: faker.helpers.arrayElement([\n 
`${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n ...overrideResponse\n})\n\nexport const getDatasetsGetBlocksResponseMock = (\n overrideResponse: Partial<UploadedBlocks> = {}\n): UploadedBlocks => ({\n committedBlocks: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n name: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n size: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ])\n })),\n undefined\n ]),\n uncommittedBlocks: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n name: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n size: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ])\n })),\n undefined\n ]),\n ...overrideResponse\n})\n\nexport const getDatasetsUploadResponseMock = (\n overrideResponse: Partial<Dataset> = {}\n): Dataset => ({\n links: faker.helpers.arrayElement([\n {\n files: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n commitBlocks: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n listBlocks: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n uploadBlocks: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n acceptedLineCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n rejectedLineCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n kind: faker.helpers.arrayElement(Object.values(DatasetKind)),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n locale: faker.string.alpha(20),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n ...overrideResponse\n})\n\nexport const getDatasetsListFilesResponseMock = (\n overrideResponse: Partial<PaginatedFiles> = {}\n): PaginatedFiles => ({\n values: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) 
=> i + 1).map(() => ({\n kind: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(FileKind)),\n undefined\n ]),\n links: faker.helpers.arrayElement([\n { contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n size: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n name: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined])\n })),\n undefined\n ]),\n \"@nextLink\": faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getDatasetsGetFileResponseMock = (overrideResponse: Partial<File> = {}): File => ({\n kind: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(FileKind)),\n undefined\n ]),\n links: faker.helpers.arrayElement([\n { contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n size: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n name: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getEndpointsListSupportedLocalesResponseMock = (): string[] =>\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, () => faker.word.sample())\n\nexport const getEndpointsListResponseMock = (\n overrideResponse: Partial<PaginatedEndpoints> = {}\n): PaginatedEndpoints => ({\n values: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n links: faker.helpers.arrayElement([\n {\n restInteractive: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n restConversation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n restDictation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n webSocketInteractive: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n webSocketConversation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n webSocketDictation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n logs: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n loggingEnabled: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n timeToLive: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n self: 
faker.helpers.arrayElement([faker.internet.url(), undefined]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n text: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n model: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n locale: faker.string.alpha(20),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n })),\n undefined\n ]),\n \"@nextLink\": faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getEndpointsCreateResponseMock = (\n overrideResponse: Partial<Endpoint> = {}\n): Endpoint => ({\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n links: faker.helpers.arrayElement([\n {\n restInteractive: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n restConversation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n restDictation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n webSocketInteractive: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n webSocketConversation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n webSocketDictation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n logs: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n loggingEnabled: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n timeToLive: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n text: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n model: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n locale: faker.string.alpha(20),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n ...overrideResponse\n})\n\nexport const getEndpointsGetResponseMock = (\n overrideResponse: Partial<Endpoint> = {}\n): Endpoint => ({\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n links: faker.helpers.arrayElement([\n {\n restInteractive: faker.helpers.arrayElement([faker.internet.url(), 
undefined]),\n restConversation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n restDictation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n webSocketInteractive: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n webSocketConversation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n webSocketDictation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n logs: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n loggingEnabled: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n timeToLive: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n text: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n model: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n locale: faker.string.alpha(20),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n ...overrideResponse\n})\n\nexport const getEndpointsUpdateResponseMock = (\n overrideResponse: Partial<Endpoint> = {}\n): Endpoint => ({\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n links: faker.helpers.arrayElement([\n {\n restInteractive: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n restConversation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n restDictation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n webSocketInteractive: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n webSocketConversation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n webSocketDictation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n logs: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n loggingEnabled: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n timeToLive: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n text: faker.helpers.arrayElement([faker.string.alpha(20), 
undefined]),\n model: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n locale: faker.string.alpha(20),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n ...overrideResponse\n})\n\nexport const getEndpointsListLogsResponseMock = (\n overrideResponse: Partial<PaginatedFiles> = {}\n): PaginatedFiles => ({\n values: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n kind: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(FileKind)),\n undefined\n ]),\n links: faker.helpers.arrayElement([\n { contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n size: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n name: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined])\n })),\n undefined\n ]),\n \"@nextLink\": faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getEndpointsGetLogResponseMock = (overrideResponse: Partial<File> = {}): File => ({\n kind: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(FileKind)),\n undefined\n ]),\n links: faker.helpers.arrayElement([\n { contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n size: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n name: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getEndpointsListBaseModelLogsResponseMock = (\n overrideResponse: Partial<PaginatedFiles> = {}\n): PaginatedFiles => ({\n values: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n kind: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(FileKind)),\n undefined\n ]),\n links: faker.helpers.arrayElement([\n { contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n size: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n duration: 
faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n name: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined])\n })),\n undefined\n ]),\n \"@nextLink\": faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getEndpointsGetBaseModelLogResponseMock = (\n overrideResponse: Partial<File> = {}\n): File => ({\n kind: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(FileKind)),\n undefined\n ]),\n links: faker.helpers.arrayElement([\n { contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n size: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n name: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getEvaluationsListSupportedLocalesResponseMock = (): string[] =>\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, () => faker.word.sample())\n\nexport const getEvaluationsListResponseMock = (\n overrideResponse: Partial<PaginatedEvaluations> = {}\n): PaginatedEvaluations => ({\n values: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n model1: { self: faker.internet.url() },\n model2: { self: faker.internet.url() },\n transcription1: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n transcription2: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n dataset: { self: faker.internet.url() },\n links: faker.helpers.arrayElement([\n { files: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n wordErrorRate2: faker.helpers.arrayElement([faker.number.float(), undefined]),\n wordErrorRate1: faker.helpers.arrayElement([faker.number.float(), undefined]),\n sentenceErrorRate2: faker.helpers.arrayElement([faker.number.float(), undefined]),\n sentenceCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n correctWordCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordSubstitutionCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordDeletionCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordInsertionCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n sentenceErrorRate1: faker.helpers.arrayElement([faker.number.float(), undefined]),\n sentenceCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n correctWordCount1: 
faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordSubstitutionCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordDeletionCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordInsertionCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n locale: faker.string.alpha(20)\n })),\n undefined\n ]),\n \"@nextLink\": faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getEvaluationsCreateResponseMock = (\n overrideResponse: Partial<Evaluation> = {}\n): Evaluation => ({\n model1: { self: faker.internet.url() },\n model2: { self: faker.internet.url() },\n transcription1: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n transcription2: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n dataset: { self: faker.internet.url() },\n links: faker.helpers.arrayElement([\n { files: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n wordErrorRate2: faker.helpers.arrayElement([faker.number.float(), undefined]),\n wordErrorRate1: faker.helpers.arrayElement([faker.number.float(), undefined]),\n sentenceErrorRate2: faker.helpers.arrayElement([faker.number.float(), undefined]),\n sentenceCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n correctWordCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordSubstitutionCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordDeletionCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordInsertionCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n sentenceErrorRate1: faker.helpers.arrayElement([faker.number.float(), undefined]),\n sentenceCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordCount1: 
faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n correctWordCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordSubstitutionCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordDeletionCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordInsertionCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n locale: faker.string.alpha(20),\n ...overrideResponse\n})\n\nexport const getEvaluationsListFilesResponseMock = (\n overrideResponse: Partial<PaginatedFiles> = {}\n): PaginatedFiles => ({\n values: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n kind: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(FileKind)),\n undefined\n ]),\n links: faker.helpers.arrayElement([\n { contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n size: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n name: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined])\n })),\n undefined\n ]),\n \"@nextLink\": faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getEvaluationsGetFileResponseMock = (overrideResponse: Partial<File> = {}): File => ({\n kind: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(FileKind)),\n undefined\n ]),\n links: faker.helpers.arrayElement([\n { contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n size: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n duration: 
faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n name: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getEvaluationsGetResponseMock = (\n overrideResponse: Partial<Evaluation> = {}\n): Evaluation => ({\n model1: { self: faker.internet.url() },\n model2: { self: faker.internet.url() },\n transcription1: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n transcription2: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n dataset: { self: faker.internet.url() },\n links: faker.helpers.arrayElement([\n { files: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n wordErrorRate2: faker.helpers.arrayElement([faker.number.float(), undefined]),\n wordErrorRate1: faker.helpers.arrayElement([faker.number.float(), undefined]),\n sentenceErrorRate2: faker.helpers.arrayElement([faker.number.float(), undefined]),\n sentenceCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n correctWordCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordSubstitutionCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordDeletionCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordInsertionCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n sentenceErrorRate1: faker.helpers.arrayElement([faker.number.float(), undefined]),\n sentenceCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n correctWordCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordSubstitutionCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordDeletionCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordInsertionCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n displayName: faker.string.alpha(20),\n description: 
faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n locale: faker.string.alpha(20),\n ...overrideResponse\n})\n\nexport const getEvaluationsUpdateResponseMock = (\n overrideResponse: Partial<Evaluation> = {}\n): Evaluation => ({\n model1: { self: faker.internet.url() },\n model2: { self: faker.internet.url() },\n transcription1: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n transcription2: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n dataset: { self: faker.internet.url() },\n links: faker.helpers.arrayElement([\n { files: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n wordErrorRate2: faker.helpers.arrayElement([faker.number.float(), undefined]),\n wordErrorRate1: faker.helpers.arrayElement([faker.number.float(), undefined]),\n sentenceErrorRate2: faker.helpers.arrayElement([faker.number.float(), undefined]),\n sentenceCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n correctWordCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordSubstitutionCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordDeletionCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordInsertionCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n sentenceErrorRate1: faker.helpers.arrayElement([faker.number.float(), undefined]),\n sentenceCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n correctWordCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordSubstitutionCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordDeletionCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordInsertionCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n displayName: faker.string.alpha(20),\n 
description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n locale: faker.string.alpha(20),\n ...overrideResponse\n})\n\nexport const getModelsListSupportedLocalesResponseMock = (): string[] =>\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, () => faker.word.sample())\n\nexport const getModelsListCustomModelsResponseMock = (\n overrideResponse: Partial<PaginatedCustomModels> = {}\n): PaginatedCustomModels => ({\n values: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n ...{\n ...{\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n locale: faker.string.alpha(20),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n }\n },\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n links: faker.helpers.arrayElement([\n {\n copyTo: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n files: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n manifest: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n customModelWeightPercent: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: 100 }),\n undefined\n ]),\n deprecationDates: faker.helpers.arrayElement([\n {\n transcriptionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n },\n undefined\n ]),\n features: faker.helpers.arrayElement([\n {\n ...{\n supportsTranscriptions: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n supportsEndpoints: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n supportsTranscriptionsOnSpeechContainers: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n text: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n baseModel: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n datasets: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n self: faker.internet.url()\n })),\n undefined\n ]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ])\n })),\n undefined\n ]),\n \"@nextLink\": faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getModelsCreateResponseMock = (): CustomModel => ({\n ...{\n ...{\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n locale: 
faker.string.alpha(20),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n }\n },\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n links: faker.helpers.arrayElement([\n {\n copyTo: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n files: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n manifest: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n customModelWeightPercent: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: 100 }),\n undefined\n ]),\n deprecationDates: faker.helpers.arrayElement([\n {\n transcriptionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n },\n undefined\n ]),\n features: faker.helpers.arrayElement([\n {\n ...{\n supportsTranscriptions: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n supportsEndpoints: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n supportsTranscriptionsOnSpeechContainers: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n text: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n baseModel: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n datasets: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n self: faker.internet.url()\n })),\n undefined\n ]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ])\n})\n\nexport const getModelsListBaseModelsResponseMock = (\n overrideResponse: Partial<PaginatedBaseModels> = {}\n): PaginatedBaseModels => ({\n values: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n ...{\n ...{\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n locale: faker.string.alpha(20),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n }\n },\n links: faker.helpers.arrayElement([\n { manifest: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n deprecationDates: faker.helpers.arrayElement([\n {\n 
adaptationDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n transcriptionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n },\n undefined\n ]),\n features: faker.helpers.arrayElement([\n {\n ...{\n ...{\n supportsTranscriptions: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n supportsEndpoints: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n supportsTranscriptionsOnSpeechContainers: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n supportsAdaptationsWith: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(DatasetKind)),\n undefined\n ])\n },\n undefined\n ])\n },\n undefined\n ])\n })),\n undefined\n ]),\n \"@nextLink\": faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getModelsGetCustomModelResponseMock = (): CustomModel => ({\n ...{\n ...{\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n locale: faker.string.alpha(20),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n }\n },\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n links: faker.helpers.arrayElement([\n {\n copyTo: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n files: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n manifest: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n customModelWeightPercent: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: 100 }),\n undefined\n ]),\n deprecationDates: faker.helpers.arrayElement([\n {\n transcriptionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n },\n undefined\n ]),\n features: faker.helpers.arrayElement([\n {\n ...{\n supportsTranscriptions: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n supportsEndpoints: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n supportsTranscriptionsOnSpeechContainers: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n text: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n baseModel: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n datasets: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n self: faker.internet.url()\n })),\n undefined\n ]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n 
])\n})\n\nexport const getModelsUpdateResponseMock = (): CustomModel => ({\n ...{\n ...{\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n locale: faker.string.alpha(20),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n }\n },\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n links: faker.helpers.arrayElement([\n {\n copyTo: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n files: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n manifest: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n customModelWeightPercent: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: 100 }),\n undefined\n ]),\n deprecationDates: faker.helpers.arrayElement([\n {\n transcriptionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n },\n undefined\n ]),\n features: faker.helpers.arrayElement([\n {\n ...{\n supportsTranscriptions: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n supportsEndpoints: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n supportsTranscriptionsOnSpeechContainers: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n text: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n baseModel: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n datasets: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n self: faker.internet.url()\n })),\n undefined\n ]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ])\n})\n\nexport const getModelsGetBaseModelResponseMock = (): BaseModel => ({\n ...{\n ...{\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n locale: faker.string.alpha(20),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n }\n },\n links: faker.helpers.arrayElement([\n { manifest: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n deprecationDates: faker.helpers.arrayElement([\n {\n adaptationDateTime: 
faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n transcriptionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n },\n undefined\n ]),\n features: faker.helpers.arrayElement([\n {\n ...{\n ...{\n supportsTranscriptions: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n supportsEndpoints: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n supportsTranscriptionsOnSpeechContainers: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n supportsAdaptationsWith: faker.helpers.arrayElement([\n faker.helpers.arrayElements(Object.values(DatasetKind)),\n undefined\n ])\n },\n undefined\n ])\n },\n undefined\n ])\n})\n\nexport const getModelsCopyToResponseMock = (): CustomModel => ({\n ...{\n ...{\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n locale: faker.string.alpha(20),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n }\n },\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n links: faker.helpers.arrayElement([\n {\n copyTo: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n files: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n manifest: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n customModelWeightPercent: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: 100 }),\n undefined\n ]),\n deprecationDates: faker.helpers.arrayElement([\n {\n transcriptionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n },\n undefined\n ]),\n features: faker.helpers.arrayElement([\n {\n ...{\n supportsTranscriptions: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n supportsEndpoints: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n supportsTranscriptionsOnSpeechContainers: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n text: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n baseModel: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n datasets: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n self: faker.internet.url()\n })),\n undefined\n ]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ])\n})\n\nexport const getModelsGetCustomModelManifestResponseMock = (\n overrideResponse: Partial<ModelManifest> = {}\n): ModelManifest => ({\n model: { self: 
faker.internet.url() },\n modelFiles: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n name: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined])\n })\n ),\n properties: {\n [faker.string.alphanumeric(5)]: {}\n },\n ...overrideResponse\n})\n\nexport const getModelsGetBaseModelManifestResponseMock = (\n overrideResponse: Partial<ModelManifest> = {}\n): ModelManifest => ({\n model: { self: faker.internet.url() },\n modelFiles: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(\n () => ({\n name: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined])\n })\n ),\n properties: {\n [faker.string.alphanumeric(5)]: {}\n },\n ...overrideResponse\n})\n\nexport const getModelsListFilesResponseMock = (\n overrideResponse: Partial<PaginatedFiles> = {}\n): PaginatedFiles => ({\n values: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n kind: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(FileKind)),\n undefined\n ]),\n links: faker.helpers.arrayElement([\n { contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n size: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n name: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined])\n })),\n undefined\n ]),\n \"@nextLink\": faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getModelsGetFileResponseMock = (overrideResponse: Partial<File> = {}): File => ({\n kind: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(FileKind)),\n undefined\n ]),\n links: faker.helpers.arrayElement([\n { contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n size: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n name: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getProjectsListSupportedLocalesResponseMock = (): string[] =>\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, () => faker.word.sample())\n\nexport const getProjectsListResponseMock = (\n overrideResponse: Partial<PaginatedProjects> = {}\n): PaginatedProjects => ({\n values: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n links: faker.helpers.arrayElement([\n {\n evaluations: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n datasets: 
faker.helpers.arrayElement([faker.internet.url(), undefined]),\n models: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n endpoints: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n transcriptions: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n datasetCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n evaluationCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n modelCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n transcriptionCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n endpointCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ])\n },\n undefined\n ]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n locale: faker.string.alpha(20),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n })),\n undefined\n ]),\n \"@nextLink\": faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getProjectsCreateResponseMock = (\n overrideResponse: Partial<Project> = {}\n): Project => ({\n links: faker.helpers.arrayElement([\n {\n evaluations: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n datasets: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n models: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n endpoints: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n transcriptions: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n datasetCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n evaluationCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n modelCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n transcriptionCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n endpointCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ])\n },\n undefined\n ]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n locale: faker.string.alpha(20),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n ...overrideResponse\n})\n\nexport const getProjectsGetResponseMock = (overrideResponse: Partial<Project> = {}): Project => ({\n links: faker.helpers.arrayElement([\n {\n evaluations: faker.helpers.arrayElement([faker.internet.url(), 
undefined]),\n datasets: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n models: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n endpoints: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n transcriptions: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n datasetCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n evaluationCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n modelCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n transcriptionCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n endpointCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ])\n },\n undefined\n ]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n locale: faker.string.alpha(20),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n ...overrideResponse\n})\n\nexport const getProjectsUpdateResponseMock = (\n overrideResponse: Partial<Project> = {}\n): Project => ({\n links: faker.helpers.arrayElement([\n {\n evaluations: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n datasets: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n models: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n endpoints: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n transcriptions: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n datasetCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n evaluationCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n modelCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n transcriptionCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n endpointCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ])\n },\n undefined\n ]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n locale: faker.string.alpha(20),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n ...overrideResponse\n})\n\nexport const getProjectsListEvaluationsResponseMock = (\n overrideResponse: Partial<PaginatedEvaluations> = {}\n): PaginatedEvaluations => ({\n values: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n model1: { self: 
faker.internet.url() },\n model2: { self: faker.internet.url() },\n transcription1: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n transcription2: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n dataset: { self: faker.internet.url() },\n links: faker.helpers.arrayElement([\n { files: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n wordErrorRate2: faker.helpers.arrayElement([faker.number.float(), undefined]),\n wordErrorRate1: faker.helpers.arrayElement([faker.number.float(), undefined]),\n sentenceErrorRate2: faker.helpers.arrayElement([faker.number.float(), undefined]),\n sentenceCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n correctWordCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordSubstitutionCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordDeletionCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordInsertionCount2: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n sentenceErrorRate1: faker.helpers.arrayElement([faker.number.float(), undefined]),\n sentenceCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n correctWordCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordSubstitutionCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordDeletionCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n wordInsertionCount1: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n locale: faker.string.alpha(20)\n })),\n undefined\n ]),\n \"@nextLink\": faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const 
getProjectsListDatasetsResponseMock = (\n overrideResponse: Partial<PaginatedDatasets> = {}\n): PaginatedDatasets => ({\n values: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n links: faker.helpers.arrayElement([\n {\n files: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n commitBlocks: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n listBlocks: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n uploadBlocks: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n acceptedLineCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n rejectedLineCount: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n kind: faker.helpers.arrayElement(Object.values(DatasetKind)),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n locale: faker.string.alpha(20),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n })),\n undefined\n ]),\n \"@nextLink\": faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getProjectsListEndpointsResponseMock = (\n overrideResponse: Partial<PaginatedEndpoints> = {}\n): PaginatedEndpoints => ({\n values: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n links: faker.helpers.arrayElement([\n {\n restInteractive: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n restConversation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n restDictation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n webSocketInteractive: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n webSocketConversation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n webSocketDictation: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n logs: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n loggingEnabled: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n timeToLive: 
faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n text: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n model: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n locale: faker.string.alpha(20),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n })),\n undefined\n ]),\n \"@nextLink\": faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getProjectsListModelsResponseMock = (\n overrideResponse: Partial<PaginatedCustomModels> = {}\n): PaginatedCustomModels => ({\n values: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n ...{\n ...{\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n locale: faker.string.alpha(20),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n }\n },\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n links: faker.helpers.arrayElement([\n {\n copyTo: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n files: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n manifest: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n customModelWeightPercent: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: 100 }),\n undefined\n ]),\n deprecationDates: faker.helpers.arrayElement([\n {\n transcriptionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n },\n undefined\n ]),\n features: faker.helpers.arrayElement([\n {\n ...{\n supportsTranscriptions: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n supportsEndpoints: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n supportsTranscriptionsOnSpeechContainers: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ])\n }\n },\n undefined\n ]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: 
faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n text: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n baseModel: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n datasets: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n self: faker.internet.url()\n })),\n undefined\n ]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ])\n })),\n undefined\n ]),\n \"@nextLink\": faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getProjectsListTranscriptionsResponseMock = (\n overrideResponse: Partial<PaginatedTranscriptions> = {}\n): PaginatedTranscriptions => ({\n values: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n links: faker.helpers.arrayElement([\n { files: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n diarizationEnabled: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n wordLevelTimestampsEnabled: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n displayFormWordLevelTimestampsEnabled: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n channels: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.number.int({ min: undefined, max: undefined })\n ),\n undefined\n ]),\n destinationContainerUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n punctuationMode: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(PunctuationMode)),\n undefined\n ]),\n profanityFilterMode: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(ProfanityFilterMode)),\n undefined\n ]),\n timeToLive: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n diarization: faker.helpers.arrayElement([\n {\n speakers: {\n minCount: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ]),\n maxCount: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ])\n }\n },\n undefined\n ]),\n languageIdentification: faker.helpers.arrayElement([\n {\n candidateLocales: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20)),\n speechModelMapping: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: { self: faker.internet.url() }\n },\n undefined\n ])\n },\n undefined\n ]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n model: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n dataset: faker.helpers.arrayElement([{ self: 
faker.internet.url() }, undefined]),\n contentUrls: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.internet.url()\n ),\n undefined\n ]),\n contentContainerUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n locale: faker.string.alpha(20),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n })),\n undefined\n ]),\n \"@nextLink\": faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getTranscriptionsListSupportedLocalesResponseMock = (): string[] =>\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, () => faker.word.sample())\n\nexport const getTranscriptionsListResponseMock = (\n overrideResponse: Partial<PaginatedTranscriptions> = {}\n): PaginatedTranscriptions => ({\n values: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n links: faker.helpers.arrayElement([\n { files: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n diarizationEnabled: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n wordLevelTimestampsEnabled: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n displayFormWordLevelTimestampsEnabled: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n channels: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.number.int({ min: undefined, max: undefined })\n ),\n undefined\n ]),\n destinationContainerUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n punctuationMode: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(PunctuationMode)),\n undefined\n ]),\n profanityFilterMode: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(ProfanityFilterMode)),\n undefined\n ]),\n timeToLive: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n diarization: faker.helpers.arrayElement([\n {\n speakers: {\n minCount: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ]),\n maxCount: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ])\n }\n },\n undefined\n ]),\n languageIdentification: faker.helpers.arrayElement([\n {\n candidateLocales: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20)),\n speechModelMapping: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: { self: faker.internet.url() }\n },\n undefined\n ])\n },\n undefined\n ]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: 
faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n model: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n dataset: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n contentUrls: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.internet.url()\n ),\n undefined\n ]),\n contentContainerUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n locale: faker.string.alpha(20),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ])\n })),\n undefined\n ]),\n \"@nextLink\": faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getTranscriptionsCreateResponseMock = (\n overrideResponse: Partial<Transcription> = {}\n): Transcription => ({\n links: faker.helpers.arrayElement([\n { files: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n diarizationEnabled: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n wordLevelTimestampsEnabled: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n displayFormWordLevelTimestampsEnabled: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n channels: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.number.int({ min: undefined, max: undefined })\n ),\n undefined\n ]),\n destinationContainerUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n punctuationMode: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(PunctuationMode)),\n undefined\n ]),\n profanityFilterMode: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(ProfanityFilterMode)),\n undefined\n ]),\n timeToLive: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n diarization: faker.helpers.arrayElement([\n {\n speakers: {\n minCount: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ]),\n maxCount: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ])\n }\n },\n undefined\n ]),\n languageIdentification: faker.helpers.arrayElement([\n {\n candidateLocales: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20)),\n speechModelMapping: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: { self: faker.internet.url() }\n },\n undefined\n ])\n },\n undefined\n ]),\n email: 
faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n model: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n dataset: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n contentUrls: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.internet.url()\n ),\n undefined\n ]),\n contentContainerUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n locale: faker.string.alpha(20),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n ...overrideResponse\n})\n\nexport const getTranscriptionsGetResponseMock = (\n overrideResponse: Partial<Transcription> = {}\n): Transcription => ({\n links: faker.helpers.arrayElement([\n { files: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n diarizationEnabled: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n wordLevelTimestampsEnabled: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n displayFormWordLevelTimestampsEnabled: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n channels: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.number.int({ min: undefined, max: undefined })\n ),\n undefined\n ]),\n destinationContainerUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n punctuationMode: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(PunctuationMode)),\n undefined\n ]),\n profanityFilterMode: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(ProfanityFilterMode)),\n undefined\n ]),\n timeToLive: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n diarization: faker.helpers.arrayElement([\n {\n speakers: {\n minCount: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ]),\n maxCount: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ])\n }\n },\n undefined\n ]),\n languageIdentification: faker.helpers.arrayElement([\n {\n candidateLocales: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20)),\n speechModelMapping: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: { self: faker.internet.url() }\n },\n undefined\n ])\n },\n undefined\n ]),\n 
email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n model: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n dataset: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n contentUrls: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.internet.url()\n ),\n undefined\n ]),\n contentContainerUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n locale: faker.string.alpha(20),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n ...overrideResponse\n})\n\nexport const getTranscriptionsUpdateResponseMock = (\n overrideResponse: Partial<Transcription> = {}\n): Transcription => ({\n links: faker.helpers.arrayElement([\n { files: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n diarizationEnabled: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n wordLevelTimestampsEnabled: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n displayFormWordLevelTimestampsEnabled: faker.helpers.arrayElement([\n faker.datatype.boolean(),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n channels: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.number.int({ min: undefined, max: undefined })\n ),\n undefined\n ]),\n destinationContainerUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n punctuationMode: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(PunctuationMode)),\n undefined\n ]),\n profanityFilterMode: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(ProfanityFilterMode)),\n undefined\n ]),\n timeToLive: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n diarization: faker.helpers.arrayElement([\n {\n speakers: {\n minCount: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ]),\n maxCount: faker.helpers.arrayElement([\n faker.number.int({ min: 1, max: undefined }),\n undefined\n ])\n }\n },\n undefined\n ]),\n languageIdentification: faker.helpers.arrayElement([\n {\n candidateLocales: Array.from(\n { length: faker.number.int({ min: 1, max: 10 }) },\n (_, i) => i + 1\n ).map(() => faker.string.alpha(20)),\n speechModelMapping: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: { self: faker.internet.url() }\n },\n undefined\n ])\n },\n 
undefined\n ]),\n email: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ])\n },\n undefined\n ]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n model: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n project: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n dataset: faker.helpers.arrayElement([{ self: faker.internet.url() }, undefined]),\n contentUrls: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>\n faker.internet.url()\n ),\n undefined\n ]),\n contentContainerUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n locale: faker.string.alpha(20),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n ...overrideResponse\n})\n\nexport const getTranscriptionsListFilesResponseMock = (\n overrideResponse: Partial<PaginatedFiles> = {}\n): PaginatedFiles => ({\n values: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n kind: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(FileKind)),\n undefined\n ]),\n links: faker.helpers.arrayElement([\n { contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n size: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n name: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined])\n })),\n undefined\n ]),\n \"@nextLink\": faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getTranscriptionsGetFileResponseMock = (\n overrideResponse: Partial<File> = {}\n): File => ({\n kind: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(FileKind)),\n undefined\n ]),\n links: faker.helpers.arrayElement([\n { contentUrl: faker.helpers.arrayElement([faker.internet.url(), undefined]) },\n undefined\n ]),\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n size: faker.helpers.arrayElement([\n faker.number.int({ min: undefined, max: undefined }),\n undefined\n ]),\n duration: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n name: faker.helpers.arrayElement([faker.string.alpha(20), 
undefined]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getWebHooksListResponseMock = (\n overrideResponse: Partial<PaginatedWebHooks> = {}\n): PaginatedWebHooks => ({\n values: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n webUrl: faker.internet.url(),\n links: faker.helpers.arrayElement([\n {\n ping: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n test: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n apiVersion: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n secret: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n events: {\n datasetCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n datasetProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n datasetCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n datasetDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n modelCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n modelProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n modelCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n modelDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n evaluationCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n evaluationProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n evaluationCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n evaluationDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n transcriptionCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n transcriptionProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n transcriptionCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n transcriptionDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n endpointCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n endpointProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n endpointCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n endpointDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n ping: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n challenge: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n },\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n 
customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ])\n })),\n undefined\n ]),\n \"@nextLink\": faker.helpers.arrayElement([faker.internet.url(), undefined]),\n ...overrideResponse\n})\n\nexport const getWebHooksCreateResponseMock = (\n overrideResponse: Partial<WebHook> = {}\n): WebHook => ({\n webUrl: faker.internet.url(),\n links: faker.helpers.arrayElement([\n {\n ping: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n test: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n apiVersion: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n secret: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n events: {\n datasetCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n datasetProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n datasetCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n datasetDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n modelCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n modelProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n modelCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n modelDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n evaluationCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n evaluationProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n evaluationCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n evaluationDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n transcriptionCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n transcriptionProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n transcriptionCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n transcriptionDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n endpointCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n endpointProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n endpointCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n endpointDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n ping: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n challenge: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n },\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n 
customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n ...overrideResponse\n})\n\nexport const getWebHooksGetResponseMock = (overrideResponse: Partial<WebHook> = {}): WebHook => ({\n webUrl: faker.internet.url(),\n links: faker.helpers.arrayElement([\n {\n ping: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n test: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n apiVersion: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n secret: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n events: {\n datasetCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n datasetProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n datasetCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n datasetDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n modelCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n modelProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n modelCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n modelDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n evaluationCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n evaluationProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n evaluationCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n evaluationDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n transcriptionCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n transcriptionProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n transcriptionCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n transcriptionDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n endpointCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n endpointProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n endpointCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n endpointDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n ping: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n challenge: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n },\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n 
]),\n ...overrideResponse\n})\n\nexport const getWebHooksUpdateResponseMock = (\n overrideResponse: Partial<WebHook> = {}\n): WebHook => ({\n webUrl: faker.internet.url(),\n links: faker.helpers.arrayElement([\n {\n ping: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n test: faker.helpers.arrayElement([faker.internet.url(), undefined])\n },\n undefined\n ]),\n properties: faker.helpers.arrayElement([\n {\n error: faker.helpers.arrayElement([\n {\n code: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n apiVersion: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n secret: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n },\n undefined\n ]),\n self: faker.helpers.arrayElement([faker.internet.url(), undefined]),\n displayName: faker.string.alpha(20),\n description: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n events: {\n datasetCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n datasetProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n datasetCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n datasetDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n modelCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n modelProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n modelCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n modelDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n evaluationCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n evaluationProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n evaluationCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n evaluationDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n transcriptionCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n transcriptionProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n transcriptionCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n transcriptionDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n endpointCreation: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n endpointProcessing: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n endpointCompletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n endpointDeletion: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n ping: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]),\n challenge: faker.helpers.arrayElement([faker.datatype.boolean(), undefined])\n },\n createdDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n lastActionDateTime: faker.helpers.arrayElement([\n `${faker.date.past().toISOString().split(\".\")[0]}Z`,\n undefined\n ]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(Status)),\n undefined\n ]),\n customProperties: faker.helpers.arrayElement([\n {\n [faker.string.alphanumeric(5)]: faker.string.alpha(20)\n },\n undefined\n ]),\n ...overrideResponse\n})\n\nexport const getServiceHealthGetResponseMock = (\n overrideResponse: 
Partial<ServiceHealth> = {}\n): ServiceHealth => ({\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(HealthStatus)),\n undefined\n ]),\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n components: faker.helpers.arrayElement([\n Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({\n message: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n name: faker.helpers.arrayElement([faker.string.alpha(20), undefined]),\n status: faker.helpers.arrayElement([\n faker.helpers.arrayElement(Object.values(HealthStatus)),\n undefined\n ]),\n type: faker.helpers.arrayElement([faker.string.alpha(20), undefined])\n })),\n undefined\n ]),\n ...overrideResponse\n})\n\nexport const getDatasetsListSupportedLocalesMockHandler = (\n overrideResponse?:\n | DatasetLocales\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<DatasetLocales> | DatasetLocales)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/datasets/locales\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getDatasetsListSupportedLocalesResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getDatasetsListMockHandler = (\n overrideResponse?:\n | PaginatedDatasets\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PaginatedDatasets> | PaginatedDatasets)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/datasets\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getDatasetsListResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getDatasetsCreateMockHandler = (\n overrideResponse?:\n | Dataset\n | ((info: Parameters<Parameters<typeof http.post>[1]>[0]) => Promise<Dataset> | Dataset)\n) => {\n return http.post(\"https://api.cognitive.microsoft.com/datasets\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getDatasetsCreateResponseMock()\n ),\n { status: 201, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getDatasetsGetMockHandler = (\n overrideResponse?:\n | Dataset\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<Dataset> | Dataset)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/datasets/:id\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getDatasetsGetResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getDatasetsUpdateMockHandler = (\n overrideResponse?:\n | Dataset\n | ((info: Parameters<Parameters<typeof http.patch>[1]>[0]) => Promise<Dataset> | Dataset)\n) => {\n return http.patch(\"https://api.cognitive.microsoft.com/datasets/:id\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getDatasetsUpdateResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getDatasetsDeleteMockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.delete>[1]>[0]) => Promise<void> | void)\n) => {\n return http.delete(\"https://api.cognitive.microsoft.com/datasets/:id\", async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 204 })\n })\n}\n\nexport const getDatasetsGetBlocksMockHandler = (\n overrideResponse?:\n | UploadedBlocks\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<UploadedBlocks> | UploadedBlocks)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/datasets/:id/blocks\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getDatasetsGetBlocksResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getDatasetsUploadBlockMockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.put>[1]>[0]) => Promise<void> | void)\n) => {\n return http.put(\"https://api.cognitive.microsoft.com/datasets/:id/blocks\", async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 201 })\n })\n}\n\nexport const getDatasetsUploadMockHandler = (\n overrideResponse?:\n | Dataset\n | ((info: Parameters<Parameters<typeof http.post>[1]>[0]) => Promise<Dataset> | Dataset)\n) => {\n return http.post(\"https://api.cognitive.microsoft.com/datasets/upload\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getDatasetsUploadResponseMock()\n ),\n { status: 201, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getDatasetsCommitBlocksMockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.post>[1]>[0]) => Promise<void> | void)\n) => {\n return http.post(\n \"https://api.cognitive.microsoft.com/datasets/:id/blocks:commit\",\n async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 200 })\n }\n )\n}\n\nexport const getDatasetsListFilesMockHandler = (\n overrideResponse?:\n | PaginatedFiles\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PaginatedFiles> | PaginatedFiles)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/datasets/:id/files\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getDatasetsListFilesResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getDatasetsGetFileMockHandler = (\n overrideResponse?:\n | File\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<File> | File)\n) => {\n return http.get(\n \"https://api.cognitive.microsoft.com/datasets/:id/files/:fileId\",\n async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getDatasetsGetFileResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n }\n )\n}\n\nexport const getEndpointsListSupportedLocalesMockHandler = (\n overrideResponse?:\n | string[]\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<string[]> | string[])\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/endpoints/locales\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getEndpointsListSupportedLocalesResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getEndpointsListMockHandler = (\n overrideResponse?:\n | PaginatedEndpoints\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PaginatedEndpoints> | PaginatedEndpoints)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/endpoints\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getEndpointsListResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getEndpointsCreateMockHandler = (\n overrideResponse?:\n | Endpoint\n | ((info: Parameters<Parameters<typeof http.post>[1]>[0]) => Promise<Endpoint> | Endpoint)\n) => {\n return http.post(\"https://api.cognitive.microsoft.com/endpoints\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getEndpointsCreateResponseMock()\n ),\n { status: 201, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getEndpointsGetMockHandler = (\n overrideResponse?:\n | Endpoint\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<Endpoint> | Endpoint)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/endpoints/:id\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getEndpointsGetResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getEndpointsDeleteMockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.delete>[1]>[0]) => Promise<void> | void)\n) => {\n return http.delete(\"https://api.cognitive.microsoft.com/endpoints/:id\", async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 204 })\n })\n}\n\nexport const getEndpointsUpdateMockHandler = (\n overrideResponse?:\n | Endpoint\n | ((info: Parameters<Parameters<typeof http.patch>[1]>[0]) => Promise<Endpoint> | Endpoint)\n) => {\n return http.patch(\"https://api.cognitive.microsoft.com/endpoints/:id\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getEndpointsUpdateResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getEndpointsListLogsMockHandler = (\n overrideResponse?:\n | PaginatedFiles\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PaginatedFiles> | PaginatedFiles)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/endpoints/:id/files/logs\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getEndpointsListLogsResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getEndpointsDeleteLogsMockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.delete>[1]>[0]) => Promise<void> | void)\n) => {\n return http.delete(\n \"https://api.cognitive.microsoft.com/endpoints/:id/files/logs\",\n async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 202 })\n }\n )\n}\n\nexport const getEndpointsGetLogMockHandler = (\n overrideResponse?:\n | File\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<File> | File)\n) => {\n return http.get(\n \"https://api.cognitive.microsoft.com/endpoints/:id/files/logs/:logId\",\n async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getEndpointsGetLogResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n }\n )\n}\n\nexport const getEndpointsDeleteLogMockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.delete>[1]>[0]) => Promise<void> | void)\n) => {\n return http.delete(\n \"https://api.cognitive.microsoft.com/endpoints/:id/files/logs/:logId\",\n async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 204 })\n }\n )\n}\n\nexport const getEndpointsListBaseModelLogsMockHandler = (\n overrideResponse?:\n | PaginatedFiles\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PaginatedFiles> | PaginatedFiles)\n) => {\n return http.get(\n \"https://api.cognitive.microsoft.com/endpoints/base/:locale/files/logs\",\n async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getEndpointsListBaseModelLogsResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n }\n )\n}\n\nexport const getEndpointsDeleteBaseModelLogsMockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.delete>[1]>[0]) => Promise<void> | void)\n) => {\n return http.delete(\n \"https://api.cognitive.microsoft.com/endpoints/base/:locale/files/logs\",\n async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 202 })\n }\n )\n}\n\nexport const getEndpointsGetBaseModelLogMockHandler = (\n overrideResponse?:\n | File\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<File> | File)\n) => {\n return http.get(\n \"https://api.cognitive.microsoft.com/endpoints/base/:locale/files/logs/:logId\",\n async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getEndpointsGetBaseModelLogResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n }\n )\n}\n\nexport const getEndpointsDeleteBaseModelLogMockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.delete>[1]>[0]) => Promise<void> | void)\n) => {\n return http.delete(\n \"https://api.cognitive.microsoft.com/endpoints/base/:locale/files/logs/:logId\",\n async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 204 })\n }\n )\n}\n\nexport const getEvaluationsListSupportedLocalesMockHandler = (\n overrideResponse?:\n | string[]\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<string[]> | string[])\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/evaluations/locales\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getEvaluationsListSupportedLocalesResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getEvaluationsListMockHandler = (\n overrideResponse?:\n | PaginatedEvaluations\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PaginatedEvaluations> | PaginatedEvaluations)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/evaluations\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getEvaluationsListResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getEvaluationsCreateMockHandler = (\n overrideResponse?:\n | Evaluation\n | ((info: Parameters<Parameters<typeof http.post>[1]>[0]) => Promise<Evaluation> | Evaluation)\n) => {\n return http.post(\"https://api.cognitive.microsoft.com/evaluations\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getEvaluationsCreateResponseMock()\n ),\n { status: 201, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getEvaluationsListFilesMockHandler = (\n overrideResponse?:\n | PaginatedFiles\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PaginatedFiles> | PaginatedFiles)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/evaluations/:id/files\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getEvaluationsListFilesResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getEvaluationsGetFileMockHandler = (\n overrideResponse?:\n | File\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<File> | File)\n) => {\n return http.get(\n \"https://api.cognitive.microsoft.com/evaluations/:id/files/:fileId\",\n async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getEvaluationsGetFileResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n }\n )\n}\n\nexport const getEvaluationsGetMockHandler = (\n overrideResponse?:\n | Evaluation\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<Evaluation> | Evaluation)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/evaluations/:id\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getEvaluationsGetResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getEvaluationsUpdateMockHandler = (\n overrideResponse?:\n | Evaluation\n | ((info: Parameters<Parameters<typeof http.patch>[1]>[0]) => Promise<Evaluation> | Evaluation)\n) => {\n return http.patch(\"https://api.cognitive.microsoft.com/evaluations/:id\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getEvaluationsUpdateResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getEvaluationsDeleteMockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.delete>[1]>[0]) => Promise<void> | void)\n) => {\n return http.delete(\"https://api.cognitive.microsoft.com/evaluations/:id\", async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 204 })\n })\n}\n\nexport const getModelsListSupportedLocalesMockHandler = (\n overrideResponse?:\n | string[]\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<string[]> | string[])\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/models/locales\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getModelsListSupportedLocalesResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getModelsListCustomModelsMockHandler = (\n overrideResponse?:\n | PaginatedCustomModels\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PaginatedCustomModels> | PaginatedCustomModels)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/models\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getModelsListCustomModelsResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getModelsCreateMockHandler = (\n overrideResponse?:\n | CustomModel\n | ((info: Parameters<Parameters<typeof http.post>[1]>[0]) => Promise<CustomModel> | CustomModel)\n) => {\n return http.post(\"https://api.cognitive.microsoft.com/models\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getModelsCreateResponseMock()\n ),\n { status: 201, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getModelsListBaseModelsMockHandler = (\n overrideResponse?:\n | PaginatedBaseModels\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PaginatedBaseModels> | PaginatedBaseModels)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/models/base\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? 
typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getModelsListBaseModelsResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getModelsGetCustomModelMockHandler = (\n overrideResponse?:\n | CustomModel\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<CustomModel> | CustomModel)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/models/:id\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getModelsGetCustomModelResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getModelsUpdateMockHandler = (\n overrideResponse?:\n | CustomModel\n | ((\n info: Parameters<Parameters<typeof http.patch>[1]>[0]\n ) => Promise<CustomModel> | CustomModel)\n) => {\n return http.patch(\"https://api.cognitive.microsoft.com/models/:id\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getModelsUpdateResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getModelsDeleteMockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.delete>[1]>[0]) => Promise<void> | void)\n) => {\n return http.delete(\"https://api.cognitive.microsoft.com/models/:id\", async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 204 })\n })\n}\n\nexport const getModelsGetBaseModelMockHandler = (\n overrideResponse?:\n | BaseModel\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<BaseModel> | BaseModel)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/models/base/:id\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getModelsGetBaseModelResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getModelsCopyToMockHandler = (\n overrideResponse?:\n | CustomModel\n | ((info: Parameters<Parameters<typeof http.post>[1]>[0]) => Promise<CustomModel> | CustomModel)\n) => {\n return http.post(\"https://api.cognitive.microsoft.com/models/:id:copyto\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getModelsCopyToResponseMock()\n ),\n { status: 201, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getModelsGetCustomModelManifestMockHandler = (\n overrideResponse?:\n | ModelManifest\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<ModelManifest> | ModelManifest)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/models/:id/manifest\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? 
typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getModelsGetCustomModelManifestResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getModelsGetBaseModelManifestMockHandler = (\n overrideResponse?:\n | ModelManifest\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<ModelManifest> | ModelManifest)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/models/base/:id/manifest\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getModelsGetBaseModelManifestResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getModelsListFilesMockHandler = (\n overrideResponse?:\n | PaginatedFiles\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PaginatedFiles> | PaginatedFiles)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/models/:id/files\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getModelsListFilesResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getModelsGetFileMockHandler = (\n overrideResponse?:\n | File\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<File> | File)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/models/:id/files/:fileId\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getModelsGetFileResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getProjectsListSupportedLocalesMockHandler = (\n overrideResponse?:\n | string[]\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<string[]> | string[])\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/projects/locales\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getProjectsListSupportedLocalesResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getProjectsListMockHandler = (\n overrideResponse?:\n | PaginatedProjects\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PaginatedProjects> | PaginatedProjects)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/projects\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getProjectsListResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getProjectsCreateMockHandler = (\n overrideResponse?:\n | Project\n | ((info: Parameters<Parameters<typeof http.post>[1]>[0]) => Promise<Project> | Project)\n) => {\n return http.post(\"https://api.cognitive.microsoft.com/projects\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getProjectsCreateResponseMock()\n ),\n { status: 201, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getProjectsGetMockHandler = (\n overrideResponse?:\n | Project\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<Project> | Project)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/projects/:id\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getProjectsGetResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getProjectsUpdateMockHandler = (\n overrideResponse?:\n | Project\n | ((info: Parameters<Parameters<typeof http.patch>[1]>[0]) => Promise<Project> | Project)\n) => {\n return http.patch(\"https://api.cognitive.microsoft.com/projects/:id\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getProjectsUpdateResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getProjectsDeleteMockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.delete>[1]>[0]) => Promise<void> | void)\n) => {\n return http.delete(\"https://api.cognitive.microsoft.com/projects/:id\", async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 204 })\n })\n}\n\nexport const getProjectsListEvaluationsMockHandler = (\n overrideResponse?:\n | PaginatedEvaluations\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PaginatedEvaluations> | PaginatedEvaluations)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/projects/:id/evaluations\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getProjectsListEvaluationsResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getProjectsListDatasetsMockHandler = (\n overrideResponse?:\n | PaginatedDatasets\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PaginatedDatasets> | PaginatedDatasets)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/projects/:id/datasets\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getProjectsListDatasetsResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getProjectsListEndpointsMockHandler = (\n overrideResponse?:\n | PaginatedEndpoints\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PaginatedEndpoints> | PaginatedEndpoints)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/projects/:id/endpoints\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getProjectsListEndpointsResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getProjectsListModelsMockHandler = (\n overrideResponse?:\n | PaginatedCustomModels\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PaginatedCustomModels> | PaginatedCustomModels)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/projects/:id/models\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getProjectsListModelsResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getProjectsListTranscriptionsMockHandler = (\n overrideResponse?:\n | PaginatedTranscriptions\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PaginatedTranscriptions> | PaginatedTranscriptions)\n) => {\n return http.get(\n \"https://api.cognitive.microsoft.com/projects/:id/transcriptions\",\n async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getProjectsListTranscriptionsResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n }\n )\n}\n\nexport const getTranscriptionsListSupportedLocalesMockHandler = (\n overrideResponse?:\n | string[]\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<string[]> | string[])\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/transcriptions/locales\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getTranscriptionsListSupportedLocalesResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getTranscriptionsListMockHandler = (\n overrideResponse?:\n | PaginatedTranscriptions\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PaginatedTranscriptions> | PaginatedTranscriptions)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/transcriptions\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getTranscriptionsListResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getTranscriptionsCreateMockHandler = (\n overrideResponse?:\n | Transcription\n | ((\n info: Parameters<Parameters<typeof http.post>[1]>[0]\n ) => Promise<Transcription> | Transcription)\n) => {\n return http.post(\"https://api.cognitive.microsoft.com/transcriptions\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getTranscriptionsCreateResponseMock()\n ),\n { status: 201, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getTranscriptionsGetMockHandler = (\n overrideResponse?:\n | Transcription\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<Transcription> | Transcription)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/transcriptions/:id\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getTranscriptionsGetResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getTranscriptionsUpdateMockHandler = (\n overrideResponse?:\n | Transcription\n | ((\n info: Parameters<Parameters<typeof http.patch>[1]>[0]\n ) => Promise<Transcription> | Transcription)\n) => {\n return http.patch(\"https://api.cognitive.microsoft.com/transcriptions/:id\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getTranscriptionsUpdateResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getTranscriptionsDeleteMockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.delete>[1]>[0]) => Promise<void> | void)\n) => {\n return http.delete(\"https://api.cognitive.microsoft.com/transcriptions/:id\", async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 204 })\n })\n}\n\nexport const getTranscriptionsListFilesMockHandler = (\n overrideResponse?:\n | PaginatedFiles\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PaginatedFiles> | PaginatedFiles)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/transcriptions/:id/files\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getTranscriptionsListFilesResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getTranscriptionsGetFileMockHandler = (\n overrideResponse?:\n | File\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<File> | File)\n) => {\n return http.get(\n \"https://api.cognitive.microsoft.com/transcriptions/:id/files/:fileId\",\n async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getTranscriptionsGetFileResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n }\n )\n}\n\nexport const getWebHooksListMockHandler = (\n overrideResponse?:\n | PaginatedWebHooks\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<PaginatedWebHooks> | PaginatedWebHooks)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/webhooks\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getWebHooksListResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getWebHooksCreateMockHandler = (\n overrideResponse?:\n | WebHook\n | ((info: Parameters<Parameters<typeof http.post>[1]>[0]) => Promise<WebHook> | WebHook)\n) => {\n return http.post(\"https://api.cognitive.microsoft.com/webhooks\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getWebHooksCreateResponseMock()\n ),\n { status: 201, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getWebHooksGetMockHandler = (\n overrideResponse?:\n | WebHook\n | ((info: Parameters<Parameters<typeof http.get>[1]>[0]) => Promise<WebHook> | WebHook)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/webhooks/:id\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? await overrideResponse(info)\n : overrideResponse\n : getWebHooksGetResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getWebHooksUpdateMockHandler = (\n overrideResponse?:\n | WebHook\n | ((info: Parameters<Parameters<typeof http.patch>[1]>[0]) => Promise<WebHook> | WebHook)\n) => {\n return http.patch(\"https://api.cognitive.microsoft.com/webhooks/:id\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getWebHooksUpdateResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\n\nexport const getWebHooksDeleteMockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.delete>[1]>[0]) => Promise<void> | void)\n) => {\n return http.delete(\"https://api.cognitive.microsoft.com/webhooks/:id\", async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 204 })\n })\n}\n\nexport const getWebHooksPingMockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.post>[1]>[0]) => Promise<void> | void)\n) => {\n return http.post(\"https://api.cognitive.microsoft.com/webhooks/:id:ping\", async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 202 })\n })\n}\n\nexport const getWebHooksTestMockHandler = (\n overrideResponse?:\n | void\n | ((info: Parameters<Parameters<typeof http.post>[1]>[0]) => Promise<void> | void)\n) => {\n return http.post(\"https://api.cognitive.microsoft.com/webhooks/:id:test\", async (info) => {\n await delay(1000)\n if (typeof overrideResponse === \"function\") {\n await overrideResponse(info)\n }\n return new HttpResponse(null, { status: 202 })\n })\n}\n\nexport const getServiceHealthGetMockHandler = (\n overrideResponse?:\n | ServiceHealth\n | ((\n info: Parameters<Parameters<typeof http.get>[1]>[0]\n ) => Promise<ServiceHealth> | ServiceHealth)\n) => {\n return http.get(\"https://api.cognitive.microsoft.com/healthstatus\", async (info) => {\n await delay(1000)\n\n return new HttpResponse(\n JSON.stringify(\n overrideResponse !== undefined\n ? typeof overrideResponse === \"function\"\n ? 
await overrideResponse(info)\n : overrideResponse\n : getServiceHealthGetResponseMock()\n ),\n { status: 200, headers: { \"Content-Type\": \"application/json\" } }\n )\n })\n}\nexport const getSpeechServicesAPIV31Mock = () => [\n getDatasetsListSupportedLocalesMockHandler(),\n getDatasetsListMockHandler(),\n getDatasetsCreateMockHandler(),\n getDatasetsGetMockHandler(),\n getDatasetsUpdateMockHandler(),\n getDatasetsDeleteMockHandler(),\n getDatasetsGetBlocksMockHandler(),\n getDatasetsUploadBlockMockHandler(),\n getDatasetsUploadMockHandler(),\n getDatasetsCommitBlocksMockHandler(),\n getDatasetsListFilesMockHandler(),\n getDatasetsGetFileMockHandler(),\n getEndpointsListSupportedLocalesMockHandler(),\n getEndpointsListMockHandler(),\n getEndpointsCreateMockHandler(),\n getEndpointsGetMockHandler(),\n getEndpointsDeleteMockHandler(),\n getEndpointsUpdateMockHandler(),\n getEndpointsListLogsMockHandler(),\n getEndpointsDeleteLogsMockHandler(),\n getEndpointsGetLogMockHandler(),\n getEndpointsDeleteLogMockHandler(),\n getEndpointsListBaseModelLogsMockHandler(),\n getEndpointsDeleteBaseModelLogsMockHandler(),\n getEndpointsGetBaseModelLogMockHandler(),\n getEndpointsDeleteBaseModelLogMockHandler(),\n getEvaluationsListSupportedLocalesMockHandler(),\n getEvaluationsListMockHandler(),\n getEvaluationsCreateMockHandler(),\n getEvaluationsListFilesMockHandler(),\n getEvaluationsGetFileMockHandler(),\n getEvaluationsGetMockHandler(),\n getEvaluationsUpdateMockHandler(),\n getEvaluationsDeleteMockHandler(),\n getModelsListSupportedLocalesMockHandler(),\n getModelsListCustomModelsMockHandler(),\n getModelsCreateMockHandler(),\n getModelsListBaseModelsMockHandler(),\n getModelsGetCustomModelMockHandler(),\n getModelsUpdateMockHandler(),\n getModelsDeleteMockHandler(),\n getModelsGetBaseModelMockHandler(),\n getModelsCopyToMockHandler(),\n getModelsGetCustomModelManifestMockHandler(),\n getModelsGetBaseModelManifestMockHandler(),\n getModelsListFilesMockHandler(),\n getModelsGetFileMockHandler(),\n getProjectsListSupportedLocalesMockHandler(),\n getProjectsListMockHandler(),\n getProjectsCreateMockHandler(),\n getProjectsGetMockHandler(),\n getProjectsUpdateMockHandler(),\n getProjectsDeleteMockHandler(),\n getProjectsListEvaluationsMockHandler(),\n getProjectsListDatasetsMockHandler(),\n getProjectsListEndpointsMockHandler(),\n getProjectsListModelsMockHandler(),\n getProjectsListTranscriptionsMockHandler(),\n getTranscriptionsListSupportedLocalesMockHandler(),\n getTranscriptionsListMockHandler(),\n getTranscriptionsCreateMockHandler(),\n getTranscriptionsGetMockHandler(),\n getTranscriptionsUpdateMockHandler(),\n getTranscriptionsDeleteMockHandler(),\n getTranscriptionsListFilesMockHandler(),\n getTranscriptionsGetFileMockHandler(),\n getWebHooksListMockHandler(),\n getWebHooksCreateMockHandler(),\n getWebHooksGetMockHandler(),\n getWebHooksUpdateMockHandler(),\n getWebHooksDeleteMockHandler(),\n getWebHooksPingMockHandler(),\n getWebHooksTestMockHandler(),\n getServiceHealthGetMockHandler()\n]\n","/**\n * OpenAI Whisper transcription provider adapter\n * Documentation: https://platform.openai.com/docs/guides/speech-to-text\n */\n\nimport axios from \"axios\"\nimport type {\n AudioInput,\n ProviderCapabilities,\n TranscribeOptions,\n UnifiedTranscriptResponse\n} from \"../router/types\"\nimport { BaseAdapter, type ProviderConfig } from \"./base-adapter\"\n\n// Import generated API client function - FULL TYPE SAFETY!\nimport { createTranscription } from \"../generated/openai/api/openAIAPI\"\n\n// 
Import OpenAI generated types\nimport type { CreateTranscriptionRequest } from \"../generated/openai/schema/createTranscriptionRequest\"\nimport type { CreateTranscriptionResponseVerboseJson } from \"../generated/openai/schema/createTranscriptionResponseVerboseJson\"\nimport type { CreateTranscriptionResponseDiarizedJson } from \"../generated/openai/schema/createTranscriptionResponseDiarizedJson\"\nimport type { AudioTranscriptionModel } from \"../generated/openai/schema/audioTranscriptionModel\"\n\n/**\n * OpenAI Whisper transcription provider adapter\n *\n * Implements transcription for OpenAI's Whisper and GPT-4o transcription models with support for:\n * - Multiple model options: whisper-1, gpt-4o-transcribe, gpt-4o-mini-transcribe, gpt-4o-transcribe-diarize\n * - Speaker diarization (with gpt-4o-transcribe-diarize model)\n * - Word-level timestamps\n * - Multi-language support\n * - Prompt-based style guidance\n * - Known speaker references for improved diarization\n * - Temperature control for output randomness\n *\n * @see https://platform.openai.com/docs/guides/speech-to-text OpenAI Speech-to-Text Documentation\n * @see https://platform.openai.com/docs/api-reference/audio OpenAI Audio API Reference\n *\n * @example Basic transcription\n * ```typescript\n * import { OpenAIWhisperAdapter } from '@meeting-baas/sdk';\n *\n * const adapter = new OpenAIWhisperAdapter();\n * adapter.initialize({\n * apiKey: process.env.OPENAI_API_KEY\n * });\n *\n * const result = await adapter.transcribe({\n * type: 'url',\n * url: 'https://example.com/audio.mp3'\n * }, {\n * language: 'en'\n * });\n *\n * console.log(result.data.text);\n * ```\n *\n * @example With diarization (speaker identification)\n * ```typescript\n * const result = await adapter.transcribe({\n * type: 'url',\n * url: 'https://example.com/meeting.mp3'\n * }, {\n * language: 'en',\n * diarization: true, // Uses gpt-4o-transcribe-diarize model\n * metadata: {\n * model: 'gpt-4o-transcribe-diarize'\n * }\n * });\n *\n * console.log('Speakers:', result.data.speakers);\n * console.log('Utterances:', result.data.utterances);\n * ```\n *\n * @example With word timestamps and custom model\n * ```typescript\n * const result = await adapter.transcribe(audio, {\n * language: 'en',\n * wordTimestamps: true,\n * metadata: {\n * model: 'gpt-4o-transcribe', // More accurate than whisper-1\n * temperature: 0.2, // Lower temperature for more focused output\n * prompt: 'Expect technical terminology related to AI and machine learning'\n * }\n * });\n *\n * console.log('Words:', result.data.words);\n * ```\n *\n * @example With known speakers for improved diarization\n * ```typescript\n * const result = await adapter.transcribe(audio, {\n * language: 'en',\n * diarization: true,\n * metadata: {\n * model: 'gpt-4o-transcribe-diarize',\n * knownSpeakerNames: ['customer', 'agent'],\n * knownSpeakerReferences: [\n * 'data:audio/wav;base64,...', // Customer voice sample\n * 'data:audio/wav;base64,...' 
// Agent voice sample\n * ]\n * }\n * });\n *\n * // Speakers will be labeled as 'customer' and 'agent' instead of 'A' and 'B'\n * console.log('Speakers:', result.data.speakers);\n * ```\n */\nexport class OpenAIWhisperAdapter extends BaseAdapter {\n readonly name = \"openai-whisper\" as const\n readonly capabilities: ProviderCapabilities = {\n streaming: false, // Synchronous only (no streaming API for transcription)\n diarization: true, // Available with gpt-4o-transcribe-diarize model\n wordTimestamps: true,\n languageDetection: false, // Language should be provided for best accuracy\n customVocabulary: false, // Uses prompt instead\n summarization: false,\n sentimentAnalysis: false,\n entityDetection: false,\n piiRedaction: false\n }\n\n protected baseUrl = \"https://api.openai.com/v1\"\n\n /**\n * Get axios config for generated API client functions\n * Configures headers and base URL using Bearer token authorization\n */\n protected getAxiosConfig() {\n return super.getAxiosConfig(\"Authorization\", (apiKey) => `Bearer ${apiKey}`)\n }\n\n /**\n * Submit audio for transcription\n *\n * OpenAI Whisper API processes audio synchronously and returns results immediately.\n * Supports multiple models with different capabilities:\n * - whisper-1: Open source Whisper V2 model\n * - gpt-4o-transcribe: More accurate GPT-4o based transcription\n * - gpt-4o-mini-transcribe: Faster, cost-effective GPT-4o mini\n * - gpt-4o-transcribe-diarize: GPT-4o with speaker diarization\n *\n * @param audio - Audio input (URL or Buffer)\n * @param options - Transcription options\n * @returns Transcription response with full results\n */\n async transcribe(\n audio: AudioInput,\n options?: TranscribeOptions\n ): Promise<UnifiedTranscriptResponse> {\n this.validateConfig()\n\n try {\n // Fetch audio if URL provided\n let audioData: Buffer | Blob\n let fileName = \"audio.mp3\"\n\n if (audio.type === \"url\") {\n const response = await axios.get(audio.url, {\n responseType: \"arraybuffer\"\n })\n audioData = Buffer.from(response.data)\n\n // Extract filename from URL if possible\n const urlPath = new URL(audio.url).pathname\n const extractedName = urlPath.split(\"/\").pop()\n if (extractedName) {\n fileName = extractedName\n }\n } else if (audio.type === \"file\") {\n audioData = audio.file\n fileName = audio.filename || fileName\n } else {\n return {\n success: false,\n provider: this.name,\n error: {\n code: \"INVALID_INPUT\",\n message: \"OpenAI Whisper only supports URL and File audio input (not stream)\"\n }\n }\n }\n\n // Determine model based on options\n const model = this.selectModel(options)\n\n // Set response format based on requirements\n const isDiarization = model === \"gpt-4o-transcribe-diarize\"\n const needsWords = options?.wordTimestamps === true\n\n // Build typed request using generated types\n const request: CreateTranscriptionRequest = {\n file: audioData as any, // Generated type expects Blob\n model: model as AudioTranscriptionModel\n }\n\n // Add optional parameters\n if (options?.language) {\n request.language = options.language\n }\n\n if (options?.metadata?.prompt) {\n request.prompt = options.metadata.prompt as string\n }\n\n if (options?.metadata?.temperature !== undefined) {\n request.temperature = options.metadata.temperature as number\n }\n\n if (isDiarization) {\n // Diarization model returns diarized_json format\n request.response_format = \"diarized_json\"\n\n // Add known speakers if provided\n if (options?.metadata?.knownSpeakerNames) {\n request.known_speaker_names = 
options.metadata.knownSpeakerNames as string[]\n }\n\n if (options?.metadata?.knownSpeakerReferences) {\n request.known_speaker_references = options.metadata.knownSpeakerReferences as string[]\n }\n } else if (needsWords || options?.diarization) {\n // Use verbose_json for word timestamps\n request.response_format = \"verbose_json\"\n\n // Add timestamp granularities\n if (needsWords) {\n request.timestamp_granularities = [\"word\", \"segment\"]\n }\n } else {\n // Simple json format for basic transcription\n request.response_format = \"json\"\n }\n\n // Use generated API client function - FULLY TYPED!\n const response = await createTranscription(request, this.getAxiosConfig())\n\n return this.normalizeResponse(response.data as any, model, isDiarization)\n } catch (error) {\n return this.createErrorResponse(error)\n }\n }\n\n /**\n * OpenAI Whisper returns results synchronously, so getTranscript is not needed.\n * This method exists for interface compatibility but will return an error.\n */\n async getTranscript(transcriptId: string): Promise<UnifiedTranscriptResponse> {\n return {\n success: false,\n provider: this.name,\n error: {\n code: \"NOT_SUPPORTED\",\n message:\n \"OpenAI Whisper processes transcriptions synchronously. Use transcribe() method directly.\"\n }\n }\n }\n\n /**\n * Select appropriate model based on transcription options\n */\n private selectModel(options?: TranscribeOptions): AudioTranscriptionModel {\n // Use model from metadata if provided\n if (options?.metadata?.model) {\n return options.metadata.model as AudioTranscriptionModel\n }\n\n // Auto-select based on diarization requirement\n if (options?.diarization) {\n return \"gpt-4o-transcribe-diarize\"\n }\n\n // Default to gpt-4o-transcribe (better accuracy than whisper-1)\n return \"gpt-4o-transcribe\"\n }\n\n /**\n * Normalize OpenAI response to unified format\n */\n private normalizeResponse(\n response:\n | CreateTranscriptionResponseVerboseJson\n | CreateTranscriptionResponseDiarizedJson\n | { text: string },\n model: AudioTranscriptionModel,\n isDiarization: boolean\n ): UnifiedTranscriptResponse {\n // Handle simple json format\n if (\"text\" in response && Object.keys(response).length === 1) {\n return {\n success: true,\n provider: this.name,\n data: {\n id: `openai-${Date.now()}`,\n text: response.text,\n status: \"completed\",\n language: undefined,\n confidence: undefined\n },\n raw: response\n }\n }\n\n // Handle diarized format\n if (isDiarization && \"segments\" in response) {\n const diarizedResponse = response as CreateTranscriptionResponseDiarizedJson\n\n // Extract unique speakers\n const speakerSet = new Set(diarizedResponse.segments.map((seg) => seg.speaker))\n const speakers = Array.from(speakerSet).map((speaker) => ({\n id: speaker,\n label: speaker // Already labeled by OpenAI (A, B, C or custom names)\n }))\n\n // Build utterances from segments\n const utterances = diarizedResponse.segments.map((segment) => ({\n speaker: segment.speaker,\n text: segment.text,\n start: segment.start,\n end: segment.end,\n confidence: undefined\n }))\n\n return {\n success: true,\n provider: this.name,\n data: {\n id: `openai-${Date.now()}`,\n text: diarizedResponse.text,\n status: \"completed\",\n language: undefined,\n duration: diarizedResponse.duration,\n speakers,\n utterances\n },\n raw: response\n }\n }\n\n // Handle verbose format\n if (\"duration\" in response && \"language\" in response) {\n const verboseResponse = response as CreateTranscriptionResponseVerboseJson\n\n // Extract words if 
available\n const words = verboseResponse.words?.map((word) => ({\n text: word.word,\n start: word.start,\n end: word.end,\n confidence: undefined\n }))\n\n return {\n success: true,\n provider: this.name,\n data: {\n id: `openai-${Date.now()}`,\n text: verboseResponse.text,\n status: \"completed\",\n language: verboseResponse.language,\n duration: verboseResponse.duration,\n words\n },\n raw: response\n }\n }\n\n // Fallback (shouldn't reach here)\n return {\n success: true,\n provider: this.name,\n data: {\n id: `openai-${Date.now()}`,\n text: \"text\" in response ? response.text : \"\",\n status: \"completed\"\n },\n raw: response\n }\n }\n}\n\n/**\n * Factory function to create an OpenAI Whisper adapter\n */\nexport function createOpenAIWhisperAdapter(config: ProviderConfig): OpenAIWhisperAdapter {\n const adapter = new OpenAIWhisperAdapter()\n adapter.initialize(config)\n return adapter\n}\n","/**\n * Generated by orval v7.9.0 🍺\n * Do not edit manually.\n * OpenAI API\n * The OpenAI REST API. Please see https://platform.openai.com/docs/api-reference for more details.\n * OpenAPI spec version: 2.3.0\n */\n\nimport type { AxiosRequestConfig, AxiosResponse } from \"axios\"\nimport axios from \"axios\"\n\nimport type {\n CreateFileRequest,\n CreateSpeechRequest,\n CreateSpeechResponseStreamEvent,\n CreateTranscription200One,\n CreateTranscriptionRequest,\n CreateTranscriptionResponseStreamEvent,\n CreateTranslation200,\n CreateTranslationRequest,\n DeleteFileResponse,\n DeleteModelResponse,\n ListFilesParams,\n ListFilesResponse,\n ListModelsResponse,\n Model,\n OpenAIFile\n} from \"../schema\"\n\n/**\n * Generates audio from the input text.\n * @summary Create speech\n */\nexport const createSpeech = <TData = AxiosResponse<Blob | CreateSpeechResponseStreamEvent>>(\n createSpeechRequest: CreateSpeechRequest,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.post(\"/audio/speech\", createSpeechRequest, options)\n}\n\n/**\n * Transcribes audio into the input language.\n * @summary Create transcription\n */\nexport const createTranscription = <\n TData = AxiosResponse<CreateTranscription200One | CreateTranscriptionResponseStreamEvent>\n>(\n createTranscriptionRequest: CreateTranscriptionRequest,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n const formData = new FormData()\n formData.append(\"file\", createTranscriptionRequest.file)\n formData.append(\"model\", createTranscriptionRequest.model)\n if (createTranscriptionRequest.language !== undefined) {\n formData.append(\"language\", createTranscriptionRequest.language)\n }\n if (createTranscriptionRequest.prompt !== undefined) {\n formData.append(\"prompt\", createTranscriptionRequest.prompt)\n }\n if (createTranscriptionRequest.response_format !== undefined) {\n formData.append(\"response_format\", createTranscriptionRequest.response_format)\n }\n if (createTranscriptionRequest.temperature !== undefined) {\n formData.append(\"temperature\", createTranscriptionRequest.temperature.toString())\n }\n if (createTranscriptionRequest.include !== undefined) {\n createTranscriptionRequest.include.forEach((value) => formData.append(\"include\", value))\n }\n if (createTranscriptionRequest.timestamp_granularities !== undefined) {\n createTranscriptionRequest.timestamp_granularities.forEach((value) =>\n formData.append(\"timestamp_granularities\", value)\n )\n }\n if (\n createTranscriptionRequest.stream !== undefined &&\n createTranscriptionRequest.stream !== null\n ) {\n formData.append(\"stream\", 
createTranscriptionRequest.stream.toString())\n }\n if (\n createTranscriptionRequest.chunking_strategy !== undefined &&\n createTranscriptionRequest.chunking_strategy !== null\n ) {\n formData.append(\n \"chunking_strategy\",\n typeof createTranscriptionRequest.chunking_strategy === \"object\"\n ? JSON.stringify(createTranscriptionRequest.chunking_strategy)\n : createTranscriptionRequest.chunking_strategy\n )\n }\n if (createTranscriptionRequest.known_speaker_names !== undefined) {\n createTranscriptionRequest.known_speaker_names.forEach((value) =>\n formData.append(\"known_speaker_names\", value)\n )\n }\n if (createTranscriptionRequest.known_speaker_references !== undefined) {\n createTranscriptionRequest.known_speaker_references.forEach((value) =>\n formData.append(\"known_speaker_references\", value)\n )\n }\n\n return axios.post(\"/audio/transcriptions\", formData, options)\n}\n\n/**\n * Translates audio into English.\n * @summary Create translation\n */\nexport const createTranslation = <TData = AxiosResponse<CreateTranslation200>>(\n createTranslationRequest: CreateTranslationRequest,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n const formData = new FormData()\n formData.append(\"file\", createTranslationRequest.file)\n formData.append(\"model\", createTranslationRequest.model)\n if (createTranslationRequest.prompt !== undefined) {\n formData.append(\"prompt\", createTranslationRequest.prompt)\n }\n if (createTranslationRequest.response_format !== undefined) {\n formData.append(\"response_format\", createTranslationRequest.response_format)\n }\n if (createTranslationRequest.temperature !== undefined) {\n formData.append(\"temperature\", createTranslationRequest.temperature.toString())\n }\n\n return axios.post(\"/audio/translations\", formData, options)\n}\n\n/**\n * Returns a list of files.\n * @summary List files\n */\nexport const listFiles = <TData = AxiosResponse<ListFilesResponse>>(\n params?: ListFilesParams,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/files\", {\n ...options,\n params: { ...params, ...options?.params }\n })\n}\n\n/**\n * Upload a file that can be used across various endpoints. Individual files\ncan be up to 512 MB, and the size of all files uploaded by one organization\ncan be up to 1 TB.\n\n- The Assistants API supports files up to 2 million tokens and of specific\n file types. See the [Assistants Tools guide](https://platform.openai.com/docs/assistants/tools) for\n details.\n- The Fine-tuning API only supports `.jsonl` files. The input also has\n certain required formats for fine-tuning\n [chat](https://platform.openai.com/docs/api-reference/fine-tuning/chat-input) or\n [completions](https://platform.openai.com/docs/api-reference/fine-tuning/completions-input) models.\n- The Batch API only supports `.jsonl` files up to 200 MB in size. 
The input\n also has a specific required\n [format](https://platform.openai.com/docs/api-reference/batch/request-input).\n\nPlease [contact us](https://help.openai.com/) if you need to increase these\nstorage limits.\n\n * @summary Upload file\n */\nexport const createFile = <TData = AxiosResponse<OpenAIFile>>(\n createFileRequest: CreateFileRequest,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n const formData = new FormData()\n formData.append(\"file\", createFileRequest.file)\n formData.append(\"purpose\", createFileRequest.purpose)\n if (createFileRequest.expires_after !== undefined) {\n formData.append(\"expires_after\", JSON.stringify(createFileRequest.expires_after))\n }\n\n return axios.post(\"/files\", formData, options)\n}\n\n/**\n * Delete a file and remove it from all vector stores.\n * @summary Delete file\n */\nexport const deleteFile = <TData = AxiosResponse<DeleteFileResponse>>(\n fileId: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.delete(`/files/${fileId}`, options)\n}\n\n/**\n * Returns information about a specific file.\n * @summary Retrieve file\n */\nexport const retrieveFile = <TData = AxiosResponse<OpenAIFile>>(\n fileId: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/files/${fileId}`, options)\n}\n\n/**\n * Returns the contents of the specified file.\n * @summary Retrieve file content\n */\nexport const downloadFile = <TData = AxiosResponse<string>>(\n fileId: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/files/${fileId}/content`, options)\n}\n\n/**\n * Lists the currently available models, and provides basic information about each one such as the owner and availability.\n * @summary List models\n */\nexport const listModels = <TData = AxiosResponse<ListModelsResponse>>(\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(\"/models\", options)\n}\n\n/**\n * Retrieves a model instance, providing basic information about the model such as the owner and permissioning.\n * @summary Retrieve model\n */\nexport const retrieveModel = <TData = AxiosResponse<Model>>(\n model: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.get(`/models/${model}`, options)\n}\n\n/**\n * Delete a fine-tuned model. 
You must have the Owner role in your organization to delete a model.\n * @summary Delete a fine-tuned model\n */\nexport const deleteModel = <TData = AxiosResponse<DeleteModelResponse>>(\n model: string,\n options?: AxiosRequestConfig\n): Promise<TData> => {\n return axios.delete(`/models/${model}`, options)\n}\n\nexport type CreateSpeechResult = AxiosResponse<Blob | CreateSpeechResponseStreamEvent>\nexport type CreateTranscriptionResult = AxiosResponse<\n CreateTranscription200One | CreateTranscriptionResponseStreamEvent\n>\nexport type CreateTranslationResult = AxiosResponse<CreateTranslation200>\nexport type ListFilesResult = AxiosResponse<ListFilesResponse>\nexport type CreateFileResult = AxiosResponse<OpenAIFile>\nexport type DeleteFileResult = AxiosResponse<DeleteFileResponse>\nexport type RetrieveFileResult = AxiosResponse<OpenAIFile>\nexport type DownloadFileResult = AxiosResponse<string>\nexport type ListModelsResult = AxiosResponse<ListModelsResponse>\nexport type RetrieveModelResult = AxiosResponse<Model>\nexport type DeleteModelResult = AxiosResponse<DeleteModelResponse>\n","/**\n * Speechmatics transcription provider adapter\n * Documentation: https://docs.speechmatics.com/\n */\n\nimport axios, { type AxiosInstance } from \"axios\"\nimport type {\n AudioInput,\n ProviderCapabilities,\n TranscribeOptions,\n UnifiedTranscriptResponse\n} from \"../router/types\"\nimport { BaseAdapter, type ProviderConfig } from \"./base-adapter\"\n\n// Import Speechmatics types (manual definitions - OpenAPI spec doesn't match actual API)\nimport type {\n JobConfig,\n JobSubmitResponse,\n JobDetailsResponse,\n TranscriptionResponse\n} from \"../types/speechmatics\"\n\n/**\n * Speechmatics transcription provider adapter\n *\n * Implements transcription for Speechmatics API with support for:\n * - Batch transcription (async processing)\n * - Speaker diarization\n * - Enhanced accuracy models\n * - Multi-language support\n * - Sentiment analysis\n * - Summarization\n * - Custom vocabulary\n *\n * Note: Types are manually defined due to validation errors in the official OpenAPI spec.\n * See src/generated/speechmatics/schema/index.ts for type definitions.\n *\n * @see https://docs.speechmatics.com/ Speechmatics Documentation\n * @see https://docs.speechmatics.com/introduction/batch-guide Batch API Guide\n *\n * @example Basic transcription\n * ```typescript\n * import { SpeechmaticsAdapter } from '@meeting-baas/sdk';\n *\n * const adapter = new SpeechmaticsAdapter();\n * adapter.initialize({\n * apiKey: process.env.SPEECHMATICS_API_KEY\n * });\n *\n * const result = await adapter.transcribe({\n * type: 'url',\n * url: 'https://example.com/audio.mp3'\n * }, {\n * language: 'en'\n * });\n *\n * console.log(result.data.text);\n * ```\n *\n * @example With enhanced accuracy and diarization\n * ```typescript\n * const result = await adapter.transcribe({\n * type: 'url',\n * url: 'https://example.com/meeting.mp3'\n * }, {\n * language: 'en',\n * diarization: true,\n * metadata: {\n * operating_point: 'enhanced' // Higher accuracy model\n * }\n * });\n *\n * console.log('Speakers:', result.data.speakers);\n * console.log('Utterances:', result.data.utterances);\n * ```\n *\n * @example Async with polling\n * ```typescript\n * // Submit transcription\n * const submission = await adapter.transcribe({\n * type: 'url',\n * url: 'https://example.com/audio.mp3'\n * }, {\n * language: 'en',\n * summarization: true\n * });\n *\n * const jobId = submission.data?.id;\n * console.log('Job ID:', jobId);\n *\n * // Poll 
for completion\n * const poll = async () => {\n * const status = await adapter.getTranscript(jobId);\n * if (status.data?.status === 'completed') {\n * console.log('Transcript:', status.data.text);\n * console.log('Summary:', status.data.summary);\n * } else if (status.data?.status === 'processing') {\n * setTimeout(poll, 3000);\n * }\n * };\n * await poll();\n * ```\n */\nexport class SpeechmaticsAdapter extends BaseAdapter {\n readonly name = \"speechmatics\" as const\n readonly capabilities: ProviderCapabilities = {\n streaming: false, // Batch only (streaming available via separate WebSocket API)\n diarization: true,\n wordTimestamps: true,\n languageDetection: false,\n customVocabulary: true,\n summarization: true,\n sentimentAnalysis: true,\n entityDetection: true,\n piiRedaction: false\n }\n\n private client?: AxiosInstance\n protected baseUrl = \"https://asr.api.speechmatics.com/v2\"\n\n initialize(config: ProviderConfig): void {\n super.initialize(config)\n\n this.baseUrl = config.baseUrl || this.baseUrl\n\n this.client = axios.create({\n baseURL: this.baseUrl,\n timeout: config.timeout || 120000,\n headers: {\n Authorization: `Bearer ${config.apiKey}`,\n ...config.headers\n }\n })\n }\n\n /**\n * Submit audio for transcription\n *\n * Speechmatics uses async batch processing. Returns a job ID immediately.\n * Poll getTranscript() to retrieve results.\n *\n * @param audio - Audio input (URL or file)\n * @param options - Transcription options\n * @returns Job submission response with ID for polling\n */\n async transcribe(\n audio: AudioInput,\n options?: TranscribeOptions\n ): Promise<UnifiedTranscriptResponse> {\n this.validateConfig()\n\n try {\n // Build job config\n const jobConfig: JobConfig = {\n type: \"transcription\",\n transcription_config: {\n language: options?.language || \"en\",\n operating_point:\n (options?.metadata?.operating_point as \"standard\" | \"enhanced\") || \"standard\"\n }\n }\n\n // Add diarization if requested\n if (options?.diarization) {\n if (!jobConfig.transcription_config) {\n jobConfig.transcription_config = {}\n }\n jobConfig.transcription_config.diarization = \"speaker\"\n if (options.speakersExpected) {\n jobConfig.transcription_config.speaker_diarization_config = {\n max_speakers: options.speakersExpected\n }\n }\n }\n\n // Add sentiment analysis\n if (options?.sentimentAnalysis) {\n if (!jobConfig.transcription_config) {\n jobConfig.transcription_config = {}\n }\n jobConfig.transcription_config.enable_sentiment_analysis = true\n }\n\n // Add summarization\n if (options?.summarization && options?.metadata?.summary_type) {\n if (!jobConfig.transcription_config) {\n jobConfig.transcription_config = {}\n }\n jobConfig.transcription_config.summarization_config = {\n type: options.metadata.summary_type as \"bullets\" | \"brief\" | \"paragraph\",\n length: (options.metadata.summary_length as \"short\" | \"medium\" | \"long\") || \"medium\"\n }\n }\n\n // Add custom vocabulary\n if (options?.customVocabulary && options.customVocabulary.length > 0) {\n if (!jobConfig.transcription_config) {\n jobConfig.transcription_config = {}\n }\n jobConfig.transcription_config.additional_vocab = options.customVocabulary\n }\n\n // Handle audio input\n let requestBody: FormData | Record<string, any>\n let headers: Record<string, string> = {}\n\n if (audio.type === \"url\") {\n // Use fetch_data for URL input (JSON request)\n jobConfig.fetch_data = {\n url: audio.url\n }\n requestBody = { config: JSON.stringify(jobConfig) }\n headers = { \"Content-Type\": 
\"application/json\" }\n } else if (audio.type === \"file\") {\n // Upload file directly with multipart form\n requestBody = {\n config: JSON.stringify(jobConfig),\n data_file: audio.file\n }\n headers = { \"Content-Type\": \"multipart/form-data\" }\n } else {\n return {\n success: false,\n provider: this.name,\n error: {\n code: \"INVALID_INPUT\",\n message: \"Speechmatics only supports URL and File audio input\"\n }\n }\n }\n\n // Submit job\n const response = await this.client!.post<JobSubmitResponse>(\"/jobs\", requestBody, { headers })\n\n return {\n success: true,\n provider: this.name,\n data: {\n id: response.data.id,\n text: \"\",\n status: \"queued\",\n createdAt: response.data.created_at\n },\n raw: response.data\n }\n } catch (error) {\n return this.createErrorResponse(error)\n }\n }\n\n /**\n * Get transcription result by job ID\n *\n * Poll this method to check job status and retrieve completed transcription.\n *\n * @param transcriptId - Job ID from Speechmatics\n * @returns Transcription response with status and results\n */\n async getTranscript(transcriptId: string): Promise<UnifiedTranscriptResponse> {\n this.validateConfig()\n\n try {\n // Check job status first\n const statusResponse = await this.client!.get<JobDetailsResponse>(`/jobs/${transcriptId}`)\n\n const status = this.normalizeStatus(statusResponse.data.job.status)\n\n if (status !== \"completed\") {\n return {\n success: true,\n provider: this.name,\n data: {\n id: transcriptId,\n text: \"\",\n status,\n createdAt: statusResponse.data.job.created_at\n },\n raw: statusResponse.data\n }\n }\n\n // Get transcript if completed\n const transcriptResponse = await this.client!.get<TranscriptionResponse>(\n `/jobs/${transcriptId}/transcript`\n )\n\n return this.normalizeResponse(transcriptResponse.data)\n } catch (error) {\n return this.createErrorResponse(error)\n }\n }\n\n /**\n * Normalize Speechmatics status to unified status\n */\n private normalizeStatus(status: string): \"queued\" | \"processing\" | \"completed\" | \"error\" {\n switch (status) {\n case \"running\":\n return \"processing\"\n case \"done\":\n return \"completed\"\n case \"rejected\":\n case \"expired\":\n return \"error\"\n default:\n return \"queued\"\n }\n }\n\n /**\n * Normalize Speechmatics response to unified format\n */\n private normalizeResponse(response: TranscriptionResponse): UnifiedTranscriptResponse {\n // Extract full text from results\n const text = response.results\n .filter((r) => r.type === \"word\" && r.alternatives)\n .map((r) => r.alternatives![0]?.content || \"\")\n .join(\" \")\n\n // Extract words with timestamps (filter out items without required timestamps)\n const words = response.results\n .filter((r) => r.type === \"word\" && r.start_time !== undefined && r.end_time !== undefined)\n .map((result) => ({\n text: result.alternatives?.[0]?.content || \"\",\n start: result.start_time!,\n end: result.end_time!,\n confidence: result.alternatives?.[0]?.confidence,\n speaker: result.alternatives?.[0]?.speaker\n }))\n\n // Extract speakers if diarization was enabled\n const speakerSet = new Set<string>()\n response.results.forEach((r) => {\n if (r.alternatives) {\n const speaker = r.alternatives[0]?.speaker\n if (speaker) speakerSet.add(speaker)\n }\n })\n\n const speakers =\n speakerSet.size > 0\n ? 
Array.from(speakerSet).map((id) => ({\n id,\n label: `Speaker ${id}`\n }))\n : undefined\n\n // Build utterances from speaker changes\n const utterances: Array<{\n speaker: string\n text: string\n start: number\n end: number\n }> = []\n\n if (speakers) {\n let currentSpeaker: string | undefined\n let currentUtterance: string[] = []\n let utteranceStart = 0\n\n response.results\n .filter((r) => r.type === \"word\" && r.alternatives)\n .forEach((result, idx) => {\n const speaker = result.alternatives![0]?.speaker\n const word = result.alternatives![0]?.content || \"\"\n\n if (speaker !== currentSpeaker) {\n // Speaker changed - save previous utterance\n if (currentSpeaker && currentUtterance.length > 0) {\n const prevResult = response.results.filter((r) => r.type === \"word\")[idx - 1]\n utterances.push({\n speaker: currentSpeaker,\n text: currentUtterance.join(\" \"),\n start: utteranceStart || 0,\n end: prevResult?.end_time || result.start_time || 0\n })\n }\n\n // Start new utterance\n currentSpeaker = speaker\n currentUtterance = [word]\n utteranceStart = result.start_time || 0\n } else {\n currentUtterance.push(word)\n }\n })\n\n // Add final utterance\n if (currentSpeaker && currentUtterance.length > 0) {\n const lastWord = response.results.filter((r) => r.type === \"word\").pop()\n utterances.push({\n speaker: currentSpeaker,\n text: currentUtterance.join(\" \"),\n start: utteranceStart,\n end: lastWord?.end_time || utteranceStart\n })\n }\n }\n\n return {\n success: true,\n provider: this.name,\n data: {\n id: response.job.id,\n text,\n status: \"completed\",\n language: response.metadata.transcription_config?.language,\n duration: response.job.duration,\n speakers,\n words: words.length > 0 ? words : undefined,\n utterances: utterances.length > 0 ? 
utterances : undefined,\n summary: response.summary?.content,\n createdAt: response.job.created_at\n },\n raw: response\n }\n }\n}\n\n/**\n * Factory function to create a Speechmatics adapter\n */\nexport function createSpeechmaticsAdapter(config: ProviderConfig): SpeechmaticsAdapter {\n const adapter = new SpeechmaticsAdapter()\n adapter.initialize(config)\n return adapter\n}\n","/**\n * Base webhook handler interface\n * All provider-specific webhook handlers must implement this\n */\n\nimport type { UnifiedWebhookEvent, WebhookValidation, WebhookVerificationOptions } from \"./types\"\nimport type { TranscriptionProvider } from \"../router/types\"\n\n/**\n * Abstract base class for webhook handlers\n *\n * Each provider implements this to parse and normalize their webhook payloads\n */\nexport abstract class BaseWebhookHandler {\n /** Provider name */\n abstract readonly provider: TranscriptionProvider\n\n /**\n * Check if this payload matches this provider's webhook format\n *\n * Used for auto-detection of webhook provider\n *\n * @param payload - Raw webhook payload\n * @param options - Optional context (query params, headers, etc.)\n * @returns true if this handler can process the payload\n *\n * @example\n * ```typescript\n * matches(payload, options) {\n * return typeof payload === 'object' &&\n * 'event' in payload &&\n * 'payload' in payload\n * }\n * ```\n */\n abstract matches(\n payload: unknown,\n options?: { queryParams?: Record<string, string>; userAgent?: string }\n ): boolean\n\n /**\n * Parse and normalize webhook payload\n *\n * Converts provider-specific webhook format to UnifiedWebhookEvent\n *\n * @param payload - Raw webhook payload\n * @param options - Optional context (query params, headers, etc.)\n * @returns Normalized webhook event\n * @throws Error if payload cannot be parsed\n *\n * @example\n * ```typescript\n * parse(payload, options) {\n * const typed = payload as ProviderWebhookPayload\n * return {\n * success: true,\n * provider: this.provider,\n * eventType: 'transcription.completed',\n * data: { id: typed.job_id, ... 
},\n * timestamp: new Date().toISOString(),\n * raw: payload\n * }\n * }\n * ```\n */\n abstract parse(\n payload: unknown,\n options?: { queryParams?: Record<string, string> }\n ): UnifiedWebhookEvent\n\n /**\n * Verify webhook signature (if provider supports it)\n *\n * Optional method - implement if provider supports webhook signature verification\n *\n * @param payload - Raw webhook payload\n * @param options - Verification options (signature, secret, etc.)\n * @returns true if signature is valid\n *\n * @example\n * ```typescript\n * verify(payload, options) {\n * if (!options.signature || !options.secret) return false\n *\n * const computed = crypto\n * .createHmac('sha256', options.secret)\n * .update(JSON.stringify(payload))\n * .digest('hex')\n *\n * return computed === options.signature\n * }\n * ```\n */\n verify?(payload: unknown, options: WebhookVerificationOptions): boolean\n\n /**\n * Validate webhook payload structure\n *\n * Checks if payload has required fields and correct types\n *\n * @param payload - Raw webhook payload\n * @param options - Optional context (query params, headers, etc.)\n * @returns Validation result with details\n */\n validate(\n payload: unknown,\n options?: { queryParams?: Record<string, string>; userAgent?: string }\n ): WebhookValidation {\n try {\n // Check if this handler matches the payload\n if (!this.matches(payload, options)) {\n return {\n valid: false,\n error: `Payload does not match ${this.provider} webhook format`\n }\n }\n\n // Try to parse the payload\n const event = this.parse(payload, options)\n\n // Basic validation\n if (!event.provider || !event.eventType) {\n return {\n valid: false,\n error: \"Parsed event missing required fields\"\n }\n }\n\n return {\n valid: true,\n provider: this.provider,\n details: {\n eventType: event.eventType,\n success: event.success\n }\n }\n } catch (error) {\n return {\n valid: false,\n error: error instanceof Error ? 
error.message : \"Unknown error\",\n details: { error }\n }\n }\n }\n\n /**\n * Helper method to create error response\n */\n protected createErrorEvent(payload: unknown, errorMessage: string): UnifiedWebhookEvent {\n return {\n success: false,\n provider: this.provider,\n eventType: \"transcription.failed\",\n data: {\n id: \"\",\n status: \"error\",\n error: errorMessage\n },\n timestamp: new Date().toISOString(),\n raw: payload\n }\n }\n}\n","/**\n * Gladia webhook handler\n * Parses and normalizes Gladia webhook callbacks\n */\n\nimport type { WebhookTranscriptionSuccessPayload } from \"../generated/gladia/schema/webhookTranscriptionSuccessPayload\"\nimport type { WebhookTranscriptionErrorPayload } from \"../generated/gladia/schema/webhookTranscriptionErrorPayload\"\nimport type { WebhookTranscriptionCreatedPayload } from \"../generated/gladia/schema/webhookTranscriptionCreatedPayload\"\nimport { BaseWebhookHandler } from \"./base-webhook\"\nimport type { UnifiedWebhookEvent } from \"./types\"\nimport type { TranscriptionProvider } from \"../router/types\"\n\n/**\n * Gladia webhook handler\n *\n * Handles webhook callbacks from Gladia API:\n * - transcription.created - Job created and queued\n * - transcription.success - Job completed successfully\n * - transcription.error - Job failed with error\n *\n * @example\n * ```typescript\n * import { GladiaWebhookHandler } from '@meeting-baas/sdk';\n *\n * const handler = new GladiaWebhookHandler();\n *\n * // Validate webhook\n * const validation = handler.validate(req.body);\n * if (!validation.valid) {\n * return res.status(400).json({ error: validation.error });\n * }\n *\n * // Parse webhook\n * const event = handler.parse(req.body);\n * console.log('Event type:', event.eventType);\n * console.log('Job ID:', event.data?.id);\n *\n * if (event.eventType === 'transcription.completed') {\n * console.log('Transcript:', event.data?.text);\n * }\n * ```\n */\nexport class GladiaWebhookHandler extends BaseWebhookHandler {\n readonly provider: TranscriptionProvider = \"gladia\"\n\n /**\n * Check if payload matches Gladia webhook format\n */\n matches(\n payload: unknown,\n _options?: { queryParams?: Record<string, string>; userAgent?: string }\n ): boolean {\n if (!payload || typeof payload !== \"object\") {\n return false\n }\n\n const obj = payload as Record<string, unknown>\n\n // Gladia webhooks have \"event\" and \"payload\" fields\n if (!(\"event\" in obj) || !(\"payload\" in obj)) {\n return false\n }\n\n // Event should be a string starting with \"transcription.\"\n if (typeof obj.event !== \"string\") {\n return false\n }\n\n if (!obj.event.startsWith(\"transcription.\")) {\n return false\n }\n\n // Payload should be an object with \"id\" field\n if (!obj.payload || typeof obj.payload !== \"object\") {\n return false\n }\n\n const payloadObj = obj.payload as Record<string, unknown>\n return typeof payloadObj.id === \"string\"\n }\n\n /**\n * Parse Gladia webhook payload to unified format\n */\n parse(\n payload: unknown,\n _options?: { queryParams?: Record<string, string> }\n ): UnifiedWebhookEvent {\n if (!this.matches(payload)) {\n return this.createErrorEvent(payload, \"Invalid Gladia webhook payload\")\n }\n\n const webhookPayload = payload as\n | WebhookTranscriptionSuccessPayload\n | WebhookTranscriptionErrorPayload\n | WebhookTranscriptionCreatedPayload\n\n const jobId = webhookPayload.payload.id\n const event = webhookPayload.event\n\n // Handle different event types\n if (event === \"transcription.created\") {\n return {\n 
success: true,\n provider: this.provider,\n eventType: \"transcription.created\",\n data: {\n id: jobId,\n status: \"queued\"\n },\n timestamp: new Date().toISOString(),\n raw: payload\n }\n }\n\n if (event === \"transcription.success\") {\n // For success events, we need to fetch the full result\n // The webhook only contains the job ID, not the transcript\n return {\n success: true,\n provider: this.provider,\n eventType: \"transcription.completed\",\n data: {\n id: jobId,\n status: \"completed\"\n // Note: Full transcript data needs to be fetched via API\n // using GladiaAdapter.getTranscript(jobId)\n },\n timestamp: new Date().toISOString(),\n raw: payload\n }\n }\n\n if (event === \"transcription.error\") {\n return {\n success: false,\n provider: this.provider,\n eventType: \"transcription.failed\",\n data: {\n id: jobId,\n status: \"error\",\n error: \"Transcription failed\"\n },\n timestamp: new Date().toISOString(),\n raw: payload\n }\n }\n\n // Unknown event type\n return this.createErrorEvent(payload, `Unknown Gladia webhook event: ${event}`)\n }\n\n /**\n * Verify Gladia webhook signature\n *\n * Note: As of the current API version, Gladia does not provide\n * webhook signature verification. This method is a placeholder\n * for future implementation.\n *\n * @param payload - Webhook payload\n * @param options - Verification options\n * @returns Always returns true (no verification available)\n */\n verify(): boolean {\n // Gladia does not currently support webhook signature verification\n // Return true to indicate no verification is required\n return true\n }\n}\n\n/**\n * Factory function to create a Gladia webhook handler\n */\nexport function createGladiaWebhookHandler(): GladiaWebhookHandler {\n return new GladiaWebhookHandler()\n}\n","/**\n * AssemblyAI webhook handler\n * Parses and normalizes AssemblyAI webhook callbacks\n */\n\nimport type { TranscriptReadyNotification } from \"../generated/assemblyai/schema/transcriptReadyNotification\"\nimport { BaseWebhookHandler } from \"./base-webhook\"\nimport type { UnifiedWebhookEvent, WebhookVerificationOptions } from \"./types\"\nimport type { TranscriptionProvider } from \"../router/types\"\nimport crypto from \"node:crypto\"\n\n/**\n * AssemblyAI webhook handler\n *\n * Handles webhook callbacks from AssemblyAI API:\n * - completed - Transcription completed successfully\n * - error - Transcription failed with error\n *\n * AssemblyAI supports webhook signature verification using HMAC-SHA256.\n *\n * @example Basic usage\n * ```typescript\n * import { AssemblyAIWebhookHandler } from '@meeting-baas/sdk';\n *\n * const handler = new AssemblyAIWebhookHandler();\n *\n * // Validate webhook\n * const validation = handler.validate(req.body);\n * if (!validation.valid) {\n * return res.status(400).json({ error: validation.error });\n * }\n *\n * // Parse webhook\n * const event = handler.parse(req.body);\n * if (event.eventType === 'transcription.completed') {\n * console.log('Transcript ID:', event.data?.id);\n * }\n * ```\n *\n * @example With signature verification\n * ```typescript\n * // Verify webhook signature\n * const isValid = handler.verify(req.body, {\n * signature: req.headers['x-assemblyai-signature'],\n * secret: process.env.ASSEMBLYAI_WEBHOOK_SECRET,\n * rawBody: req.rawBody\n * });\n *\n * if (!isValid) {\n * return res.status(401).json({ error: 'Invalid signature' });\n * }\n * ```\n */\nexport class AssemblyAIWebhookHandler extends BaseWebhookHandler {\n readonly provider: TranscriptionProvider = 
\"assemblyai\"\n\n /**\n * Check if payload matches AssemblyAI webhook format\n */\n matches(\n payload: unknown,\n _options?: { queryParams?: Record<string, string>; userAgent?: string }\n ): boolean {\n if (!payload || typeof payload !== \"object\") {\n return false\n }\n\n const obj = payload as Record<string, unknown>\n\n // AssemblyAI webhooks have \"transcript_id\" and \"status\" fields\n if (!(\"transcript_id\" in obj) || !(\"status\" in obj)) {\n return false\n }\n\n // transcript_id should be a string\n if (typeof obj.transcript_id !== \"string\") {\n return false\n }\n\n // status should be \"completed\" or \"error\"\n if (obj.status !== \"completed\" && obj.status !== \"error\") {\n return false\n }\n\n return true\n }\n\n /**\n * Parse AssemblyAI webhook payload to unified format\n */\n parse(\n payload: unknown,\n _options?: { queryParams?: Record<string, string> }\n ): UnifiedWebhookEvent {\n if (!this.matches(payload)) {\n return this.createErrorEvent(payload, \"Invalid AssemblyAI webhook payload\")\n }\n\n const notification = payload as TranscriptReadyNotification\n const transcriptId = notification.transcript_id\n const status = notification.status\n\n if (status === \"completed\") {\n return {\n success: true,\n provider: this.provider,\n eventType: \"transcription.completed\",\n data: {\n id: transcriptId,\n status: \"completed\"\n // Note: Full transcript data needs to be fetched via API\n // using AssemblyAIAdapter.getTranscript(transcriptId)\n },\n timestamp: new Date().toISOString(),\n raw: payload\n }\n }\n\n if (status === \"error\") {\n return {\n success: false,\n provider: this.provider,\n eventType: \"transcription.failed\",\n data: {\n id: transcriptId,\n status: \"error\",\n error: \"Transcription failed\"\n },\n timestamp: new Date().toISOString(),\n raw: payload\n }\n }\n\n // Unknown status\n return this.createErrorEvent(payload, `Unknown AssemblyAI status: ${status}`)\n }\n\n /**\n * Verify AssemblyAI webhook signature\n *\n * AssemblyAI uses HMAC-SHA256 for webhook signature verification.\n * The signature is sent in the X-AssemblyAI-Signature header.\n *\n * @param payload - Webhook payload\n * @param options - Verification options with signature and secret\n * @returns true if signature is valid\n *\n * @example\n * ```typescript\n * const isValid = handler.verify(req.body, {\n * signature: req.headers['x-assemblyai-signature'],\n * secret: process.env.ASSEMBLYAI_WEBHOOK_SECRET,\n * rawBody: req.rawBody // Raw request body as string or Buffer\n * });\n * ```\n */\n verify(payload: unknown, options: WebhookVerificationOptions): boolean {\n // Need signature and secret to verify\n if (!options.signature || !options.secret) {\n return false\n }\n\n try {\n // Use raw body if provided, otherwise stringify payload\n const body =\n options.rawBody || (typeof payload === \"string\" ? payload : JSON.stringify(payload))\n\n // Compute HMAC-SHA256 signature\n const hmac = crypto.createHmac(\"sha256\", options.secret)\n const bodyBuffer = typeof body === \"string\" ? 
Buffer.from(body) : body\n hmac.update(bodyBuffer)\n const computedSignature = hmac.digest(\"hex\")\n\n // Compare signatures (constant-time comparison)\n return crypto.timingSafeEqual(Buffer.from(options.signature), Buffer.from(computedSignature))\n } catch (error) {\n // If any error occurs during verification, treat as invalid\n return false\n }\n }\n}\n\n/**\n * Factory function to create an AssemblyAI webhook handler\n */\nexport function createAssemblyAIWebhookHandler(): AssemblyAIWebhookHandler {\n return new AssemblyAIWebhookHandler()\n}\n","/**\n * Deepgram webhook handler\n * Parses and normalizes Deepgram webhook callbacks\n */\n\nimport type { ListenV1Response } from \"../generated/deepgram/schema/listenV1Response\"\nimport { BaseWebhookHandler } from \"./base-webhook\"\nimport type { UnifiedWebhookEvent } from \"./types\"\nimport type { TranscriptionProvider } from \"../router/types\"\n\n/**\n * Deepgram webhook handler\n *\n * Handles webhook callbacks from Deepgram API.\n * Deepgram sends the full transcription response to the callback URL\n * when transcription is complete.\n *\n * Note: Deepgram does not provide webhook signature verification.\n * For security, use HTTPS and validate the request source.\n *\n * @example Basic usage\n * ```typescript\n * import { DeepgramWebhookHandler } from '@meeting-baas/sdk';\n *\n * const handler = new DeepgramWebhookHandler();\n *\n * // Validate webhook\n * const validation = handler.validate(req.body);\n * if (!validation.valid) {\n * return res.status(400).json({ error: validation.error });\n * }\n *\n * // Parse webhook\n * const event = handler.parse(req.body);\n * console.log('Event type:', event.eventType);\n * console.log('Transcript:', event.data?.text);\n * console.log('Speakers:', event.data?.speakers);\n * ```\n *\n * @example Processing completed transcription\n * ```typescript\n * const event = handler.parse(req.body);\n *\n * if (event.eventType === 'transcription.completed') {\n * console.log('Request ID:', event.data?.id);\n * console.log('Transcript:', event.data?.text);\n * console.log('Duration:', event.data?.duration);\n * console.log('Confidence:', event.data?.confidence);\n *\n * // Access word-level timestamps\n * event.data?.words?.forEach(word => {\n * console.log(`${word.text}: ${word.start}s - ${word.end}s`);\n * });\n *\n * // Access speaker diarization\n * event.data?.speakers?.forEach(speaker => {\n * console.log(`Speaker ${speaker.speaker}: ${speaker.text}`);\n * });\n * }\n * ```\n */\nexport class DeepgramWebhookHandler extends BaseWebhookHandler {\n readonly provider: TranscriptionProvider = \"deepgram\"\n\n /**\n * Check if payload matches Deepgram webhook format\n */\n matches(\n payload: unknown,\n _options?: { queryParams?: Record<string, string>; userAgent?: string }\n ): boolean {\n if (!payload || typeof payload !== \"object\") {\n return false\n }\n\n const obj = payload as Record<string, unknown>\n\n // Deepgram callbacks have \"metadata\" and \"results\" fields\n if (!(\"metadata\" in obj) || !(\"results\" in obj)) {\n return false\n }\n\n // metadata should be an object with \"request_id\"\n if (!obj.metadata || typeof obj.metadata !== \"object\") {\n return false\n }\n\n const metadata = obj.metadata as Record<string, unknown>\n if (!(\"request_id\" in metadata)) {\n return false\n }\n\n // results should be an object with \"channels\"\n if (!obj.results || typeof obj.results !== \"object\") {\n return false\n }\n\n const results = obj.results as Record<string, unknown>\n return 
\"channels\" in results\n }\n\n /**\n * Parse Deepgram webhook payload to unified format\n */\n parse(\n payload: unknown,\n _options?: { queryParams?: Record<string, string> }\n ): UnifiedWebhookEvent {\n if (!this.matches(payload)) {\n return this.createErrorEvent(payload, \"Invalid Deepgram webhook payload\")\n }\n\n const response = payload as ListenV1Response\n\n try {\n // Extract basic info\n const requestId = response.metadata.request_id\n const duration = response.metadata.duration\n const channels = response.results.channels || []\n\n // Deepgram can have multiple channels, we'll use the first one\n if (channels.length === 0) {\n return {\n success: false,\n provider: this.provider,\n eventType: \"transcription.failed\",\n data: {\n id: requestId || \"\",\n status: \"error\",\n error: \"No channels in response\"\n },\n timestamp: new Date().toISOString(),\n raw: payload\n }\n }\n\n const channel = channels[0]\n const alternatives = channel.alternatives || []\n\n if (alternatives.length === 0) {\n return {\n success: false,\n provider: this.provider,\n eventType: \"transcription.failed\",\n data: {\n id: requestId || \"\",\n status: \"error\",\n error: \"No alternatives in response\"\n },\n timestamp: new Date().toISOString(),\n raw: payload\n }\n }\n\n const alternative = alternatives[0]\n const transcript = alternative.transcript\n\n // Check if transcription was successful\n if (!transcript) {\n return {\n success: false,\n provider: this.provider,\n eventType: \"transcription.failed\",\n data: {\n id: requestId || \"\",\n status: \"error\",\n error: \"Empty transcript\"\n },\n timestamp: new Date().toISOString(),\n raw: payload\n }\n }\n\n // Extract words (if available)\n const words =\n alternative.words && alternative.words.length > 0\n ? alternative.words.map((word) => ({\n text: word.word || \"\",\n start: word.start || 0,\n end: word.end || 0,\n confidence: word.confidence\n }))\n : undefined\n\n // Extract speakers from utterances (if available)\n const speakers =\n response.results.utterances && response.results.utterances.length > 0\n ? response.results.utterances.map((utterance) => ({\n id: utterance.speaker?.toString() || \"unknown\",\n speaker: utterance.speaker?.toString() || \"unknown\",\n text: utterance.transcript || \"\",\n confidence: utterance.confidence\n }))\n : undefined\n\n // Extract utterances (if available)\n const utterances =\n response.results.utterances && response.results.utterances.length > 0\n ? response.results.utterances.map((utterance) => ({\n text: utterance.transcript || \"\",\n start: utterance.start || 0,\n end: utterance.end || 0,\n speaker: utterance.speaker?.toString(),\n confidence: utterance.confidence,\n words:\n utterance.words && utterance.words.length > 0\n ? utterance.words.map((word) => ({\n text: word.word || \"\",\n start: word.start || 0,\n end: word.end || 0,\n confidence: word.confidence\n }))\n : undefined\n }))\n : undefined\n\n // Extract summary (if available)\n const summary = alternative.summaries?.[0]?.summary\n\n return {\n success: true,\n provider: this.provider,\n eventType: \"transcription.completed\",\n data: {\n id: requestId || \"\",\n status: \"completed\",\n text: transcript,\n confidence: alternative.confidence,\n duration,\n language: response.metadata.models?.[0] || undefined,\n speakers: speakers && speakers.length > 0 ? speakers : undefined,\n words: words && words.length > 0 ? words : undefined,\n utterances: utterances && utterances.length > 0 ? 
utterances : undefined,\n summary,\n metadata: {\n channels: response.metadata.channels,\n created: response.metadata.created,\n models: response.metadata.models\n }\n },\n timestamp: new Date().toISOString(),\n raw: payload\n }\n } catch (error) {\n return this.createErrorEvent(\n payload,\n `Failed to parse Deepgram webhook: ${error instanceof Error ? error.message : \"Unknown error\"}`\n )\n }\n }\n\n /**\n * Verify Deepgram webhook signature\n *\n * Note: Deepgram does not currently support webhook signature verification.\n * For security, use HTTPS and validate the request source (IP allowlist, etc.).\n *\n * @returns Always returns true (no verification available)\n */\n verify(): boolean {\n // Deepgram does not currently support webhook signature verification\n // Return true to indicate no verification is required\n return true\n }\n}\n\n/**\n * Factory function to create a Deepgram webhook handler\n */\nexport function createDeepgramWebhookHandler(): DeepgramWebhookHandler {\n return new DeepgramWebhookHandler()\n}\n","/**\n * Azure Speech-to-Text webhook handler\n * Parses and normalizes Azure STT webhook callbacks\n */\n\nimport { BaseWebhookHandler } from \"./base-webhook\"\nimport type { UnifiedWebhookEvent, WebhookVerificationOptions } from \"./types\"\nimport type { TranscriptionProvider } from \"../router/types\"\nimport crypto from \"node:crypto\"\n\n/**\n * Azure webhook event payload structure\n * Based on Azure Speech Services v3.1 webhook format\n */\ninterface AzureWebhookPayload {\n /** Event action (e.g., \"TranscriptionCreated\", \"TranscriptionSucceeded\", \"TranscriptionFailed\") */\n action: string\n /** Timestamp of the event */\n timestamp: string\n /** Self-link to the resource */\n self?: string\n /** Additional properties */\n properties?: Record<string, unknown>\n /** Error details (for failed events) */\n error?: {\n code: string\n message: string\n }\n}\n\n/**\n * Azure webhook handler\n *\n * Handles webhook callbacks from Azure Speech Services API:\n * - TranscriptionCreated - Transcription job created\n * - TranscriptionRunning - Transcription is processing\n * - TranscriptionSucceeded - Transcription completed successfully\n * - TranscriptionFailed - Transcription failed with error\n *\n * Azure supports optional webhook signature verification using a shared secret.\n *\n * @example Basic usage\n * ```typescript\n * import { AzureWebhookHandler } from '@meeting-baas/sdk';\n *\n * const handler = new AzureWebhookHandler();\n *\n * // Validate webhook\n * const validation = handler.validate(req.body);\n * if (!validation.valid) {\n * return res.status(400).json({ error: validation.error });\n * }\n *\n * // Parse webhook\n * const event = handler.parse(req.body);\n * console.log('Event type:', event.eventType);\n * console.log('Action:', event.raw.action);\n * ```\n *\n * @example With signature verification\n * ```typescript\n * // Verify webhook signature (if configured in Azure)\n * const isValid = handler.verify(req.body, {\n * signature: req.headers['x-azure-signature'],\n * secret: process.env.AZURE_WEBHOOK_SECRET,\n * rawBody: req.rawBody\n * });\n *\n * if (!isValid) {\n * return res.status(401).json({ error: 'Invalid signature' });\n * }\n * ```\n *\n * @example Processing completed transcription\n * ```typescript\n * const event = handler.parse(req.body);\n *\n * if (event.eventType === 'transcription.completed') {\n * // Extract transcription ID from self link\n * const transcriptionId = event.data?.id;\n *\n * // Fetch full transcript 
using AzureAdapter.getTranscript(transcriptionId)\n * console.log('Transcription completed:', transcriptionId);\n * }\n * ```\n */\nexport class AzureWebhookHandler extends BaseWebhookHandler {\n readonly provider: TranscriptionProvider = \"azure-stt\"\n\n /**\n * Check if payload matches Azure webhook format\n */\n matches(\n payload: unknown,\n _options?: { queryParams?: Record<string, string>; userAgent?: string }\n ): boolean {\n if (!payload || typeof payload !== \"object\") {\n return false\n }\n\n const obj = payload as Record<string, unknown>\n\n // Azure webhooks have \"action\" and \"timestamp\" fields\n if (!(\"action\" in obj) || !(\"timestamp\" in obj)) {\n return false\n }\n\n // action should be a string\n if (typeof obj.action !== \"string\") {\n return false\n }\n\n // Action should start with \"Transcription\"\n if (!obj.action.startsWith(\"Transcription\")) {\n return false\n }\n\n return true\n }\n\n /**\n * Parse Azure webhook payload to unified format\n */\n parse(\n payload: unknown,\n _options?: { queryParams?: Record<string, string> }\n ): UnifiedWebhookEvent {\n if (!this.matches(payload)) {\n return this.createErrorEvent(payload, \"Invalid Azure webhook payload\")\n }\n\n const webhookPayload = payload as AzureWebhookPayload\n const action = webhookPayload.action\n const timestamp = webhookPayload.timestamp\n\n // Extract transcription ID from self link\n // Format: https://{region}.api.cognitive.microsoft.com/speechtotext/v3.1/transcriptions/{id}\n let transcriptionId = \"\"\n if (webhookPayload.self) {\n const match = webhookPayload.self.match(/\\/transcriptions\\/([^/?]+)/)\n if (match) {\n transcriptionId = match[1]\n }\n }\n\n // Map Azure actions to unified event types\n if (action === \"TranscriptionCreated\") {\n return {\n success: true,\n provider: this.provider,\n eventType: \"transcription.created\",\n data: {\n id: transcriptionId,\n status: \"queued\",\n createdAt: timestamp\n },\n timestamp,\n raw: payload\n }\n }\n\n if (action === \"TranscriptionRunning\") {\n return {\n success: true,\n provider: this.provider,\n eventType: \"transcription.processing\",\n data: {\n id: transcriptionId,\n status: \"processing\"\n },\n timestamp,\n raw: payload\n }\n }\n\n if (action === \"TranscriptionSucceeded\") {\n return {\n success: true,\n provider: this.provider,\n eventType: \"transcription.completed\",\n data: {\n id: transcriptionId,\n status: \"completed\",\n completedAt: timestamp\n // Note: Full transcript data needs to be fetched via API\n // using AzureAdapter.getTranscript(transcriptionId)\n },\n timestamp,\n raw: payload\n }\n }\n\n if (action === \"TranscriptionFailed\") {\n return {\n success: false,\n provider: this.provider,\n eventType: \"transcription.failed\",\n data: {\n id: transcriptionId,\n status: \"error\",\n error: webhookPayload.error?.message || \"Transcription failed\",\n metadata: {\n errorCode: webhookPayload.error?.code\n }\n },\n timestamp,\n raw: payload\n }\n }\n\n // Unknown action\n return this.createErrorEvent(payload, `Unknown Azure webhook action: ${action}`)\n }\n\n /**\n * Verify Azure webhook signature\n *\n * Azure can optionally sign webhooks using HMAC-SHA256.\n * The signature is sent in the X-Azure-Signature header.\n *\n * Note: Signature verification is optional in Azure and must be\n * configured when creating the webhook.\n *\n * @param payload - Webhook payload\n * @param options - Verification options with signature and secret\n * @returns true if signature is valid or no signature provided\n *\n * 
@example\n * ```typescript\n * const isValid = handler.verify(req.body, {\n * signature: req.headers['x-azure-signature'],\n * secret: process.env.AZURE_WEBHOOK_SECRET,\n * rawBody: req.rawBody\n * });\n * ```\n */\n verify(payload: unknown, options: WebhookVerificationOptions): boolean {\n // If no signature provided, skip verification\n // (Azure webhooks can be configured without signatures)\n if (!options.signature) {\n return true\n }\n\n // Need secret to verify\n if (!options.secret) {\n return false\n }\n\n try {\n // Use raw body if provided, otherwise stringify payload\n const body =\n options.rawBody || (typeof payload === \"string\" ? payload : JSON.stringify(payload))\n\n // Compute HMAC-SHA256 signature\n const hmac = crypto.createHmac(\"sha256\", options.secret)\n const bodyBuffer = typeof body === \"string\" ? Buffer.from(body) : body\n hmac.update(bodyBuffer)\n const computedSignature = hmac.digest(\"hex\")\n\n // Compare signatures (constant-time comparison)\n return crypto.timingSafeEqual(Buffer.from(options.signature), Buffer.from(computedSignature))\n } catch (error) {\n // If any error occurs during verification, treat as invalid\n return false\n }\n }\n}\n\n/**\n * Factory function to create an Azure webhook handler\n */\nexport function createAzureWebhookHandler(): AzureWebhookHandler {\n return new AzureWebhookHandler()\n}\n","/**\n * Speechmatics webhook handler\n * Parses and normalizes Speechmatics webhook callbacks\n */\n\nimport { BaseWebhookHandler } from \"./base-webhook\"\nimport type { UnifiedWebhookEvent } from \"./types\"\nimport type { TranscriptionProvider } from \"../router/types\"\nimport type { TranscriptionResponse } from \"../types/speechmatics\"\n\n/**\n * Speechmatics webhook handler\n *\n * Handles webhook callbacks from Speechmatics API.\n * Speechmatics sends job completion notifications via POST with:\n * - Query parameters: id (job ID) and status (success/error/fetch_error/trim_error)\n * - User agent: \"Speechmatics-API/2.0\"\n * - Body: transcript JSON or multipart data depending on configuration\n *\n * @see https://docs.speechmatics.com/features-other/notifications\n *\n * @example\n * ```typescript\n * import { SpeechmaticsWebhookHandler } from '@meeting-baas/sdk';\n *\n * const handler = new SpeechmaticsWebhookHandler();\n *\n * // Validate webhook\n * const validation = handler.validate(req.body, {\n * queryParams: req.query, // Include query params for status check\n * userAgent: req.headers['user-agent']\n * });\n *\n * if (!validation.valid) {\n * return res.status(400).json({ error: validation.error });\n * }\n *\n * // Parse webhook\n * const event = handler.parse(req.body, {\n * queryParams: req.query\n * });\n *\n * if (event.eventType === 'transcription.completed') {\n * console.log('Transcript:', event.data?.text);\n * }\n * ```\n */\nexport class SpeechmaticsWebhookHandler extends BaseWebhookHandler {\n readonly provider: TranscriptionProvider = \"speechmatics\"\n\n /**\n * Check if payload matches Speechmatics webhook format\n */\n matches(\n payload: unknown,\n options?: { queryParams?: Record<string, string>; userAgent?: string }\n ): boolean {\n // Check user agent if provided\n if (options?.userAgent) {\n if (!options.userAgent.includes(\"Speechmatics-API\")) {\n return false\n }\n }\n\n // Check for required query params\n if (options?.queryParams) {\n const { id, status } = options.queryParams\n if (!id || !status) {\n return false\n }\n }\n\n // Speechmatics can send either JSON or multipart data\n // For JSON 
transcript, check for expected structure\n if (payload && typeof payload === \"object\") {\n const obj = payload as Record<string, unknown>\n\n // Check for Speechmatics transcript format\n if (\"format\" in obj && \"job\" in obj && \"metadata\" in obj) {\n return true\n }\n\n // Could also be a simple status object\n if (\"job\" in obj || \"id\" in obj) {\n return true\n }\n }\n\n // If we can't determine from payload alone, rely on query params\n return !!options?.queryParams?.id && !!options?.queryParams?.status\n }\n\n /**\n * Validate webhook request\n */\n validate(\n payload: unknown,\n options?: { queryParams?: Record<string, string>; userAgent?: string }\n ): { valid: boolean; error?: string } {\n // Check for required query parameters\n if (!options?.queryParams?.id) {\n return {\n valid: false,\n error: \"Missing required query parameter: id\"\n }\n }\n\n if (!options?.queryParams?.status) {\n return {\n valid: false,\n error: \"Missing required query parameter: status\"\n }\n }\n\n // Validate status value\n const validStatuses = [\"success\", \"error\", \"fetch_error\", \"trim_error\"]\n if (!validStatuses.includes(options.queryParams.status)) {\n return {\n valid: false,\n error: `Invalid status value: ${options.queryParams.status}`\n }\n }\n\n // Optional: Check user agent\n if (options?.userAgent && !options.userAgent.includes(\"Speechmatics-API\")) {\n return {\n valid: false,\n error: \"Invalid user agent (expected Speechmatics-API/2.0)\"\n }\n }\n\n return { valid: true }\n }\n\n /**\n * Parse webhook payload into unified event format\n */\n parse(payload: unknown, options?: { queryParams?: Record<string, string> }): UnifiedWebhookEvent {\n const queryParams = options?.queryParams || {}\n const jobId = queryParams.id\n const status = queryParams.status\n\n // Determine event type based on status\n let eventType: UnifiedWebhookEvent[\"eventType\"]\n if (status === \"success\") {\n eventType = \"transcription.completed\"\n } else if (status === \"error\" || status === \"fetch_error\" || status === \"trim_error\") {\n eventType = \"transcription.failed\"\n } else {\n eventType = \"transcription.created\"\n }\n\n // Parse transcript if available and status is success\n if (status === \"success\" && payload && typeof payload === \"object\") {\n const transcript = payload as TranscriptionResponse\n\n if (transcript.results && transcript.job) {\n // Extract full text\n const text = transcript.results\n .filter((r) => r.type === \"word\" && r.alternatives)\n .map((r) => r.alternatives![0]?.content || \"\")\n .join(\" \")\n\n // Extract speakers if present\n const speakerSet = new Set<string>()\n transcript.results.forEach((r) => {\n if (r.alternatives) {\n const speaker = r.alternatives[0]?.speaker\n if (speaker) speakerSet.add(speaker)\n }\n })\n\n const speakers =\n speakerSet.size > 0\n ? 
Array.from(speakerSet).map((id) => ({\n id,\n label: `Speaker ${id}`\n }))\n : undefined\n\n return {\n success: true,\n provider: this.provider,\n eventType,\n timestamp: new Date().toISOString(),\n data: {\n id: jobId,\n text,\n status: \"completed\",\n language: transcript.metadata.transcription_config?.language,\n duration: transcript.job.duration,\n speakers,\n createdAt: transcript.job.created_at\n },\n raw: payload\n }\n }\n }\n\n // Return minimal event for non-success or incomplete payloads\n return {\n success: status === \"success\",\n provider: this.provider,\n eventType,\n timestamp: new Date().toISOString(),\n data: {\n id: jobId,\n text: \"\",\n status: status === \"success\" ? \"completed\" : \"error\"\n },\n raw: payload\n }\n }\n}\n","/**\n * Webhook router with automatic provider detection\n * Routes webhook payloads to the correct provider handler\n */\n\nimport type { BaseWebhookHandler } from \"./base-webhook\"\nimport { GladiaWebhookHandler } from \"./gladia-webhook\"\nimport { AssemblyAIWebhookHandler } from \"./assemblyai-webhook\"\nimport { DeepgramWebhookHandler } from \"./deepgram-webhook\"\nimport { AzureWebhookHandler } from \"./azure-webhook\"\nimport { SpeechmaticsWebhookHandler } from \"./speechmatics-webhook\"\nimport type { UnifiedWebhookEvent, WebhookValidation, WebhookVerificationOptions } from \"./types\"\nimport type { TranscriptionProvider } from \"../router/types\"\n\n/**\n * Webhook router options\n */\nexport interface WebhookRouterOptions {\n /**\n * Specific provider to use (skips auto-detection)\n */\n provider?: TranscriptionProvider\n\n /**\n * Webhook verification options (signature, secret, etc.)\n */\n verification?: WebhookVerificationOptions\n\n /**\n * Whether to verify webhook signatures\n * @default true\n */\n verifySignature?: boolean\n\n /**\n * Query parameters from the webhook request\n * (e.g., for Speechmatics: ?id=<job_id>&status=success)\n */\n queryParams?: Record<string, string>\n\n /**\n * User agent from the webhook request headers\n * (e.g., for Speechmatics: \"Speechmatics-API/2.0\")\n */\n userAgent?: string\n}\n\n/**\n * Webhook router result\n */\nexport interface WebhookRouterResult {\n /**\n * Whether routing was successful\n */\n success: boolean\n\n /**\n * Detected or specified provider\n */\n provider?: TranscriptionProvider\n\n /**\n * Parsed unified webhook event\n */\n event?: UnifiedWebhookEvent\n\n /**\n * Error message if routing failed\n */\n error?: string\n\n /**\n * Whether signature verification was performed and passed\n */\n verified?: boolean\n}\n\n/**\n * Webhook router with automatic provider detection\n *\n * Automatically detects the webhook provider from the payload structure\n * and routes to the appropriate handler for parsing and normalization.\n *\n * @example Basic usage with auto-detection\n * ```typescript\n * import { WebhookRouter } from '@meeting-baas/sdk';\n *\n * const router = new WebhookRouter();\n *\n * // Auto-detect provider and parse webhook\n * const result = router.route(req.body);\n *\n * if (result.success) {\n * console.log('Provider:', result.provider);\n * console.log('Event type:', result.event?.eventType);\n * console.log('Transcript ID:', result.event?.data?.id);\n * } else {\n * console.error('Error:', result.error);\n * }\n * ```\n *\n * @example With signature verification\n * ```typescript\n * const router = new WebhookRouter();\n *\n * const result = router.route(req.body, {\n * verification: {\n * signature: req.headers['x-signature'],\n * secret: 
process.env.WEBHOOK_SECRET,\n * rawBody: req.rawBody\n * }\n * });\n *\n * if (!result.verified) {\n * return res.status(401).json({ error: 'Invalid signature' });\n * }\n * ```\n *\n * @example Specify provider explicitly\n * ```typescript\n * const router = new WebhookRouter();\n *\n * // Skip auto-detection, use specific provider\n * const result = router.route(req.body, {\n * provider: 'gladia'\n * });\n * ```\n *\n * @example Express.js middleware\n * ```typescript\n * import express from 'express';\n * import { WebhookRouter } from '@meeting-baas/sdk';\n *\n * const app = express();\n * const router = new WebhookRouter();\n *\n * app.post('/webhooks/transcription', express.json(), (req, res) => {\n * const result = router.route(req.body, {\n * verification: {\n * signature: req.headers['x-signature'] as string,\n * secret: process.env.WEBHOOK_SECRET!\n * }\n * });\n *\n * if (!result.success) {\n * return res.status(400).json({ error: result.error });\n * }\n *\n * if (!result.verified) {\n * return res.status(401).json({ error: 'Invalid signature' });\n * }\n *\n * // Process webhook event\n * console.log('Received webhook from:', result.provider);\n * console.log('Event:', result.event);\n *\n * res.status(200).json({ received: true });\n * });\n * ```\n */\nexport class WebhookRouter {\n private handlers: Map<TranscriptionProvider, BaseWebhookHandler>\n\n constructor() {\n // Initialize all provider handlers\n this.handlers = new Map([\n [\"gladia\", new GladiaWebhookHandler()],\n [\"assemblyai\", new AssemblyAIWebhookHandler()],\n [\"deepgram\", new DeepgramWebhookHandler()],\n [\"azure-stt\", new AzureWebhookHandler()],\n [\"speechmatics\", new SpeechmaticsWebhookHandler()]\n ])\n }\n\n /**\n * Route webhook payload to the correct handler\n *\n * @param payload - Raw webhook payload\n * @param options - Routing options (provider, verification, etc.)\n * @returns Routing result with parsed event\n */\n route(payload: unknown, options?: WebhookRouterOptions): WebhookRouterResult {\n // If provider is specified, use that handler directly\n if (options?.provider) {\n return this.routeToProvider(payload, options.provider, options)\n }\n\n // Auto-detect provider\n const detectedProvider = this.detectProvider(payload, {\n queryParams: options?.queryParams,\n userAgent: options?.userAgent\n })\n\n if (!detectedProvider) {\n return {\n success: false,\n error: \"Could not detect webhook provider from payload structure\"\n }\n }\n\n return this.routeToProvider(payload, detectedProvider, options)\n }\n\n /**\n * Detect provider from webhook payload structure\n *\n * @param payload - Raw webhook payload\n * @param options - Detection options (query params, user agent, etc.)\n * @returns Detected provider or undefined\n */\n detectProvider(\n payload: unknown,\n options?: { queryParams?: Record<string, string>; userAgent?: string }\n ): TranscriptionProvider | undefined {\n // Try each handler's matches() method\n for (const [provider, handler] of this.handlers) {\n if (handler.matches(payload, options)) {\n return provider\n }\n }\n\n return undefined\n }\n\n /**\n * Validate webhook payload\n *\n * @param payload - Raw webhook payload\n * @param options - Routing options\n * @returns Validation result\n */\n validate(payload: unknown, options?: WebhookRouterOptions): WebhookValidation {\n // If provider is specified, use that handler directly\n if (options?.provider) {\n const handler = this.handlers.get(options.provider)\n if (!handler) {\n return {\n valid: false,\n error: `Unknown 
provider: ${options.provider}`\n }\n }\n return handler.validate(payload, {\n queryParams: options.queryParams,\n userAgent: options.userAgent\n })\n }\n\n // Auto-detect provider\n const detectedProvider = this.detectProvider(payload, {\n queryParams: options?.queryParams,\n userAgent: options?.userAgent\n })\n\n if (!detectedProvider) {\n return {\n valid: false,\n error: \"Could not detect webhook provider from payload structure\"\n }\n }\n\n const handler = this.handlers.get(detectedProvider)\n if (!handler) {\n return {\n valid: false,\n error: `Handler not found for provider: ${detectedProvider}`\n }\n }\n\n return handler.validate(payload, {\n queryParams: options?.queryParams,\n userAgent: options?.userAgent\n })\n }\n\n /**\n * Verify webhook signature\n *\n * @param payload - Raw webhook payload\n * @param provider - Provider name\n * @param options - Verification options\n * @returns true if signature is valid\n */\n verify(\n payload: unknown,\n provider: TranscriptionProvider,\n options: WebhookVerificationOptions\n ): boolean {\n const handler = this.handlers.get(provider)\n if (!handler || !handler.verify) {\n // No verification available for this provider\n return true\n }\n\n return handler.verify(payload, options)\n }\n\n /**\n * Route to a specific provider handler\n */\n private routeToProvider(\n payload: unknown,\n provider: TranscriptionProvider,\n options?: WebhookRouterOptions\n ): WebhookRouterResult {\n const handler = this.handlers.get(provider)\n\n if (!handler) {\n return {\n success: false,\n error: `Handler not found for provider: ${provider}`\n }\n }\n\n // Verify signature if requested\n let verified = true\n if (options?.verifySignature !== false && options?.verification && handler.verify) {\n verified = handler.verify(payload, options.verification)\n if (!verified) {\n return {\n success: false,\n provider,\n error: \"Webhook signature verification failed\",\n verified: false\n }\n }\n }\n\n // Validate payload\n const validation = handler.validate(payload, {\n queryParams: options?.queryParams,\n userAgent: options?.userAgent\n })\n if (!validation.valid) {\n return {\n success: false,\n provider,\n error: validation.error,\n verified\n }\n }\n\n // Parse payload\n try {\n const event = handler.parse(payload, {\n queryParams: options?.queryParams\n })\n\n return {\n success: true,\n provider,\n event,\n verified\n }\n } catch (error) {\n return {\n success: false,\n provider,\n error: `Failed to parse webhook: ${error instanceof Error ? 
error.message : \"Unknown error\"}`,\n verified\n }\n }\n }\n\n /**\n * Get handler for a specific provider\n *\n * @param provider - Provider name\n * @returns Handler instance or undefined\n */\n getHandler(provider: TranscriptionProvider): BaseWebhookHandler | undefined {\n return this.handlers.get(provider)\n }\n\n /**\n * Get all registered providers\n *\n * @returns Array of provider names\n */\n getProviders(): TranscriptionProvider[] {\n return Array.from(this.handlers.keys())\n }\n}\n\n/**\n * Factory function to create a webhook router\n */\nexport function createWebhookRouter(): WebhookRouter {\n return new WebhookRouter()\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA,yBAAAA;AAAA,EAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;AC8FO,IAAM,cAAN,MAAkB;AAAA,EAKvB,YAAY,QAA2B;AAJvC,SAAQ,WAA6D,oBAAI,IAAI;AAE7E,SAAQ,kBAAkB;AAGxB,SAAK,SAAS;AAAA,MACZ,mBAAmB;AAAA,MACnB,GAAG;AAAA,IACL;AAGA,QAAI,OAAO,KAAK,OAAO,SAAS,EAAE,WAAW,GAAG;AAC9C,YAAM,IAAI,MAAM,0DAA0D;AAAA,IAC5E;AAGA,QAAI,KAAK,OAAO,sBAAsB,aAAa,CAAC,KAAK,OAAO,iBAAiB;AAE/E,WAAK,OAAO,kBAAkB,OAAO,KAAK,OAAO,SAAS,EAAE,CAAC;AAAA,IAC/D;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAsBA,gBAAgB,SAAqC;AAEnD,UAAM,iBAAiB,KAAK,OAAO,UAAU,QAAQ,IAAI;AACzD,QAAI,CAAC,gBAAgB;AACnB,YAAM,IAAI,MAAM,wCAAwC,QAAQ,IAAI,EAAE;AAAA,IACxE;AAEA,YAAQ,WAAW,cAAc;AACjC,SAAK,SAAS,IAAI,QAAQ,MAAM,OAAO;AAAA,EACzC;AAAA;AAAA;AAAA;AAAA,EAKA,WAAW,UAAuD;AAChE,UAAM,UAAU,KAAK,SAAS,IAAI,QAAQ;AAC1C,QAAI,CAAC,SAAS;AACZ,YAAM,IAAI;AAAA,QACR,aAAa,QAAQ,6CAA6C,MAAM,KAAK,KAAK,SAAS,KAAK,CAAC,EAAE,KAAK,IAAI,CAAC;AAAA,MAC/G;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,eAAe,mBAAkE;AAEvF,QAAI,mBAAmB;AACrB,UAAI,CAAC,KAAK,SAAS,IAAI,iBAAiB,GAAG;AACzC,cAAM,IAAI;AAAA,UACR,aAAa,iBAAiB,6CAA6C,MAAM,KAAK,KAAK,SAAS,KAAK,CAAC,EAAE,KAAK,IAAI,CAAC;AAAA,QACxH;AAAA,MACF;AACA,aAAO;AAAA,IACT;AAGA,YAAQ,KAAK,OAAO,mBAAmB;AAAA,MACrC,KAAK;AACH,cAAM,IAAI;AAAA,UACR;AAAA,QACF;AAAA,MAEF,KAAK,eAAe;AAClB,cAAM,YAAY,MAAM,KAAK,KAAK,SAAS,KAAK,CAAC;AACjD,cAAM,WAAW,UAAU,KAAK,kBAAkB,UAAU,MAAM;AAClE,aAAK;AACL,eAAO;AAAA,MACT;AAAA,MAEA,KAAK;AAAA,MACL;AACE,YAAI,CAAC,KAAK,OAAO,iBAAiB;AAChC,gBAAM,IAAI,MAAM,gCAAgC;AAAA,QAClD;AACA,eAAO,KAAK,OAAO;AAAA,IACvB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAwCA,MAAM,WACJ,OACA,SACoC;AACpC,UAAM,WAAW,KAAK,eAAe,SAAS,QAAQ;AACtD,UAAM,UAAU,KAAK,WAAW,QAAQ;AAGxC,UAAM,EAAE,UAAU,GAAG,GAAG,eAAe,IAAI,WAAW,CAAC;AAEvD,WAAO,QAAQ,WAAW,OAAO,cAAc;AAAA,EACjD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,cACJ,cACA,UACoC;AACpC,UAAM,UAAU,KAAK,WAAW,QAAQ;AACxC,WAAO,QAAQ,cAAc,YAAY;AAAA,EAC3C;AAAA;AAAA,EAuFA,MAAM,iBACJ,SAKA,WAC2B;AAC3B,UAAM,WAAW,KAAK,eAAe,SAAS,QAAQ;AACtD,UAAM,UAAU,KAAK,WAAW,QAAQ;AAGxC,QAAI,CAAC,QAAQ,aAAa,aAAa,CAAC,QAAQ,kBAAkB;AAChE,YAAM,IAAI,MAAM,aAAa,QAAQ,4CAA4C;AAAA,IACnF;AAIA,UAAM,EAAE,UAAU,GAAG,GAAG,eAAe,IAAI,WAAW,CAAC;AAEvD,WAAO,QAAQ,iBAAiB,gBAAoC,SAAS;AAAA,EAC/E;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,iBACJ,cACA,UAC+B;AAC/B,UAAM,UAAU,KAAK,WAAW,QAAQ;AAExC,QAAI,CAAC,QAAQ,kBAAkB;AAC7B,YAAM,IAAI,MAAM,aAAa,QAAQ,yCAAyC;AAAA,IAChF;AAEA,WAAO,QAAQ,iBAAiB,YAAY;AAAA,EAC9C;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,gBACJ,UACA,SASC;AACD,UAAM,UAAU,KAAK,WAAW,QAAQ;AAExC,QAAI,CAAC,QAAQ,iBAAiB;AAC5B,YAAM,IAAI,MAAM,aAAa,QAAQ,wCAAwC;AAAA,IAC/E;AAEA,W
AAO,QAAQ,gBAAgB,OAAO;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA,EAKA,wBAAwB,UAAiC;AACvD,UAAM,UAAU,KAAK,WAAW,QAAQ;AACxC,WAAO,QAAQ;AAAA,EACjB;AAAA;AAAA;AAAA;AAAA,EAKA,yBAAkD;AAChD,WAAO,MAAM,KAAK,KAAK,SAAS,KAAK,CAAC;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA,EAKA,qBAAqB,UAA0C;AAC7D,UAAM,UAAU,KAAK,WAAW,QAAQ;AAExC,QAAI,CAAC,QAAQ,cAAc;AACzB,YAAM,IAAI,MAAM,aAAa,QAAQ,gCAAgC;AAAA,IACvE;AAEA,WAAO,QAAQ,aAAa;AAAA,EAC9B;AACF;AAKO,SAAS,kBACd,QACA,UACa;AACb,QAAM,SAAS,IAAI,YAAY,MAAM;AAGrC,MAAI,YAAY,SAAS,SAAS,GAAG;AACnC,eAAW,WAAW,UAAU;AAC9B,aAAO,gBAAgB,OAAO;AAAA,IAChC;AAAA,EACF;AAEA,SAAO;AACT;;;AC/ZO,IAAM,4BAA4B;AAAA,EACvC,UAAU;AAAA,EACV,MAAM;AAAA,EACN,OAAO;AAAA,EACP,MAAM;AAAA,EACN,OAAO;AAAA,EACP,MAAM;AACR;;;ACXO,IAAM,2BAA2B;AAAA,EACtC,UAAU;AAAA,EACV,KAAK;AAAA,EACL,MAAM;AAAA,EACN,KAAK;AAAA,EACL,MAAM;AAAA,EACN,OAAO;AAAA,EACP,MAAM;AACR;;;ACRO,IAAM,4BAA4B;AAAA,EACvC,MAAM;AAAA,EACN,KAAK;AAAA,EACL,KAAK;AACP;;;ACAO,IAAM,6BAA6B;AAAA,EACxC,cAAc;AAAA,EACd,cAAc;AAAA,EACd,cAAc;AAAA,EACd,cAAc;AAAA,EACd,MAAM;AAAA,EACN,aAAa;AAAA,EACb,cAAc;AAChB;;;AC/BO,IAAM,iCAAiC;AAAA,EAC5C,WAAW;AAAA,EACX,YAAY;AAAA,EACZ,YAAY;AACd;;;ACTO,IAAM,mCAAmC;AAAA,EAC9C,aAAa;AAAA,EACb,cAAc;AAAA,EACd,cAAc;AAAA,EACd,cAAc;AAAA,EACd,cAAc;AAChB;;;ACNO,IAAM,iCAAiC;AAAA,EAC5C,UAAU;AAAA,EACV,WAAW;AAAA,EACX,WAAW;AAAA,EACX,WAAW;AACb;;;ACTO,IAAM,mBAAmB;AAAA;AAAA,EAE9B,cAAc;AAAA;AAAA,EAGd,kBAAkB;AAAA;AAAA,EAGlB,eAAe;AAAA;AAAA,EAGf,UAAU;AACZ;AAKO,IAAM,kBAAkB;AAAA;AAAA,EAE7B,cAAc;AAAA;AAAA,EAGd,aAAa;AAAA;AAAA,EAGb,kBAAkB;AACpB;;;ACxBO,IAAM,cAAc;AAAA;AAAA,EAEzB,aAAa;AAAA;AAAA,EAGb,iBAAiB;AAAA;AAAA,EAGjB,iBAAiB;AAAA;AAAA,EAGjB,qBAAqB;AAAA;AAAA,EAGrB,oBAAoB;AAAA;AAAA,EAGpB,eAAe;AAAA;AAAA,EAGf,eAAe;AAAA;AAAA,EAGf,YAAY;AAAA;AAAA,EAGZ,eAAe;AACjB;AASO,IAAM,iBAA4C;AAAA,EACvD,aAAa;AAAA,EACb,iBAAiB;AAAA,EACjB,iBAAiB;AAAA,EACjB,qBAAqB;AAAA,EACrB,oBAAoB;AAAA,EACpB,eAAe;AAAA,EACf,eAAe;AAAA,EACf,YAAY;AAAA,EACZ,eAAe;AACjB;AAmCO,SAAS,YACd,MACA,eACA,SACe;AACf,SAAO;AAAA,IACL;AAAA,IACA,SAAS,iBAAiB,eAAe,IAAI;AAAA,IAC7C;AAAA,EACF;AACF;;;ACoCO,IAAe,cAAf,MAA2D;AAAA,EAWhE,WAAW,QAA8B;AACvC,SAAK,SAAS;AAAA,EAChB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAgBU,oBACR,OACA,YACA,MAC2B;AAC3B,UAAM,MAAM;AAOZ,UAAM,aAAa,cAAc,IAAI,cAAc,IAAI,UAAU;AACjE,UAAM,iBAAiB,IAAI,UAAU;AACrC,UAAM,eAAe,IAAI,UAAU;AAEnC,WAAO;AAAA,MACL,SAAS;AAAA,MACT,UAAU,KAAK;AAAA,MACf,OAAO;AAAA,QACL,MAAM,QAAQ,IAAI,QAAQ,YAAY;AAAA,QACtC,SAAS,IAAI,WAAW;AAAA,QACxB,YAAY;AAAA,QACZ,SAAS;AAAA;AAAA,UAEP;AAAA;AAAA,UAEA,OAAO,IAAI;AAAA;AAAA,UAEX;AAAA,UACA;AAAA,UACA;AAAA;AAAA,UAEA,UAAU,KAAK;AAAA,QACjB;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKU,iBAAuB;AAC/B,QAAI,CAAC,KAAK,QAAQ;AAChB,YAAM,IAAI,MAAM,WAAW,KAAK,IAAI,+CAA+C;AAAA,IACrF;AACA,QAAI,CAAC,KAAK,OAAO,QAAQ;AACvB,YAAM,IAAI,MAAM,2BAA2B,KAAK,IAAI,WAAW;AAAA,IACjE;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASU,eACR,iBAAyB,iBACzB,iBAKA;AACA,SAAK,eAAe;AAEpB,UAAM,YAAY,kBAAkB,gBAAgB,KAAK,OAAQ,MAAM,IAAI,KAAK,OAAQ;AAExF,WAAO;AAAA,MACL,SAAS,KAAK,OAAQ,WAAW,KAAK;AAAA,MACtC,SAAS,KAAK,OAAQ,WAAW,iBAAiB;AAAA,MAClD,SAAS;AAAA,QACP,CAAC,cAAc,GAAG;AAAA,QAClB,gBAAgB;AAAA,QAChB,GAAG,KAAK,OAAQ;AAAA,MAClB;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAgB,kBACd,cACA,SAIoC;AACpC,UAAM,EAAE,cAAc,gBAAgB,cAAc,aAAa,gBAAgB,YAAY,IAC3F,WAAW,CAAC;AAEd,aAAS,UAAU,GAAG,UAAU,aAAa,WAAW;AACtD,YAAM,SAAS,MAAM,KAAK,cAAc,YAAY;AAEpD,UAAI,CAAC,OAAO,SAAS;AACnB,eAAO;AAAA,MACT;AAEA,YAAM,SAAS,OAAO,MAAM;AAC5B,UAAI,WAAW,aAAa;AAC1B,eAAO;AAAA,MACT;AAEA,UAAI,WAAW,SAAS;AACtB,eAAO,KAAK;AAAA,UACV,IAAI,MAAM,sBAAsB;AAAA,UAChC;AAAA,UACA,YAAY;AAAA,QACd;AAAA,MACF;AAEA,YAAM,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,UAAU,CAAC;AAAA,IAChE;AAEA,W
AAO;AAAA,MACL,SAAS;AAAA,MACT,UAAU,KAAK;AAAA,MACf,OAAO;AAAA,QACL,MAAM,YAAY;AAAA,QAClB,SAAS,wCAAwC,WAAW;AAAA,MAC9D;AAAA,IACF;AAAA,EACF;AACF;;;ACtSA,gBAAsB;;;AC4Df,IAAM,sBAA8C;AAAA,EACzD,UAAU;AAAA,EACV,OAAO;AAAA,EACP,MAAM;AACR;AAMO,IAAM,wBAAgD;AAAA,EAC3D,UAAU;AAAA,EACV,OAAO;AAAA,EACP,MAAM;AAAA,EACN,MAAM;AAAA,EACN,OAAO;AAAA,EACP,UAAU;AAAA,EACV,UAAU;AAAA,EACV,MAAM;AACR;AAMO,IAAM,0BAAkD;AAAA,EAC7D,UAAU;AACZ;AAmBO,SAAS,sBACd,iBACA,UACQ;AACR,MAAI;AAEJ,UAAQ,UAAU;AAAA,IAChB,KAAK;AACH,gBAAU;AACV;AAAA,IACF,KAAK;AACH,gBAAU;AACV;AAAA,IACF,KAAK;AACH,gBAAU;AACV;AAAA,EACJ;AAEA,QAAM,mBAAmB,QAAQ,eAAe;AAEhD,MAAI,CAAC,kBAAkB;AACrB,UAAM,IAAI;AAAA,MACR,aAAa,eAAe,yBAAyB,QAAQ,0BACnC,OAAO,KAAK,OAAO,EAAE,KAAK,IAAI,CAAC;AAAA,IAC3D;AAAA,EACF;AAEA,SAAO;AACT;;;ACjHO,SAAS,qBACd,IACA,YAAoB,iBAAiB,eACtB;AACf,SAAO,IAAI,QAAc,CAAC,SAAS,WAAW;AAC5C,UAAM,UAAU,WAAW,MAAM;AAC/B,aAAO,IAAI,MAAM,8BAA8B,CAAC;AAAA,IAClD,GAAG,SAAS;AAEZ,OAAG,KAAK,QAAQ,MAAM;AACpB,mBAAa,OAAO;AACpB,cAAQ;AAAA,IACV,CAAC;AAED,OAAG,KAAK,SAAS,CAAC,UAAU;AAC1B,mBAAa,OAAO;AACpB,aAAO,KAAK;AAAA,IACd,CAAC;AAAA,EACH,CAAC;AACH;AAiBO,SAAS,eACd,IACA,YAAoB,iBAAiB,UACtB;AACf,SAAO,IAAI,QAAc,CAAC,YAAY;AACpC,UAAM,UAAU,WAAW,MAAM;AAC/B,SAAG,UAAU;AACb,cAAQ;AAAA,IACV,GAAG,SAAS;AAEZ,OAAG,MAAM;AAET,OAAG,KAAK,SAAS,MAAM;AACrB,mBAAa,OAAO;AACpB,cAAQ;AAAA,IACV,CAAC;AAAA,EACH,CAAC;AACH;AAuBO,SAAS,uBACd,IACA,WACA,kBACM;AACN,KAAG,GAAG,QAAQ,MAAM;AAClB,qBAAiB,MAAM;AACvB,eAAW,SAAS;AAAA,EACtB,CAAC;AAED,KAAG,GAAG,SAAS,CAAC,UAAiB;AAC/B,eAAW,UAAU,YAAY,YAAY,iBAAiB,MAAM,SAAS,KAAK,CAAC;AAAA,EACrF,CAAC;AAED,KAAG,GAAG,SAAS,CAAC,MAAc,WAAmB;AAC/C,qBAAiB,QAAQ;AACzB,eAAW,UAAU,MAAM,OAAO,SAAS,CAAC;AAAA,EAC9C,CAAC;AACH;AAmBO,SAAS,wBACd,eACA,cACA,eACM;AACN,MAAI,kBAAkB,QAAQ;AAC5B,UAAM,IAAI,MAAM,iCAAiC,aAAa,EAAE;AAAA,EAClE;AAEA,MAAI,iBAAiB,eAAe;AAClC,UAAM,IAAI,MAAM,uBAAuB;AAAA,EACzC;AACF;;;ACnGO,SAAS,kBACd,OACA,UACA,WACA,UACY;AACZ,QAAM,cAAc,OAAO,OAAO,QAAQ;AAC1C,QAAM,UAAU,YAAY,KAAK,CAAC,MAAM,MAAM,KAAK;AAEnD,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI;AAAA,MACR,GAAG,QAAQ,qBAAqB,SAAS,KAAK,KAAK,4CACR,YAAY,KAAK,IAAI,CAAC;AAAA,IACnE;AAAA,EACF;AAEA,SAAO;AACT;;;AChCO,SAAS,8BACd,YACA,cACA,aACuB;AACvB,MAAI,CAAC,cAAc,WAAW,WAAW,GAAG;AAC1C,WAAO;AAAA,EACT;AAEA,QAAM,aAAa,oBAAI,IAAY;AAEnC,aAAW,QAAQ,CAAC,cAAc;AAChC,UAAM,YAAY,aAAa,SAAS;AACxC,QAAI,cAAc,QAAW;AAC3B,iBAAW,IAAI,OAAO,SAAS,CAAC;AAAA,IAClC;AAAA,EACF,CAAC;AAED,MAAI,WAAW,SAAS,GAAG;AACzB,WAAO;AAAA,EACT;AAEA,SAAO,MAAM,KAAK,UAAU,EAAE,IAAI,CAAC,eAAe;AAAA,IAChD,IAAI;AAAA,IACJ,OAAO,cAAc,YAAY,SAAS,IAAI,WAAW,SAAS;AAAA,EACpE,EAAE;AACJ;AAwBO,SAAS,aACd,OACA,QACoB;AACpB,MAAI,CAAC,SAAS,MAAM,WAAW,GAAG;AAChC,WAAO;AAAA,EACT;AAEA,QAAM,kBAAkB,MAAM,IAAI,MAAM;AACxC,SAAO,gBAAgB,SAAS,IAAI,kBAAkB;AACxD;AAQO,IAAM,kBAAkB;AAAA,EAC7B,QAAQ;AAAA,IACN,QAAQ;AAAA,IACR,YAAY;AAAA,IACZ,MAAM;AAAA,IACN,OAAO;AAAA,EACT;AAAA,EACA,YAAY;AAAA,IACV,QAAQ;AAAA,IACR,YAAY;AAAA,IACZ,WAAW;AAAA,IACX,OAAO;AAAA,EACT;AAAA,EACA,UAAU;AAAA,IACR,QAAQ;AAAA,IACR,YAAY;AAAA,IACZ,WAAW;AAAA,IACX,OAAO;AAAA,EACT;AAAA,EACA,OAAO;AAAA,IACL,WAAW;AAAA,IACX,SAAS;AAAA,IACT,YAAY;AAAA,IACZ,QAAQ;AAAA,EACV;AAAA,EACA,cAAc;AAAA,IACZ,SAAS;AAAA,IACT,MAAM;AAAA,IACN,UAAU;AAAA,IACV,SAAS;AAAA,EACX;AACF;AA0BO,SAAS,gBACd,gBACA,UACA,gBAAqC,UAChB;AACrB,MAAI,CAAC,eAAgB,QAAO;AAE5B,QAAM,UAAU,gBAAgB,QAAQ;AACxC,QAAM,YAAY,eAAe,SAAS,EAAE,YAAY;AAGxD,MAAI,aAAa,SAAS;AACxB,WAAO,QAAQ,SAAiC;AAAA,EAClD;AAGA,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,OAAO,GAAG;AAClD,QAAI,UAAU,SAAS,GAAG,GAAG;AAC3B,aAAO;AAAA,IACT;AAAA,EACF;AAEA,SAAO;AACT;;;ACjLA,mBAAkB;;;ACTlB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA
AA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACWO,IAAM,2BAA2B;AAAA,EACtC,aAAa;AACf;;;ACHO,IAAM,uBAAuB;AAAA,EAClC,aAAa;AACf;;;ACDO,IAAM,sDAAsD;AAAA,EACjE,WAAW;AAAA,EACX,UAAU;AAAA,EACV,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,UAAU;AAAA,EACV,UAAU;AAAA,EACV,aAAa;AAAA,EACb,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,WAAW;AAAA,EACX,SAAS;AAAA,EACT,SAAS;AAAA,EACT,UAAU;AAAA,EACV,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,SAAS;AAAA,EACT,UAAU;AAAA,EACV,SAAS;AAAA,EACT,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,UAAU;AAAA,EACV,UAAU;AAAA,EACV,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,UAAU;AAAA,EACV,gBAAgB;AAAA,EAChB,OAAO;AAAA,EACP,UAAU;AAAA,EACV,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,WAAW;AAAA,EACX,WAAW;AAAA,EACX,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,UAAU;AAAA,EACV,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,KAAK;AAAA,EACL,OAAO;AAAA,EACP,SAAS;AAAA,EACT,SAAS;AAAA,EACT,YAAY;AAAA,EACZ,eAAe;AAAA,EACf,YAAY;AAAA,EACZ,UAAU;AAAA,EACV,OAAO;AAAA,EACP,WAAW;AAAA,EACX,SAAS;AAAA,EACT,OAAO;AAAA,EACP,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,WAAW;AAAA,EACX,SAAS;AAAA,EACT,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,SAAS;AAAA,EACT,UAAU;AAAA,EACV,SAAS;AAAA,EACT,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,WAAW;AAAA,EACX,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AAAA,EACT,SAAS;AAAA,EACT,SAAS;AAAA,EACT,OAAO;AAAA,EACP,OAAO;AAAA,EACP,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,MAAM;AAAA,EACN,SAAS;AAAA,EACT,SAAS;AAAA,EACT,SAAS;AAAA,EACT,WAAW;AAAA,EACX,MAAM;AAAA,EACN,OAAO;AAAA,EACP,YAAY;AAAA,EACZ,OAAO;AAAA,EACP,SAAS;AAAA,EACT,QAAQ;AACV;;;ACpGO,IAAM,+DAA+D;AAAA,EAC1E,2BAA2B;AAAA,EAC3B,8BAA8B;AAAA,EAC9B,QAAQ;AACV;;;ACJO,IAAM,0DAA0D;AAAA,EACrE,MAAM;AAAA,EACN,KAAK;AAAA,EACL,KAAK;AAAA,EACL,OAAO;AAAA,EACP,KAAK;AACP;;;ACNO,IAAM,uEAAuE;AAAA,EAClF,WAAW;AAAA,EACX,UAAU;AAAA,EACV,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,UAAU;AAAA,EACV,UAAU;AAAA,EACV,aAAa;AAAA,EACb,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,WAAW;AAAA,EACX,SAAS;AAAA,EACT,SAAS;AAAA,EACT,UAAU;AAAA,EACV,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,SAAS;AAAA,EACT,UAAU;AAAA,EACV,SAAS;AAAA,EACT,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,UAAU;AAAA,EACV,UAAU;AAAA,EACV,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,UAAU;AAAA,EACV,gBAAgB;AAAA,EAChB,OAAO;AAAA,EACP,UAAU;AAAA,EACV,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,WAAW;AAAA,EACX,WAAW;AAAA,EACX,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,UAAU;AAAA,EACV,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,KAAK;AAAA,EACL,OAAO;AAAA,EACP,SAAS;AAAA,EACT,SAAS;AAAA,EACT,YAAY;AAAA,EACZ,eAAe;AAAA,EACf,YAAY;AAAA,EACZ,UAAU;AAAA,EACV,OAAO;AAAA,EACP,WAAW;AAAA,EACX,SAAS;AAAA,EACT,OAAO;AAAA,EACP,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,WAAW;AAAA,EACX,SAAS;AAAA,EACT,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,SAAS;AAAA,EACT,UAAU;AAAA,EACV,SAAS;AAAA,EACT,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,WAAW;AAAA,EACX,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AAAA,EACT,SAAS;AAAA,EACT,SAAS;AAAA,EACT,OAAO;AAAA,EACP,OAAO;AAAA,EACP,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,MAAM;AAAA,EACN,SAAS;AAAA,EACT,SAAS;AAAA,EACT,SAAS;AAAA,EACT,WAAW;AAAA,EACX,MAAM;AAAA,EACN,OAAO;AAAA,EACP,YAAY;AAAA,EACZ,OAAO;AAAA,EACP,OAAO;AAAA,EACP,SAAS;AAAA,EACT,QAAQ;AACV;;;ACrGO,IAAM,wCAAwC;AAAA
,EACnD,iBAAiB;AACnB;;;ACFO,IAAM,uCAAuC;AAAA,EAClD,mBAAmB;AACrB;;;ACFO,IAAM,qCAAqC;AAAA,EAChD,iBAAiB;AACnB;;;ACFO,IAAM,iDAAiD;AAAA,EAC5D,8BAA8B;AAChC;;;ACFO,IAAM,6CAA6C;AAAA,EACxD,yBAAyB;AAC3B;;;ACFO,IAAM,8CAA8C;AAAA,EACzD,2BAA2B;AAC7B;;;ACFO,IAAM,4CAA4C;AAAA,EACvD,wBAAwB;AAC1B;;;ACFO,IAAM,yCAAyC;AAAA,EACpD,qBAAqB;AACvB;;;ACFO,IAAM,4CAA4C;AAAA,EACvD,wBAAwB;AAC1B;;;ACFO,IAAM,oCAAoC;AAAA,EAC/C,gBAAgB;AAClB;;;ACFO,IAAM,sCAAsC;AAAA,EACjD,kBAAkB;AACpB;;;ACFO,IAAM,yCAAyC;AAAA,EACpD,qBAAqB;AACvB;;;ACFO,IAAM,uCAAuC;AAAA,EAClD,mBAAmB;AACrB;;;ACFO,IAAM,2CAA2C;AAAA,EACtD,oBAAoB;AACtB;;;ACFO,IAAM,qCAAqC;AAAA,EAChD,gBAAgB;AAClB;;;ACFO,IAAM,sCAAsC;AAAA,EACjD,iBAAiB;AACnB;;;ACAO,IAAM,qBAAqB;AAAA,EAChC,MAAM;AAAA,EACN,KAAK;AACP;;;ACFO,IAAM,yCAAyC;AAAA,EACpD,oBAAoB;AACtB;;;ACFO,IAAM,2CAA2C;AAAA,EACtD,sBAAsB;AACxB;;;ACLO,IAAM,0BAA0B;AAAA,EACrC,eAAe;AACjB;;;ACFO,IAAM,wBAAwB;AAAA,EACnC,aAAa;AACf;;;ACFO,IAAM,qCAAqC;AAAA,EAChD,gBAAgB;AAAA,EAChB,MAAM;AACR;;;ACHO,IAAM,uCAAuC;AAAA,EAClD,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,MAAM;AAAA,EACN,OAAO;AACT;;;ACLO,IAAM,oCAAoC;AAAA,EAC/C,0BAA0B;AAC5B;;;ACFO,IAAM,gCAAgC;AAAA,EAC3C,qBAAqB;AACvB;;;ACFO,IAAM,iCAAiC;AAAA,EAC5C,uBAAuB;AACzB;;;ACFO,IAAM,+BAA+B;AAAA,EAC1C,oBAAoB;AACtB;;;ACFO,IAAM,4BAA4B;AAAA,EACvC,iBAAiB;AACnB;;;ACFO,IAAM,sDAAsD;AAAA,EACjE,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,MAAM;AAAA,EACN,OAAO;AACT;;;ACLO,IAAM,0BAA0B;AAAA,EACrC,gBAAgB;AAClB;;;ACCO,IAAM,4BAA4B;AAAA,EACvC,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,MAAM;AAAA,EACN,OAAO;AACT;;;ACRO,IAAM,+BAA+B;AAAA,EAC1C,oBAAoB;AACtB;;;ACHO,IAAM,uBAAuB;AAAA,EAClC,YAAY;AACd;;;ACDO,IAAM,yBAAyB;AAAA,EACpC,cAAc;AAChB;;;ACFO,IAAM,4BAA4B;AAAA,EACvC,iBAAiB;AACnB;;;ACFO,IAAM,0BAA0B;AAAA,EACrC,eAAe;AACjB;;;ACFO,IAAM,8BAA8B;AAAA,EACzC,gBAAgB;AAClB;;;ACFO,IAAM,0BAA0B;AAAA,EACrC,gBAAgB;AAClB;;;ACFO,IAAM,kDAAkD;AAAA,EAC7D,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,MAAM;AAAA,EACN,OAAO;AACT;;;ACLO,IAAM,wBAAwB;AAAA,EACnC,MAAM;AACR;;;ACCO,IAAM,0BAA0B;AAAA,EACrC,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,MAAM;AAAA,EACN,OAAO;AACT;;;ACLO,IAAM,2BAA2B;AAAA,EACtC,aAAa;AACf;;;ACLO,IAAM,4BAA4B;AAAA,EACvC,WAAW;AAAA,EACX,WAAW;AACb;;;ACDO,IAAM,sBAAsB;AAAA,EACjC,KAAK;AAAA,EACL,KAAK;AACP;;;ACHO,IAAM,qBAAqB;AAAA,EAChC,SAAS;AAAA,EACT,YAAY;AACd;;;ACHO,IAAM,mBAAmB;AAAA,EAC9B,SAAS;AAAA,EACT,eAAe;AAAA,EACf,SAAS;AACX;;;ACNO,IAAM,wCAAwC;AAAA,EACnD,gBAAgB;AAAA,EAChB,MAAM;AACR;;;ACHO,IAAM,0CAA0C;AAAA,EACrD,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,MAAM;AAAA,EACN,OAAO;AACT;;;ACFO,IAAM,gCAAgC;AAAA,EAC3C,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,KAAK;AAAA,EACL,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;
AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AACN;;;ACvGO,IAAM,wBAAwB;AAAA,EACnC,YAAY;AACd;;;ACCO,IAAM,8BAA8B;AAAA,EACzC,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,KAAK;AAAA,EACL,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AACN;;;ACxGO,IAAM,yBAAyB;AAAA,EACpC,aAAa;AACf;;;ACAO,IAAM,uBAAuB;AAAA,EAClC,MAAM;AAAA,EACN,UAAU;AACZ;;;ACLO,IAAM,sDAAsD;AAAA,EACjE,WAAW;AAAA,EACX,UAAU;AAAA,EACV,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,UAAU;AAAA,EACV,UAAU;AAAA,EACV,aAAa;AAAA,EACb,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,WAAW;AAAA,EACX,SAAS;AAAA,EACT,SAAS;AAAA,EACT,UAAU;AAAA,EACV,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,SAAS;AAAA,EACT,UAAU;AAAA,EACV,SAAS;AAAA,EACT,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,UAAU;AAAA,EACV,UAAU;AAAA,EACV,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,UAAU;AAAA,EACV,gBAAgB;AAAA,EAChB,OAAO;AAAA,EACP,UAAU;AAAA,EACV,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,WAAW;AAAA,EACX,WAAW;AAAA,EACX,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,UAAU;AAAA,EACV,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,KAAK;AAAA,EACL,OAAO;AAAA,EACP,SAAS;AAAA,EACT,SAAS;AAAA,EACT,YAAY;AAAA,EACZ,eAAe;AAAA,EACf,YAAY;AAAA,EACZ,UAAU;AAAA,EACV,OAAO;AAAA,EACP,WAAW;AAAA,EACX,SAAS;AAAA,EACT,OAAO;AAAA,EACP,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,WAAW;AAAA,EACX,SAAS;AAAA,EACT,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,SAAS;AAAA,EACT,UAAU;AAAA,EACV,SAAS;AAAA,EACT,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,WAAW;AAAA,EACX,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AAAA,EACT,SAAS;AAAA,EACT,SAAS;AAAA,EACT,OAAO;AAAA,EACP,OAAO;AAAA,EACP,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,MAAM;AAAA,EACN,SAAS;AAAA,EACT,SAAS;AAAA,EACT,SAAS;AAAA,EACT,WAAW;AAAA,EACX,MAAM;AAAA,EACN,OAAO;AAAA,EACP,YAAY;AAAA,EACZ,OAAO;AAAA,EACP,SAAS;AAAA,EACT,QAAQ;AACV;;;ACpGO,IAAM,+DAA+D;AAAA,EAC1E,2BAA2B;AAAA,EA
C3B,8BAA8B;AAAA,EAC9B,QAAQ;AACV;;;ACJO,IAAM,0DAA0D;AAAA,EACrE,MAAM;AAAA,EACN,KAAK;AAAA,EACL,KAAK;AAAA,EACL,OAAO;AAAA,EACP,KAAK;AACP;;;ACNO,IAAM,uEAAuE;AAAA,EAClF,WAAW;AAAA,EACX,UAAU;AAAA,EACV,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,UAAU;AAAA,EACV,UAAU;AAAA,EACV,aAAa;AAAA,EACb,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,WAAW;AAAA,EACX,SAAS;AAAA,EACT,SAAS;AAAA,EACT,UAAU;AAAA,EACV,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,SAAS;AAAA,EACT,UAAU;AAAA,EACV,SAAS;AAAA,EACT,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,UAAU;AAAA,EACV,UAAU;AAAA,EACV,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,UAAU;AAAA,EACV,gBAAgB;AAAA,EAChB,OAAO;AAAA,EACP,UAAU;AAAA,EACV,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,WAAW;AAAA,EACX,WAAW;AAAA,EACX,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,UAAU;AAAA,EACV,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,KAAK;AAAA,EACL,OAAO;AAAA,EACP,SAAS;AAAA,EACT,SAAS;AAAA,EACT,YAAY;AAAA,EACZ,eAAe;AAAA,EACf,YAAY;AAAA,EACZ,UAAU;AAAA,EACV,OAAO;AAAA,EACP,WAAW;AAAA,EACX,SAAS;AAAA,EACT,OAAO;AAAA,EACP,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,WAAW;AAAA,EACX,SAAS;AAAA,EACT,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,SAAS;AAAA,EACT,UAAU;AAAA,EACV,SAAS;AAAA,EACT,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,WAAW;AAAA,EACX,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AAAA,EACT,SAAS;AAAA,EACT,SAAS;AAAA,EACT,OAAO;AAAA,EACP,OAAO;AAAA,EACP,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,MAAM;AAAA,EACN,SAAS;AAAA,EACT,SAAS;AAAA,EACT,SAAS;AAAA,EACT,WAAW;AAAA,EACX,MAAM;AAAA,EACN,OAAO;AAAA,EACP,YAAY;AAAA,EACZ,OAAO;AAAA,EACP,OAAO;AAAA,EACP,SAAS;AAAA,EACT,QAAQ;AACV;;;ACrGO,IAAM,sCAAsC;AAAA,EACjD,mBAAmB;AACrB;;;ACFO,IAAM,oCAAoC;AAAA,EAC/C,iBAAiB;AACnB;;;ACFO,IAAM,wCAAwC;AAAA,EACnD,qBAAqB;AACvB;;;ACFO,IAAM,sCAAsC;AAAA,EACjD,mBAAmB;AACrB;;;ACFO,IAAM,0CAA0C;AAAA,EACrD,sBAAsB;AACxB;;;ACFO,IAAM,wCAAwC;AAAA,EACnD,oBAAoB;AACtB;;;ACFO,IAAM,0CAA0C;AAAA,EACrD,sBAAsB;AACxB;;;AvEgDO,IAAM,4CAA4C,CAGvD,0BACA,YACmB;AACnB,SAAO,aAAAC,QAAM,KAAK,oBAAoB,0BAA0B,OAAO;AACzE;AAoBO,IAAM,2CAA2C,CAGtD,IACA,YACmB;AACnB,SAAO,aAAAC,QAAM,IAAI,oBAAoB,EAAE,IAAI,OAAO;AACpD;AA6QO,IAAM,4CAA4C,CAGvD,kBACA,QACA,YACmB;AACnB,SAAO,aAAAC,QAAM,KAAK,YAAY,kBAAkB;AAAA,IAC9C,GAAG;AAAA,IACH,QAAQ,EAAE,GAAG,QAAQ,GAAG,SAAS,OAAO;AAAA,EAC1C,CAAC;AACH;;;ALjRO,IAAM,gBAAN,cAA4B,YAAY;AAAA,EAAxC;AAAA;AACL,SAAS,OAAO;AAChB,SAAS,eAAqC;AAAA,MAC5C,WAAW;AAAA,MACX,aAAa;AAAA,MACb,gBAAgB;AAAA,MAChB,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,MAClB,eAAe;AAAA,MACf,mBAAmB;AAAA,MACnB,iBAAiB;AAAA,MACjB,cAAc;AAAA;AAAA,IAChB;AAEA,SAAU,UAAU;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMV,iBAAiB;AACzB,WAAO,MAAM,eAAe,cAAc;AAAA,EAC5C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAiEA,MAAM,WACJ,OACA,SACoC;AACpC,SAAK,eAAe;AAEpB,QAAI;AAEF,YAAM,UAAU,KAAK,0BAA0B,OAAO,OAAO;AAG7D,YAAM,WAAW,MAAM;AAAA,QACrB;AAAA,QACA,KAAK,eAAe;AAAA,MACtB;AAEA,YAAM,QAAQ,SAAS,KAAK;AAG5B,UAAI,SAAS,YAAY;AACvB,eAAO;AAAA,UACL,SAAS;AAAA,UACT,UAAU,KAAK;AAAA,UACf,MAAM;AAAA,YACJ,IAAI;AAAA,YACJ,MAAM;AAAA,YACN,QAAQ;AAAA,UACV;AAAA,UACA,KAAK,SAAS;AAAA,QAChB;AAAA,MACF;AAGA,aAAO,MAAM,KAAK,kBAAkB,KAAK;AAAA,IAC3C,SAAS,OAAO;AACd,aAAO,KAAK,oBAAoB,KAAK;AAAA,IACvC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,cAA0D;AAC5E,SAAK,eAAe;AAEpB,QAAI;AAEF,YAAM,WAAW,MAAM;AAAA,QACrB;AAAA,QACA,KAAK,eAAe;AAAA,MACtB;AAE
A,aAAO,KAAK,kBAAkB,SAAS,IAAI;AAAA,IAC7C,SAAS,OAAO;AACd,aAAO,KAAK,oBAAoB,KAAK;AAAA,IACvC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,0BACN,OACA,SAC0B;AAE1B,QAAI;AACJ,QAAI,MAAM,SAAS,OAAO;AACxB,iBAAW,MAAM;AAAA,IACnB,OAAO;AACL,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,UAAM,UAAoC;AAAA,MACxC,WAAW;AAAA,IACb;AAGA,QAAI,SAAS;AAEX,UAAI,QAAQ,YAAY,QAAQ,mBAAmB;AACjD,gBAAQ,kBAAkB;AAAA,UACxB,WAAW,QAAQ,WACf,CAAC,QAAQ,QAAyC,IAClD;AAAA,UACJ,gBAAgB,QAAQ;AAAA,QAC1B;AAAA,MACF;AAGA,UAAI,QAAQ,aAAa;AACvB,gBAAQ,cAAc;AACtB,YAAI,QAAQ,kBAAkB;AAC5B,kBAAQ,qBAAqB;AAAA,YAC3B,oBAAoB,QAAQ;AAAA,UAC9B;AAAA,QACF;AAAA,MACF;AAGA,UAAI,QAAQ,oBAAoB,QAAQ,iBAAiB,SAAS,GAAG;AACnE,gBAAQ,oBAAoB;AAC5B,gBAAQ,2BAA2B;AAAA,UACjC,YAAY,QAAQ;AAAA,QACtB;AAAA,MACF;AAGA,UAAI,QAAQ,eAAe;AACzB,gBAAQ,gBAAgB;AAAA,MAC1B;AAGA,UAAI,QAAQ,mBAAmB;AAC7B,gBAAQ,qBAAqB;AAAA,MAC/B;AAGA,UAAI,QAAQ,iBAAiB;AAC3B,gBAAQ,2BAA2B;AAAA,MACrC;AAGA,UAAI,QAAQ,YAAY;AACtB,gBAAQ,WAAW;AACnB,gBAAQ,kBAAkB;AAAA,UACxB,KAAK,QAAQ;AAAA,QACf;AAAA,MACF;AAGA,UAAI,QAAQ,UAAU;AACpB,gBAAQ,kBAAkB,QAAQ;AAAA,MACpC;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,kBAAkB,UAA0D;AAElF,UAAM,SAAS,gBAAgB,SAAS,QAAQ,QAAQ;AAGxD,QAAI,SAAS,WAAW,SAAS;AAC/B,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,KAAK;AAAA,QACf,OAAO;AAAA,UACL,MAAM,SAAS,YAAY,SAAS,KAAK,YAAY;AAAA,UACrD,SAAS;AAAA,UACT,YAAY,SAAS,cAAc;AAAA,QACrC;AAAA,QACA,KAAK;AAAA,MACP;AAAA,IACF;AAGA,UAAM,SAAS,SAAS;AACxB,UAAM,gBAAgB,QAAQ;AAE9B,WAAO;AAAA,MACL,SAAS;AAAA,MACT,UAAU,KAAK;AAAA,MACf,MAAM;AAAA,QACJ,IAAI,SAAS;AAAA,QACb,MAAM,eAAe,mBAAmB;AAAA,QACxC,YAAY;AAAA;AAAA,QACZ;AAAA,QACA,UAAU,eAAe,YAAY,CAAC;AAAA;AAAA,QACtC,UAAU;AAAA;AAAA,QACV,UAAU,KAAK,gBAAgB,aAAa;AAAA,QAC5C,OAAO,KAAK,aAAa,aAAa;AAAA,QACtC,YAAY,KAAK,kBAAkB,aAAa;AAAA,QAChD,SAAS,QAAQ,eAAe,WAAW;AAAA,QAC3C,UAAU;AAAA,UACR,eAAe,SAAS;AAAA,UACxB,gBAAgB,SAAS;AAAA,QAC3B;AAAA,QACA,WAAW,SAAS;AAAA,QACpB,aAAa,SAAS,gBAAgB;AAAA,MACxC;AAAA,MACA,KAAK;AAAA,IACP;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,gBAAgB,eAA6C;AACnE,WAAO;AAAA,MACL,eAAe;AAAA,MACf,CAAC,cAA4B,UAAU;AAAA,MACvC,CAAC,OAAO,WAAW,EAAE;AAAA,IACvB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,eAA6C;AAChE,QAAI,CAAC,eAAe,YAAY;AAC9B,aAAO;AAAA,IACT;AAGA,UAAM,WAAW,cAAc,WAAW;AAAA,MAAQ,CAAC,cACjD,UAAU,MAAM,IAAI,CAAC,UAAmB;AAAA,QACtC;AAAA,QACA,SAAS,UAAU;AAAA,MACrB,EAAE;AAAA,IACJ;AAEA,WAAO,aAAiB,UAAU,CAAC,UAAU;AAAA,MAC3C,MAAM,KAAK,KAAK;AAAA,MAChB,OAAO,KAAK,KAAK;AAAA,MACjB,KAAK,KAAK,KAAK;AAAA,MACf,YAAY,KAAK,KAAK;AAAA,MACtB,SAAS,KAAK,SAAS,SAAS;AAAA,IAClC,EAAE;AAAA,EACJ;AAAA;AAAA;AAAA;AAAA,EAKQ,kBAAkB,eAA6C;AACrE,QAAI,CAAC,eAAe,YAAY;AAC9B,aAAO;AAAA,IACT;AAEA,WAAO,cAAc,WAAW,IAAI,CAAC,eAA6B;AAAA,MAChE,MAAM,UAAU;AAAA,MAChB,OAAO,UAAU;AAAA,MACjB,KAAK,UAAU;AAAA,MACf,SAAS,UAAU,SAAS,SAAS;AAAA,MACrC,YAAY,UAAU;AAAA,MACtB,OAAO,UAAU,MAAM,IAAI,CAAC,UAAmB;AAAA,QAC7C,MAAM,KAAK;AAAA,QACX,OAAO,KAAK;AAAA,QACZ,KAAK,KAAK;AAAA,QACV,YAAY,KAAK;AAAA,MACnB,EAAE;AAAA,IACJ,EAAE;AAAA,EACJ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA6CA,MAAM,iBACJ,SACA,WAC2B;AAC3B,SAAK,eAAe;AAGpB,QAAI;AACJ,QAAI,SAAS,YAAY;AACvB,4BAAsB;AAAA,QACpB,QAAQ;AAAA,QACR;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAGA,UAAM,mBAA8C;AAAA,MAClD,UAAU,SAAS,WACd,sBAAsB,QAAQ,UAAU,QAAQ,IACjD;AAAA,MACJ,aAAa;AAAA,MACb,UAAU,SAAS;AAAA,MACnB,aAAa,SAAS;AAAA,IACxB;AAEA,QAAI,SAAS,UAAU;AACrB,uBAAiB,kBAAkB;AAAA,QACjC,WAAW,CAAC,QAAQ,QAAyC;AAAA,MAC/D;AAAA,IACF;AAGA,UAAM,eAAe,MAAM;AAAA,MACzB;AAAA,MACA;AAAA;AAAA,MACA,KAAK,eAAe;AAAA,IACtB;AAEA,UAAM,EAAE,IAAI,KAAK,MAAM,IAAI,aAAa;AAGxC,UAAM,KAAK,IAAI,UAAAC,QA
AU,KAAK;AAE9B,QAAI,gBAA+B;AAGnC,2BAAuB,IAAI,WAAW,CAAC,WAAW;AAChD,sBAAgB;AAAA,IAClB,CAAC;AAED,OAAG,GAAG,WAAW,CAAC,SAAiB;AACjC,UAAI;AACF,cAAM,UAAU,KAAK,MAAM,KAAK,SAAS,CAAC;AAG1C,YAAI,QAAQ,SAAS,cAAc;AAEjC,gBAAM,oBAAoB;AAC1B,gBAAM,cAAc,kBAAkB;AACtC,gBAAM,YAAY,YAAY;AAE9B,qBAAW,eAAe;AAAA,YACxB,MAAM;AAAA,YACN,MAAM,UAAU;AAAA,YAChB,SAAS,YAAY;AAAA,YACrB,YAAY,UAAU;AAAA,YACtB,OAAO,UAAU,MAAM,IAAI,CAAC,UAAU;AAAA,cACpC,MAAM,KAAK;AAAA,cACX,OAAO,KAAK;AAAA,cACZ,KAAK,KAAK;AAAA,cACV,YAAY,KAAK;AAAA,YACnB,EAAE;AAAA,YACF,MAAM;AAAA,UACR,CAAC;AAAA,QACH,WAAW,QAAQ,SAAS,aAAa;AAEvC,gBAAM,oBAAoB;AAC1B,gBAAM,cAAc,kBAAkB;AACtC,gBAAM,YAAY,YAAY;AAE9B,gBAAM,gBAAgB;AAAA,YACpB,MAAM,UAAU;AAAA,YAChB,OAAO,UAAU;AAAA,YACjB,KAAK,UAAU;AAAA,YACf,SAAS,UAAU,SAAS,SAAS;AAAA,YACrC,YAAY,UAAU;AAAA,YACtB,OAAO,UAAU,MAAM,IAAI,CAAC,UAAU;AAAA,cACpC,MAAM,KAAK;AAAA,cACX,OAAO,KAAK;AAAA,cACZ,KAAK,KAAK;AAAA,cACV,YAAY,KAAK;AAAA,YACnB,EAAE;AAAA,UACJ;AACA,qBAAW,cAAc,aAAa;AAAA,QACxC,WAAW,QAAQ,SAAS,YAAY;AACtC,qBAAW,aAAa,OAAO;AAAA,QACjC;AAAA,MACF,SAAS,OAAO;AACd,mBAAW,UAAU;AAAA,UACnB,MAAM,YAAY;AAAA,UAClB,SAAS;AAAA,UACT,SAAS;AAAA,QACX,CAAC;AAAA,MACH;AAAA,IACF,CAAC;AAGD,UAAM,qBAAqB,EAAE;AAG7B,WAAO;AAAA,MACL;AAAA,MACA,UAAU,KAAK;AAAA,MACf,WAAW,oBAAI,KAAK;AAAA,MACpB,WAAW,MAAM;AAAA,MACjB,WAAW,OAAO,UAAsB;AAEtC,gCAAwB,eAAe,GAAG,YAAY,UAAAA,QAAU,IAAI;AAGpE,WAAG,KAAK,MAAM,IAAI;AAGlB,YAAI,MAAM,QAAQ;AAChB,aAAG;AAAA,YACD,KAAK,UAAU;AAAA,cACb,MAAM;AAAA,YACR,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF;AAAA,MACA,OAAO,YAAY;AACjB,YAAI,kBAAkB,YAAY,kBAAkB,WAAW;AAC7D;AAAA,QACF;AAEA,wBAAgB;AAGhB,YAAI,GAAG,eAAe,UAAAA,QAAU,MAAM;AACpC,aAAG;AAAA,YACD,KAAK,UAAU;AAAA,cACb,MAAM;AAAA,YACR,CAAC;AAAA,UACH;AAAA,QACF;AAGA,cAAM,eAAe,EAAE;AACvB,wBAAgB;AAAA,MAClB;AAAA,IACF;AAAA,EACF;AACF;AAKO,SAAS,oBAAoB,QAAuC;AACzE,QAAM,UAAU,IAAI,cAAc;AAClC,UAAQ,WAAW,MAAM;AACzB,SAAO;AACT;;;A6EhoBA,IAAAC,aAAsB;;;ACItB,IAAAC,gBAAkB;;;ACVlB,IAAAC,kBAAA;AAAA,SAAAA,iBAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACeO,IAAM,+BAA+B;AAAA,EAC1C,SAAS;AAAA,EACT,aAAa;AACf;;;ACJO,IAAM,aAAa;AAAA,EACxB,gBAAgB;AAAA,EAChB,qBAAqB;AAAA,EACrB,YAAY;AAAA,EACZ,iBAAiB;AAAA,EACjB,wBAAwB;AAAA,EACxB,oBAAoB;AAAA,EACpB,MAAM;AAAA,EACN,eAAe;AAAA,EACf,eAAe;AAAA,EACf,iBAAiB;AAAA,EACjB,MAAM;AAAA,EACN,UAAU;AAAA,EACV,eAAe;AAAA,EACf,OAAO;AAAA,EACP,UAAU;AAAA,EACV,kBAAkB;AAAA,EAClB,mBAAmB;AAAA,EACnB,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,UAAU;AAAA,EACV,UAAU;AAAA,EACV,gBAAgB;AAAA,EAChB,mBAAmB;AAAA,EACnB,iBAAiB;AAAA,EACjB,cAAc;AAAA,EACd,aAAa;AAAA,EACb,iBAAiB;AAAA,EACjB,YAAY;AAAA,EACZ,cAAc;AAAA,EACd,iBAAiB;AAAA,EACjB,UAAU;AAAA,EACV,YAAY;AAAA,EACZ,aAAa;AAAA,EACb,cAAc;AAAA,EACd,oBAAoB;AAAA,EACpB,uBAAuB;AAAA,EACvB,UAAU;AAAA,EACV,YAAY;AAAA,EACZ,MAAM;AAAA,EACN,KAAK;AAAA,EACL,2BAA2B;AAAA,EAC3B,UAAU;AAAA,EACV,YAAY;AAAA,EACZ,aAAa;AACf;;;AC5CO,IAAM,aAAa;AAAA,EACxB,+BAA+B;AAAA,EAC/B,2BAA2B;AAAA,EAC3B,4BAA4B;AAC9B;;;ACLO,IAAM,YAAY;AAAA,EACvB,gBAAgB;AAAA,EAChB,qBAAqB;AAAA,EACrB,YAAY;AAAA,EACZ,iBAAiB;AAAA,EACjB,wBAAwB;AAAA,EACxB,oBAAoB;AAAA,EACpB,MAAM;AAAA,EACN,eAAe;AAAA,EACf,eAAe;AAAA,EACf,iBAAiB;AAAA,EACjB,MAAM;AAAA,EACN,UAAU;AAAA,EACV,eAAe;AAAA,EACf,OAAO;AAAA,EACP,UAAU;AAAA,EACV,kBAAkB;AAAA,EAClB,mBAAmB;AAAA,EACnB,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,UAAU;AAAA,EACV,UAAU;AAAA,EACV,gBAAgB;AAAA,EAChB,mBAAmB;AAAA,EACnB,iBAAiB;AAAA,EACjB,cAAc;AAAA,EACd,aAAa;AAAA,EACb,iBAAiB;AAAA,EACjB,YAAY;AAAA,EACZ,cAAc;AAAA,EACd,iBAAiB;AAAA,EACjB,UAAU;AAAA,EACV,YAAY;AAAA,EACZ,aAAa;AAAA,EACb,cAAc;AAAA,EACd,oBAAoB;AAAA,EACpB,uBAAuB;AAAA,EACvB,UAAU;AAAA,EACV,YAAY;AAAA,EACZ,MAAM;AAAA,EACN,KAAK;AAAA,EACL,2BAA2B;AAAA,EAC3B,UAAU;AAAA,EACV,YAAY;AAAA,EACZ,aAAa;AACf;;;AC7CO
,IAAM,sBAAsB;AAAA,EACjC,sBAAsB;AACxB;;;ACDO,IAAM,wBAAwB;AAAA,EACnC,KAAK;AAAA,EACL,KAAK;AACP;;;ACPO,IAAM,YAAY;AAAA,EACvB,UAAU;AAAA,EACV,SAAS;AAAA,EACT,UAAU;AACZ;;;ACDO,IAAM,cAAc;AAAA,EACzB,MAAM;AAAA,EACN,UAAU;AAAA,EACV,WAAW;AACb;;;ACJO,IAAM,qBAAqB;AAAA,EAChC,aAAa;AAAA,EACb,MAAM;AACR;;;ACHO,IAAM,iBAAiB;AAAA,EAC5B,KAAK;AAAA,EACL,KAAK;AACP;;;ACHO,IAAM,eAAe;AAAA,EAC1B,aAAa;AAAA,EACb,gBAAgB;AAAA,EAChB,QAAQ;AACV;;;ACJO,IAAM,cAAc;AAAA,EACzB,SAAS;AAAA,EACT,iBAAiB;AAAA,EACjB,MAAM;AAAA,EACN,UAAU;AAAA,EACV,WAAW;AACb;;;ACNO,IAAM,uBAAuB;AAAA,EAClC,KAAK;AAAA,EACL,SAAS;AAAA,EACT,MAAM;AACR;;;ACDO,IAAM,yBAAyB;AAAA,EACpC,IAAI;AAAA,EACJ,OAAO;AAAA,EACP,OAAO;AAAA,EACP,OAAO;AAAA,EACP,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,KAAK;AAAA,EACL,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,IAAI;AACN;;;ACzGO,IAAM,wBAAwB;AAAA,EACnC,WAAW;AAAA,EACX,OAAO;AACT;;;ACJO,IAAM,mBAAmB;AAAA,EAC9B,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,WAAW;AAAA,EACX,OAAO;AACT;;;AjBkDO,IAAM,mBAAmB,CAC9B,kBACA,YACmB;AACnB,SAAO,cAAAC,QAAM,KAAK,kBAAkB,kBAAkB,OAAO;AAC/D;AA2BO,IAAM,gBAAgB,CAC3B,cACA,YACmB;AACnB,SAAO,cAAAC,QAAM,IAAI,kBAAkB,YAAY,IAAI,OAAO;AAC5D;;;ADXO,IAAM,oBAAN,cAAgC,YAAY;AAAA,EAA5C;AAAA;AACL,SAAS,OAAO;AAChB,SAAS,eAAqC;AAAA,MAC5C,WAAW;AAAA,MACX,aAAa;AAAA,MACb,gBAAgB;AAAA,MAChB,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,MAClB,eAAe;AAAA,MACf,mBAAmB;AAAA,MACnB,iBAAiB;AAAA,MACjB,cAAc;AAAA,IAChB;AAEA,SAAU,UAAU;AACpB;AAAA,SAAQ,YAAY;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMV,iBAAiB;AACzB,WAAO,MAAM,eAAe,eAAe;AAAA,EAC7C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAqEA,MAAM,WACJ,OACA,SACoC;AACpC,SAAK,eAAe;AAEpB,QAAI;AAEF,YAAM,UAAU,KAAK,0BAA0B,OAAO,OAAO;AAG7D,YAAM,WAAW,MAAM,iBAAiB,SAAS,KAAK,eAAe,CAAC;AAEtE,YAAM,eAAe,SAAS,KAAK;AAGnC,UAAI,SAAS,YAAY;AACvB,eAAO;AAAA,UACL,SAAS;AAAA,UACT,UAAU,KAAK;AAAA,UACf,MAAM;AAAA,YACJ,IAAI;AAAA,YACJ,MAAM;AAAA,YACN,QAAQ;AAAA,UACV;AAAA,UACA,KAAK,SAAS;AAAA,QAChB;AAAA,MACF;AAGA,aAAO,MAAM,KAAK,kBAAkB,YAAY;AAAA,IAClD,SAAS,OAAO;AAC
d,aAAO,KAAK,oBAAoB,KAAK;AAAA,IACvC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,cAA0D;AAC5E,SAAK,eAAe;AAEpB,QAAI;AAEF,YAAM,WAAW,MAAM,cAAiB,cAAc,KAAK,eAAe,CAAC;AAE3E,aAAO,KAAK,kBAAkB,SAAS,IAAI;AAAA,IAC7C,SAAS,OAAO;AACd,aAAO,KAAK,oBAAoB,KAAK;AAAA,IACvC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,0BACN,OACA,SACkB;AAElB,QAAI;AACJ,QAAI,MAAM,SAAS,OAAO;AACxB,iBAAW,MAAM;AAAA,IACnB,OAAO;AACL,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,UAAM,UAA4B;AAAA,MAChC,WAAW;AAAA,IACb;AAGA,QAAI,SAAS;AAEX,UAAI,QAAQ,UAAU;AAEpB,cAAM,eAAe,QAAQ,SAAS,SAAS,GAAG,IAC9C,QAAQ,WACR,GAAG,QAAQ,QAAQ;AACvB,gBAAQ,gBAAgB;AAAA,MAC1B;AAEA,UAAI,QAAQ,mBAAmB;AAC7B,gBAAQ,qBAAqB;AAAA,MAC/B;AAGA,UAAI,QAAQ,aAAa;AACvB,gBAAQ,iBAAiB;AACzB,YAAI,QAAQ,kBAAkB;AAC5B,kBAAQ,oBAAoB,QAAQ;AAAA,QACtC;AAAA,MACF;AAGA,UAAI,QAAQ,oBAAoB,QAAQ,iBAAiB,SAAS,GAAG;AACnE,gBAAQ,aAAa,QAAQ;AAC7B,gBAAQ,cAAc;AAAA,MACxB;AAGA,UAAI,QAAQ,eAAe;AACzB,gBAAQ,gBAAgB;AACxB,gBAAQ,gBAAgB;AACxB,gBAAQ,eAAe;AAAA,MACzB;AAGA,UAAI,QAAQ,mBAAmB;AAC7B,gBAAQ,qBAAqB;AAAA,MAC/B;AAGA,UAAI,QAAQ,iBAAiB;AAC3B,gBAAQ,mBAAmB;AAAA,MAC7B;AAGA,UAAI,QAAQ,cAAc;AACxB,gBAAQ,aAAa;AAAA,MACvB;AAGA,UAAI,QAAQ,YAAY;AACtB,gBAAQ,cAAc,QAAQ;AAAA,MAChC;AAIA,cAAQ,YAAY;AACpB,cAAQ,cAAc;AAAA,IACxB;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,kBAAkB,UAAiD;AAEzE,QAAI;AACJ,YAAQ,SAAS,QAAQ;AAAA,MACvB,KAAK;AACH,iBAAS;AACT;AAAA,MACF,KAAK;AACH,iBAAS;AACT;AAAA,MACF,KAAK;AACH,iBAAS;AACT;AAAA,MACF,KAAK;AACH,iBAAS;AACT;AAAA,MACF;AACE,iBAAS;AAAA,IACb;AAGA,QAAI,SAAS,WAAW,SAAS;AAC/B,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,KAAK;AAAA,QACf,OAAO;AAAA,UACL,MAAM;AAAA,UACN,SAAS,SAAS,SAAS;AAAA,QAC7B;AAAA,QACA,KAAK;AAAA,MACP;AAAA,IACF;AAEA,WAAO;AAAA,MACL,SAAS;AAAA,MACT,UAAU,KAAK;AAAA,MACf,MAAM;AAAA,QACJ,IAAI,SAAS;AAAA,QACb,MAAM,SAAS,QAAQ;AAAA,QACvB,YAAY,SAAS,eAAe,OAAO,SAAS,aAAa;AAAA,QACjE;AAAA,QACA,UAAU,SAAS;AAAA,QACnB,UAAU,SAAS,iBAAiB,SAAS,iBAAiB,MAAO;AAAA;AAAA,QACrE,UAAU,KAAK,gBAAgB,QAAQ;AAAA,QACvC,OAAO,KAAK,aAAa,QAAQ;AAAA,QACjC,YAAY,KAAK,kBAAkB,QAAQ;AAAA,QAC3C,SAAS,SAAS,WAAW;AAAA,QAC7B,UAAU;AAAA,UACR,UAAU,SAAS;AAAA,UACnB,UAAU,SAAS;AAAA,UACnB,mBAAmB,SAAS;AAAA,UAC5B,mBAAmB,SAAS;AAAA,QAC9B;AAAA,MACF;AAAA,MACA,KAAK;AAAA,IACP;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,gBAAgB,YAAwB;AAC9C,QAAI,CAAC,WAAW,cAAc,WAAW,WAAW,WAAW,GAAG;AAChE,aAAO;AAAA,IACT;AAGA,UAAM,aAAa,oBAAI,IAAY;AACnC,eAAW,WAAW,QAAQ,CAAC,cAAmC;AAChE,UAAI,UAAU,SAAS;AACrB,mBAAW,IAAI,UAAU,OAAO;AAAA,MAClC;AAAA,IACF,CAAC;AAED,QAAI,WAAW,SAAS,GAAG;AACzB,aAAO;AAAA,IACT;AAEA,WAAO,MAAM,KAAK,UAAU,EAAE,IAAI,CAAC,eAAe;AAAA,MAChD,IAAI;AAAA,MACJ,OAAO;AAAA;AAAA,IACT,EAAE;AAAA,EACJ;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,YAAwB;AAC3C,QAAI,CAAC,WAAW,SAAS,WAAW,MAAM,WAAW,GAAG;AACtD,aAAO;AAAA,IACT;AAEA,WAAO,WAAW,MAAM,IAAI,CAAC,UAA0B;AAAA,MACrD,MAAM,KAAK;AAAA,MACX,OAAO,KAAK,QAAQ;AAAA;AAAA,MACpB,KAAK,KAAK,MAAM;AAAA;AAAA,MAChB,YAAY,KAAK;AAAA,MACjB,SAAS,KAAK,WAAW;AAAA,IAC3B,EAAE;AAAA,EACJ;AAAA;AAAA;AAAA;AAAA,EAKQ,kBAAkB,YAAwB;AAChD,QAAI,CAAC,WAAW,cAAc,WAAW,WAAW,WAAW,GAAG;AAChE,aAAO;AAAA,IACT;AAEA,WAAO,WAAW,WAAW,IAAI,CAAC,eAAoC;AAAA,MACpE,MAAM,UAAU;AAAA,MAChB,OAAO,UAAU,QAAQ;AAAA;AAAA,MACzB,KAAK,UAAU,MAAM;AAAA;AAAA,MACrB,SAAS,UAAU,WAAW;AAAA,MAC9B,YAAY,UAAU;AAAA,MACtB,OAAO,UAAU,MAAM,IAAI,CAAC,UAA0B;AAAA,QACpD,MAAM,KAAK;AAAA,QACX,OAAO,KAAK,QAAQ;AAAA,QACpB,KAAK,KAAK,MAAM;AAAA,QAChB,YAAY,KAAK;AAAA,MACnB,EAAE;AAAA,IACJ,EAAE;AAAA,EACJ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAwCA,MAAM,iBACJ,SACA,WAC2B;AAC3B,SAAK,eAAe;AAEpB,QAAI,CAAC,KAAK,QAAQ,QAAQ;AACxB,YAAM,IAAI,MAAM,mCAAmC;AAAA,IACrD;AAIA,UAAM,aAA
a,SAAS,cAAc;AAE1C,UAAM,WAAW,SAAS,WACtB,sBAAsB,QAAQ,UAAU,YAAY,IACpD;AACJ,UAAM,QAAQ,GAAG,KAAK,SAAS,gBAAgB,UAAU,aAAa,QAAQ;AAG9E,UAAM,KAAK,IAAI,WAAAC,QAAU,OAAO;AAAA,MAC9B,SAAS;AAAA,QACP,eAAe,KAAK,OAAO;AAAA,MAC7B;AAAA,IACF,CAAC;AAED,QAAI,gBAA8D;AAClE,UAAM,YAAY,cAAc,KAAK,IAAI,CAAC,IAAI,KAAK,OAAO,EAAE,SAAS,EAAE,EAAE,UAAU,CAAC,CAAC;AAGrF,OAAG,GAAG,QAAQ,MAAM;AAClB,sBAAgB;AAChB,iBAAW,SAAS;AAAA,IACtB,CAAC;AAED,OAAG,GAAG,WAAW,CAAC,SAAiB;AACjC,UAAI;AACF,cAAM,UAAU,KAAK,MAAM,KAAK,SAAS,CAAC;AAI1C,YAAI,WAAW,SAAS;AAEtB,qBAAW,UAAU;AAAA,YACnB,MAAM;AAAA,YACN,SAAU,QAAuB;AAAA,UACnC,CAAC;AACD;AAAA,QACF;AAGA,YAAK,QAAsD,SAAS,SAAS;AAE3E,gBAAM,WAAW;AACjB,qBAAW,aAAa;AAAA,YACtB,WAAW,SAAS;AAAA,YACpB,WAAW,IAAI,KAAK,SAAS,UAAU,EAAE,YAAY;AAAA,UACvD,CAAC;AAAA,QACH,WAAY,QAAsD,SAAS,QAAQ;AAEjF,gBAAM,UAAU;AAEhB,qBAAW,eAAe;AAAA,YACxB,MAAM;AAAA,YACN,MAAM,QAAQ;AAAA,YACd,SAAS,QAAQ;AAAA,YACjB,YAAY,QAAQ;AAAA,YACpB,OAAO,QAAQ,MAAM,IAAI,CAAC,UAAyB;AAAA,cACjD,MAAM,KAAK;AAAA,cACX,OAAO,KAAK,QAAQ;AAAA;AAAA,cACpB,KAAK,KAAK,MAAM;AAAA,cAChB,YAAY,KAAK;AAAA,YACnB,EAAE;AAAA,YACF,MAAM;AAAA,UACR,CAAC;AAAA,QACH,WAAY,QAAsD,SAAS,eAAe;AAExF,gBAAM,UAAU;AAChB,qBAAW,aAAa;AAAA,YACtB,YAAY;AAAA,YACZ,sBAAsB,QAAQ;AAAA,YAC9B,wBAAwB,QAAQ;AAAA,UAClC,CAAC;AAAA,QACH;AAAA,MACF,SAAS,OAAO;AACd,mBAAW,UAAU;AAAA,UACnB,MAAM;AAAA,UACN,SAAS;AAAA,UACT,SAAS;AAAA,QACX,CAAC;AAAA,MACH;AAAA,IACF,CAAC;AAED,OAAG,GAAG,SAAS,CAAC,UAAiB;AAC/B,iBAAW,UAAU;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,MAAM;AAAA,QACf,SAAS;AAAA,MACX,CAAC;AAAA,IACH,CAAC;AAED,OAAG,GAAG,SAAS,CAAC,MAAc,WAAmB;AAC/C,sBAAgB;AAChB,iBAAW,UAAU,MAAM,OAAO,SAAS,CAAC;AAAA,IAC9C,CAAC;AAGD,UAAM,IAAI,QAAc,CAAC,SAAS,WAAW;AAC3C,YAAM,UAAU,WAAW,MAAM;AAC/B,eAAO,IAAI,MAAM,8BAA8B,CAAC;AAAA,MAClD,GAAG,GAAK;AAER,SAAG,KAAK,QAAQ,MAAM;AACpB,qBAAa,OAAO;AACpB,gBAAQ;AAAA,MACV,CAAC;AAED,SAAG,KAAK,SAAS,CAAC,UAAU;AAC1B,qBAAa,OAAO;AACpB,eAAO,KAAK;AAAA,MACd,CAAC;AAAA,IACH,CAAC;AAGD,WAAO;AAAA,MACL,IAAI;AAAA,MACJ,UAAU,KAAK;AAAA,MACf,WAAW,oBAAI,KAAK;AAAA,MACpB,WAAW,MAAM;AAAA,MACjB,WAAW,OAAO,UAAsB;AACtC,YAAI,kBAAkB,QAAQ;AAC5B,gBAAM,IAAI,MAAM,iCAAiC,aAAa,EAAE;AAAA,QAClE;AAEA,YAAI,GAAG,eAAe,WAAAA,QAAU,MAAM;AACpC,gBAAM,IAAI,MAAM,uBAAuB;AAAA,QACzC;AAGA,cAAM,cAAc,MAAM,KAAK,SAAS,QAAQ;AAGhD,WAAG;AAAA,UACD,KAAK,UAAU;AAAA,YACb,YAAY;AAAA,UACd,CAAC;AAAA,QACH;AAGA,YAAI,MAAM,QAAQ;AAChB,aAAG;AAAA,YACD,KAAK,UAAU;AAAA,cACb,mBAAmB;AAAA,YACrB,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF;AAAA,MACA,OAAO,YAAY;AACjB,YAAI,kBAAkB,YAAY,kBAAkB,WAAW;AAC7D;AAAA,QACF;AAEA,wBAAgB;AAGhB,YAAI,GAAG,eAAe,WAAAA,QAAU,MAAM;AACpC,aAAG;AAAA,YACD,KAAK,UAAU;AAAA,cACb,mBAAmB;AAAA,YACrB,CAAC;AAAA,UACH;AAAA,QACF;AAGA,eAAO,IAAI,QAAc,CAAC,YAAY;AACpC,gBAAM,UAAU,WAAW,MAAM;AAC/B,eAAG,UAAU;AACb,oBAAQ;AAAA,UACV,GAAG,GAAI;AAEP,aAAG,MAAM;AAET,aAAG,KAAK,SAAS,MAAM;AACrB,yBAAa,OAAO;AACpB,4BAAgB;AAChB,oBAAQ;AAAA,UACV,CAAC;AAAA,QACH,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACF;AAKO,SAAS,wBAAwB,QAA2C;AACjF,QAAM,UAAU,IAAI,kBAAkB;AACtC,UAAQ,WAAW,MAAM;AACzB,SAAO;AACT;;;AmB7qBA,IAAAC,gBAA0C;AAC1C,IAAAC,aAAsB;AAyGf,IAAM,kBAAN,cAA8B,YAAY;AAAA,EAA1C;AAAA;AACL,SAAS,OAAO;AAChB,SAAS,eAAqC;AAAA,MAC5C,WAAW;AAAA,MACX,aAAa;AAAA,MACb,gBAAgB;AAAA,MAChB,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,MAClB,eAAe;AAAA,MACf,mBAAmB;AAAA,MACnB,iBAAiB;AAAA,MACjB,cAAc;AAAA,IAChB;AAGA,SAAU,UAAU;AACpB,SAAQ,YAAY;AAAA;AAAA,EAEpB,WAAW,QAA8B;AACvC,UAAM,WAAW,MAAM;AAEvB,SAAK,SAAS,cAAAC,QAAM,OAAO;AAAA,MACzB,SAAS,OAAO,WAAW,KAAK;AAAA,MAChC,SAAS,OAAO,WAAW;AAAA,MAC3B,SAAS;AAAA,QACP,eAAe,SAAS,OAAO,MAAM;AAAA,QACrC,gBAAgB;AAAA,QAChB,GAAG,OAAO;AAAA,MACZ;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAA
A;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA6CA,MAAM,WACJ,OACA,SACoC;AACpC,SAAK,eAAe;AAEpB,QAAI;AAEF,YAAM,SAAS,KAAK,yBAAyB,OAAO;AAEpD,UAAI;AAEJ,UAAI,MAAM,SAAS,OAAO;AAExB,mBAAW,MAAM,KAAK,OAAQ;AAAA,UAC5B;AAAA,UACA,EAAE,KAAK,MAAM,IAAI;AAAA,UACjB,EAAE,OAAO;AAAA,QACX,EAAE,KAAK,CAAC,QAAQ,IAAI,IAAI;AAAA,MAC1B,WAAW,MAAM,SAAS,QAAQ;AAEhC,mBAAW,MAAM,KAAK,OAAQ,KAAuB,WAAW,MAAM,MAAM;AAAA,UAC1E;AAAA,UACA,SAAS;AAAA,YACP,gBAAgB;AAAA,UAClB;AAAA,QACF,CAAC,EAAE,KAAK,CAAC,QAAQ,IAAI,IAAI;AAAA,MAC3B,OAAO;AACL,cAAM,IAAI;AAAA,UACR;AAAA,QACF;AAAA,MACF;AAGA,aAAO,KAAK,kBAAkB,QAAQ;AAAA,IACxC,SAAS,OAAO;AACd,aAAO,KAAK,oBAAoB,KAAK;AAAA,IACvC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAM,cAAc,cAA0D;AAC5E,SAAK,eAAe;AAIpB,WAAO;AAAA,MACL,SAAS;AAAA,MACT,UAAU,KAAK;AAAA,MACf,OAAO;AAAA,QACL,MAAM;AAAA,QACN,SACE;AAAA,MACJ;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,yBAAyB,SAA4D;AAC3F,UAAM,SAAwC,CAAC;AAE/C,QAAI,CAAC,SAAS;AACZ,aAAO;AAAA,IACT;AAGA,QAAI,QAAQ,UAAU;AACpB,aAAO,WAAW,QAAQ;AAAA,IAC5B;AAEA,QAAI,QAAQ,mBAAmB;AAC7B,aAAO,kBAAkB;AAAA,IAC3B;AAGA,QAAI,QAAQ,aAAa;AACvB,aAAO,UAAU;AAAA,IACnB;AAGA,QAAI,QAAQ,oBAAoB,QAAQ,iBAAiB,SAAS,GAAG;AACnE,aAAO,WAAW,QAAQ;AAAA,IAC5B;AAGA,QAAI,QAAQ,eAAe;AACzB,aAAO,YAAY;AAAA,IACrB;AAGA,QAAI,QAAQ,mBAAmB;AAC7B,aAAO,YAAY;AAAA,IACrB;AAGA,QAAI,QAAQ,iBAAiB;AAC3B,aAAO,kBAAkB;AAAA,IAC3B;AAGA,QAAI,QAAQ,cAAc;AACxB,aAAO,SAAS,CAAC,OAAO,KAAK;AAAA,IAC/B;AAGA,QAAI,QAAQ,YAAY;AACtB,aAAO,WAAW,QAAQ;AAAA,IAC5B;AAGA,WAAO,YAAY;AACnB,WAAO,aAAa;AACpB,WAAO,eAAe;AAEtB,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,kBAAkB,UAAuD;AAE/E,UAAM,UAAU,SAAS,QAAQ,WAAW,CAAC;AAC7C,UAAM,cAAc,SAAS,eAAe,CAAC;AAE7C,QAAI,CAAC,aAAa;AAChB,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,KAAK;AAAA,QACf,OAAO;AAAA,UACL,MAAM;AAAA,UACN,SAAS;AAAA,QACX;AAAA,QACA,KAAK;AAAA,MACP;AAAA,IACF;AAEA,WAAO;AAAA,MACL,SAAS;AAAA,MACT,UAAU,KAAK;AAAA,MACf,MAAM;AAAA,QACJ,IAAI,SAAS,UAAU,cAAc;AAAA,QACrC,MAAM,YAAY,cAAc;AAAA,QAChC,YAAY,YAAY;AAAA,QACxB,QAAQ;AAAA;AAAA,QACR,UAAU,SAAS,qBAAqB;AAAA,QACxC,UAAU,SAAS,UAAU;AAAA,QAC7B,UAAU,KAAK,gBAAgB,QAAQ;AAAA,QACvC,OAAO,KAAK,aAAa,WAAW;AAAA,QACpC,YAAY,KAAK,kBAAkB,QAAQ;AAAA,QAC3C,SAAS,KAAK,eAAe,WAAW;AAAA,QACxC,UAAU;AAAA,UACR,WAAW,SAAS,UAAU;AAAA,UAC9B,UAAU,SAAS,UAAU;AAAA,UAC7B,WAAW,SAAS,QAAQ;AAAA,UAC5B,SAAS,SAAS,QAAQ;AAAA,UAC1B,QAAQ,SAAS,QAAQ;AAAA,QAC3B;AAAA,MACF;AAAA,MACA,KAAK;AAAA,IACP;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,gBAAgB,UAA4B;AAClD,UAAM,aAAa,SAAS,QAAQ;AAEpC,QAAI,CAAC,cAAc,WAAW,WAAW,GAAG;AAC1C,aAAO;AAAA,IACT;AAGA,UAAM,aAAa,oBAAI,IAAY;AACnC,eAAW,QAAQ,CAAC,cAAqD;AACvE,UAAI,UAAU,YAAY,QAAW;AACnC,mBAAW,IAAI,UAAU,OAAO;AAAA,MAClC;AAAA,IACF,CAAC;AAED,QAAI,WAAW,SAAS,GAAG;AACzB,aAAO;AAAA,IACT;AAEA,WAAO,MAAM,KAAK,UAAU,EAAE,IAAI,CAAC,eAAe;AAAA,MAChD,IAAI,UAAU,SAAS;AAAA,MACvB,OAAO,WAAW,SAAS;AAAA,IAC7B,EAAE;AAAA,EACJ;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,aAAkE;AACrF,QAAI,CAAC,YAAY,SAAS,YAAY,MAAM,WAAW,GAAG;AACxD,aAAO;AAAA,IACT;AAEA,WAAO,YAAY,MAAM;AAAA,MACvB,CAAC,UAAwE;AAAA,QACvE,MAAM,KAAK,QAAQ;AAAA,QACnB,OAAO,KAAK,SAAS;AAAA,QACrB,KAAK,KAAK,OAAO;AAAA,QACjB,YAAY,KAAK;AAAA,QACjB,SAAS;AAAA;AAAA,MACX;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,kBAAkB,UAA4B;AACpD,UAAM,aAAa,SAAS,QAAQ;AAEpC,QAAI,CAAC,cAAc,WAAW,WAAW,GAAG;AAC1C,aAAO;AAAA,IACT;AAEA,WAAO,WAAW,IAAI,CAAC,eAAsD;AAAA,MAC3E,MAAM,UAAU,cAAc;AAAA,MAC9B,OAAO,UAAU,SAAS;AAAA,MAC1B,KAAK,UAAU,OAAO;AAAA,MACtB,SAAS,UAAU,SAAS,SAAS;AAAA,MACrC,YAAY,UAAU;AAAA,MACtB,OAAO,UAAU,OAAO,IAAI,CAAC,UAAU;AAAA,QACrC,MAAM,KAAK,QAAQ;AAAA,QACnB,OAAO,KAAK,SAAS;AAAA,QACrB,KAAK,KAAK,OAAO;AAAA,QACjB,YAAY,KAAK;AAAA,MACnB,EAAE;AAAA,IACJ,EAAE;AAAA,EACJ;AAAA;AAAA;AAAA;A
AAA,EAKQ,eACN,aACoB;AACpB,QAAI,CAAC,YAAY,aAAa,YAAY,UAAU,WAAW,GAAG;AAChE,aAAO;AAAA,IACT;AAGA,WAAO,YAAY,UAChB,IAAI,CAAC,YAAY,QAAQ,OAAO,EAChC,OAAO,OAAO,EACd,KAAK,GAAG;AAAA,EACb;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA0CA,MAAM,iBACJ,SACA,WAC2B;AAC3B,SAAK,eAAe;AAGpB,UAAM,SAAS,IAAI,gBAAgB;AAEnC,QAAI,SAAS,SAAU,QAAO,OAAO,YAAY,QAAQ,QAAQ;AACjE,QAAI,SAAS,WAAY,QAAO,OAAO,eAAe,QAAQ,WAAW,SAAS,CAAC;AACnF,QAAI,SAAS,SAAU,QAAO,OAAO,YAAY,QAAQ,SAAS,SAAS,CAAC;AAC5E,QAAI,SAAS,SAAU,QAAO,OAAO,YAAY,QAAQ,QAAQ;AACjE,QAAI,SAAS,kBAAmB,QAAO,OAAO,mBAAmB,MAAM;AACvE,QAAI,SAAS,YAAa,QAAO,OAAO,WAAW,MAAM;AACzD,QAAI,SAAS,eAAgB,QAAO,OAAO,mBAAmB,MAAM;AACpE,QAAI,SAAS,cAAe,QAAO,OAAO,aAAa,MAAM;AAC7D,QAAI,SAAS,kBAAmB,QAAO,OAAO,aAAa,MAAM;AACjE,QAAI,SAAS,gBAAiB,QAAO,OAAO,mBAAmB,MAAM;AACrE,QAAI,SAAS,aAAc,QAAO,OAAO,UAAU,KAAK;AACxD,QAAI,SAAS,oBAAoB,QAAQ,iBAAiB,SAAS,GAAG;AACpE,aAAO,OAAO,YAAY,QAAQ,iBAAiB,KAAK,GAAG,CAAC;AAAA,IAC9D;AAEA,UAAM,QAAQ,GAAG,KAAK,SAAS,IAAI,OAAO,SAAS,CAAC;AAGpD,UAAM,KAAK,IAAI,WAAAC,QAAU,OAAO;AAAA,MAC9B,SAAS;AAAA,QACP,eAAe,SAAS,KAAK,OAAQ,MAAM;AAAA,MAC7C;AAAA,IACF,CAAC;AAED,QAAI,gBAA8D;AAClE,UAAM,YAAY,YAAY,KAAK,IAAI,CAAC,IAAI,KAAK,OAAO,EAAE,SAAS,EAAE,EAAE,UAAU,CAAC,CAAC;AAGnF,OAAG,GAAG,QAAQ,MAAM;AAClB,sBAAgB;AAChB,iBAAW,SAAS;AAAA,IACtB,CAAC;AAED,OAAG,GAAG,WAAW,CAAC,SAAiB;AACjC,UAAI;AACF,cAAM,UAAU,KAAK,MAAM,KAAK,SAAS,CAAC;AAG1C,YAAI,QAAQ,SAAS,WAAW;AAE9B,gBAAM,UAAU,QAAQ,QAAQ,aAAa,CAAC;AAE9C,cAAI,SAAS;AACX,kBAAM,aAAa,QAAQ;AAC3B,kBAAM,UAAU,QAAQ;AACxB,kBAAM,QAAQ,QAAQ,OAAO,IAAI,CAAC,UAAU;AAAA,cAC1C,MAAM,KAAK;AAAA,cACX,OAAO,KAAK;AAAA,cACZ,KAAK,KAAK;AAAA,cACV,YAAY,KAAK;AAAA,YACnB,EAAE;AAEF,uBAAW,eAAe;AAAA,cACxB,MAAM;AAAA,cACN,MAAM;AAAA,cACN;AAAA,cACA;AAAA,cACA,YAAY,QAAQ;AAAA,cACpB,MAAM;AAAA,YACR,CAAC;AAAA,UACH;AAAA,QACF,WAAW,QAAQ,SAAS,gBAAgB;AAE1C,qBAAW,aAAa,OAAO;AAAA,QACjC,WAAW,QAAQ,SAAS,YAAY;AAEtC,qBAAW,aAAa,OAAO;AAAA,QACjC;AAAA,MACF,SAAS,OAAO;AACd,mBAAW,UAAU;AAAA,UACnB,MAAM;AAAA,UACN,SAAS;AAAA,UACT,SAAS;AAAA,QACX,CAAC;AAAA,MACH;AAAA,IACF,CAAC;AAED,OAAG,GAAG,SAAS,CAAC,UAAiB;AAC/B,iBAAW,UAAU;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,MAAM;AAAA,QACf,SAAS;AAAA,MACX,CAAC;AAAA,IACH,CAAC;AAED,OAAG,GAAG,SAAS,CAAC,MAAc,WAAmB;AAC/C,sBAAgB;AAChB,iBAAW,UAAU,MAAM,OAAO,SAAS,CAAC;AAAA,IAC9C,CAAC;AAGD,UAAM,IAAI,QAAc,CAAC,SAAS,WAAW;AAC3C,YAAM,UAAU,WAAW,MAAM;AAC/B,eAAO,IAAI,MAAM,8BAA8B,CAAC;AAAA,MAClD,GAAG,GAAK;AAER,SAAG,KAAK,QAAQ,MAAM;AACpB,qBAAa,OAAO;AACpB,gBAAQ;AAAA,MACV,CAAC;AAED,SAAG,KAAK,SAAS,CAAC,UAAU;AAC1B,qBAAa,OAAO;AACpB,eAAO,KAAK;AAAA,MACd,CAAC;AAAA,IACH,CAAC;AAGD,WAAO;AAAA,MACL,IAAI;AAAA,MACJ,UAAU,KAAK;AAAA,MACf,WAAW,oBAAI,KAAK;AAAA,MACpB,WAAW,MAAM;AAAA,MACjB,WAAW,OAAO,UAAsB;AACtC,YAAI,kBAAkB,QAAQ;AAC5B,gBAAM,IAAI,MAAM,iCAAiC,aAAa,EAAE;AAAA,QAClE;AAEA,YAAI,GAAG,eAAe,WAAAA,QAAU,MAAM;AACpC,gBAAM,IAAI,MAAM,uBAAuB;AAAA,QACzC;AAGA,WAAG,KAAK,MAAM,IAAI;AAGlB,YAAI,MAAM,QAAQ;AAChB,aAAG,KAAK,KAAK,UAAU,EAAE,MAAM,cAAc,CAAC,CAAC;AAAA,QACjD;AAAA,MACF;AAAA,MACA,OAAO,YAAY;AACjB,YAAI,kBAAkB,YAAY,kBAAkB,WAAW;AAC7D;AAAA,QACF;AAEA,wBAAgB;AAGhB,YAAI,GAAG,eAAe,WAAAA,QAAU,MAAM;AACpC,aAAG,KAAK,KAAK,UAAU,EAAE,MAAM,cAAc,CAAC,CAAC;AAAA,QACjD;AAGA,eAAO,IAAI,QAAc,CAAC,YAAY;AACpC,gBAAM,UAAU,WAAW,MAAM;AAC/B,eAAG,UAAU;AACb,oBAAQ;AAAA,UACV,GAAG,GAAI;AAEP,aAAG,MAAM;AAET,aAAG,KAAK,SAAS,MAAM;AACrB,yBAAa,OAAO;AACpB,4BAAgB;AAChB,oBAAQ;AAAA,UACV,CAAC;AAAA,QACH,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACF;AAKO,SAAS,sBAAsB,QAAyC;AAC7E,QAAM,UAAU,IAAI,gBAAgB;AACpC,UAAQ,WAAW,MAAM;AACzB,SAAO;AACT;;;ACtpBA,IAAAC,gBAAkB;;;ACKlB,I
AAAC,gBAAkB;AAq0BX,IAAM,uBAAuB,CAClC,eACA,YACmB;AACnB,SAAO,cAAAC,QAAM,KAAK,mBAAmB,eAAe,OAAO;AAC7D;AAKO,IAAM,oBAAoB,CAC/B,IACA,YACmB;AACnB,SAAO,cAAAA,QAAM,IAAI,mBAAmB,EAAE,IAAI,OAAO;AACnD;AA0BO,IAAM,0BAA0B,CACrC,IACA,QACA,YACmB;AACnB,SAAO,cAAAC,QAAM,IAAI,mBAAmB,EAAE,UAAU;AAAA,IAC9C,GAAG;AAAA,IACH,QAAQ,EAAE,GAAG,QAAQ,GAAG,SAAS,OAAO;AAAA,EAC1C,CAAC;AACH;;;AD/xBO,IAAM,kBAAN,cAA8B,YAAY;AAAA,EAA1C;AAAA;AACL,SAAS,OAAO;AAChB,SAAS,eAAqC;AAAA,MAC5C,WAAW;AAAA;AAAA,MACX,aAAa;AAAA,MACb,gBAAgB;AAAA,MAChB,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,MAClB,eAAe;AAAA,MACf,mBAAmB;AAAA,MACnB,iBAAiB;AAAA,MACjB,cAAc;AAAA,IAChB;AAGA,SAAU,UAAU;AAAA;AAAA;AAAA,EAEpB,WAAW,QAAoD;AAC7D,UAAM,WAAW,MAAM;AAEvB,SAAK,SAAS,OAAO,UAAU;AAC/B,SAAK,UACH,OAAO,WAAW,WAAW,KAAK,MAAM;AAAA,EAC5C;AAAA;AAAA;AAAA;AAAA;AAAA,EAMU,iBAAiB;AACzB,WAAO,MAAM,eAAe,2BAA2B;AAAA,EACzD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAM,WACJ,OACA,SACoC;AACpC,SAAK,eAAe;AAEpB,QAAI,MAAM,SAAS,OAAO;AACxB,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,KAAK;AAAA,QACf,OAAO;AAAA,UACL,MAAM;AAAA,UACN,SAAS;AAAA,QACX;AAAA,MACF;AAAA,IACF;AAEA,QAAI;AACF,YAAM,uBAA+C;AAAA,QACnD,aAAc,SAAS,UAAU,eAA0B;AAAA,QAC3D,aAAc,SAAS,UAAU,eAA0B;AAAA,QAC3D,QAAQ,SAAS,YAAY;AAAA,QAC7B,aAAa,CAAC,MAAM,GAAG;AAAA,QACvB,YAAY,KAAK,6BAA6B,OAAO;AAAA,MACvD;AAGA,YAAM,WAAW,MAAM;AAAA,QACrB;AAAA,QACA,KAAK,eAAe;AAAA,MACtB;AAEA,YAAM,gBAAgB,SAAS;AAE/B,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,KAAK;AAAA,QACf,MAAM;AAAA,UACJ,IAAI,cAAc,MAAM,MAAM,GAAG,EAAE,IAAI,KAAK;AAAA,UAC5C,MAAM;AAAA;AAAA,UACN,QAAQ,KAAK,gBAAgB,cAAc,MAAM;AAAA,UACjD,UAAU,cAAc;AAAA,UACxB,WAAW,cAAc;AAAA,QAC3B;AAAA,QACA,KAAK;AAAA,MACP;AAAA,IACF,SAAS,OAAO;AACd,aAAO,KAAK,oBAAoB,KAAK;AAAA,IACvC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,cAAc,cAA0D;AAC5E,SAAK,eAAe;AAEpB,QAAI;AAEF,YAAM,iBAAiB,MAAM,kBAAkB,cAAc,KAAK,eAAe,CAAC;AAElF,YAAM,gBAAgB,eAAe;AACrC,YAAM,SAAS,KAAK,gBAAgB,cAAc,MAAM;AAExD,UAAI,WAAW,aAAa;AAC1B,eAAO;AAAA,UACL,SAAS;AAAA,UACT,UAAU,KAAK;AAAA,UACf,MAAM;AAAA,YACJ,IAAI;AAAA,YACJ,MAAM;AAAA,YACN;AAAA,YACA,UAAU,cAAc;AAAA,YACxB,WAAW,cAAc;AAAA,UAC3B;AAAA,UACA,KAAK;AAAA,QACP;AAAA,MACF;AAGA,UAAI,CAAC,cAAc,OAAO,OAAO;AAC/B,eAAO;AAAA,UACL,SAAS;AAAA,UACT,UAAU,KAAK;AAAA,UACf,OAAO;AAAA,YACL,MAAM;AAAA,YACN,SAAS;AAAA,UACX;AAAA,UACA,KAAK;AAAA,QACP;AAAA,MACF;AAEA,YAAM,gBAAgB,MAAM;AAAA,QAC1B;AAAA,QACA;AAAA,QACA,KAAK,eAAe;AAAA,MACtB;AACA,YAAM,QAAQ,cAAc,MAAM,UAAU,CAAC;AAG7C,YAAM,aAAa,MAAM,KAAK,CAAC,SAAc,KAAK,SAAS,eAAe;AAE1E,UAAI,CAAC,YAAY,OAAO,YAAY;AAClC,eAAO;AAAA,UACL,SAAS;AAAA,UACT,UAAU,KAAK;AAAA,UACf,OAAO;AAAA,YACL,MAAM;AAAA,YACN,SAAS;AAAA,UACX;AAAA,UACA,KAAK;AAAA,QACP;AAAA,MACF;AAGA,YAAM,kBAAkB,MAAM,cAAAC,QAAM,IAAI,WAAW,MAAM,UAAU;AACnE,YAAM,oBAAoB,gBAAgB;AAE1C,aAAO,KAAK,kBAAkB,eAAe,iBAAiB;AAAA,IAChE,SAAS,OAAO;AACd,aAAO,KAAK,oBAAoB,KAAK;AAAA,IACvC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,6BAA6B,SAAsD;AACzF,UAAM,aAAkB;AAAA,MACtB,4BAA4B,SAAS,kBAAkB;AAAA,MACvD,iBAAiB;AAAA,MACjB,qBAAqB;AAAA,IACvB;AAEA,QAAI,SAAS,aAAa;AACxB,iBAAW,qBAAqB;AAChC,UAAI,QAAQ,kBAAkB;AAC5B,mBAAW,cAAc;AAAA,UACvB,UAAU;AAAA,YACR,UAAU;AAAA,YACV,UAAU,QAAQ;AAAA,UACpB;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,QAAI,SAAS,oBAAoB,QAAQ,iBAAiB,SAAS,GAAG;AACpE,iBAAW,mBAAmB;AAAA,QAC5B,SAAS,QAAQ,iBAAiB,KAAK,GAAG;AAAA,MAC5C;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,gBAAgB,QAA8D;AACpF,UAAM,YAAY,QAAQ,SAAS,EAAE,YAAY,KAAK;AAEtD,QAAI,UAAU,SAAS,WAAW,EAAG,QAAO;AAC5C,QAAI,UAAU,SAAS,SAAS,EAAG,QAAO;AAC1C,QAAI,UAAU,SAAS,YAAY,EAAG,QAAO;AAC7C,QAAI,UAAU,SAAS,QAAQ,EAAG,QAAO;AAEzC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,kBACN,eACA,mBAC2B;AAC3B,UAAM,kBAAkB,kBAAkB,6BAA6B,CAAC;AACxE,UAAM,oBAAoB,kBAAkB,qBAAqB,CAAC;AAGlE,
UAAM,WACJ,gBAAgB,IAAI,CAAC,WAAgB,OAAO,WAAW,OAAO,OAAO,EAAE,KAAK,GAAG,KAAK;AAGtF,UAAM,QAAQ,kBAAkB;AAAA,MAAQ,CAAC,YACtC,OAAO,QAAQ,CAAC,GAAG,SAAS,CAAC,GAAG,IAAI,CAAC,UAAe;AAAA,QACnD,MAAM,KAAK;AAAA,QACX,OAAO,KAAK,gBAAgB;AAAA;AAAA,QAC5B,MAAM,KAAK,gBAAgB,KAAK,mBAAmB;AAAA,QACnD,YAAY,KAAK;AAAA,QACjB,SAAS,OAAO,YAAY,SAAY,OAAO,QAAQ,SAAS,IAAI;AAAA,MACtE,EAAE;AAAA,IACJ;AAGA,UAAM,WACJ,kBAAkB,SAAS,KAAK,kBAAkB,CAAC,EAAE,YAAY,SAC7D,MAAM;AAAA,MACJ,IAAI;AAAA,QACF,kBAAkB,IAAI,CAAC,MAAW,EAAE,OAAO,EAAE,OAAO,CAAC,MAAW,MAAM,MAAS;AAAA,MACjF;AAAA,IACF,EAAE,IAAI,CAAC,eAAwB;AAAA,MAC7B,IAAI,OAAO,SAAS;AAAA,MACpB,OAAO,WAAW,SAAS;AAAA,IAC7B,EAAE,IACF;AAEN,WAAO;AAAA,MACL,SAAS;AAAA,MACT,UAAU,KAAK;AAAA,MACf,MAAM;AAAA,QACJ,IAAI,cAAc,MAAM,MAAM,GAAG,EAAE,IAAI,KAAK;AAAA,QAC5C,MAAM;AAAA,QACN,YAAY,kBAAkB,CAAC,GAAG,QAAQ,CAAC,GAAG;AAAA,QAC9C,QAAQ;AAAA,QACR,UAAU,cAAc;AAAA,QACxB,UAAU,kBAAkB,WAAW,kBAAkB,WAAW,MAAW;AAAA,QAC/E;AAAA,QACA,OAAO,MAAM,SAAS,IAAI,QAAQ;AAAA,QAClC,WAAW,cAAc;AAAA,QACzB,aAAa,cAAc;AAAA,MAC7B;AAAA,MACA,KAAK;AAAA,QACH;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;AAKO,SAAS,sBACd,QACiB;AACjB,QAAM,UAAU,IAAI,gBAAgB;AACpC,UAAQ,WAAW,MAAM;AACzB,SAAO;AACT;;;AE5XA,IAAAC,gBAAkB;;;ACIlB,IAAAC,gBAAkB;AAmCX,IAAM,sBAAsB,CAGjC,4BACA,YACmB;AACnB,QAAM,WAAW,IAAI,SAAS;AAC9B,WAAS,OAAO,QAAQ,2BAA2B,IAAI;AACvD,WAAS,OAAO,SAAS,2BAA2B,KAAK;AACzD,MAAI,2BAA2B,aAAa,QAAW;AACrD,aAAS,OAAO,YAAY,2BAA2B,QAAQ;AAAA,EACjE;AACA,MAAI,2BAA2B,WAAW,QAAW;AACnD,aAAS,OAAO,UAAU,2BAA2B,MAAM;AAAA,EAC7D;AACA,MAAI,2BAA2B,oBAAoB,QAAW;AAC5D,aAAS,OAAO,mBAAmB,2BAA2B,eAAe;AAAA,EAC/E;AACA,MAAI,2BAA2B,gBAAgB,QAAW;AACxD,aAAS,OAAO,eAAe,2BAA2B,YAAY,SAAS,CAAC;AAAA,EAClF;AACA,MAAI,2BAA2B,YAAY,QAAW;AACpD,+BAA2B,QAAQ,QAAQ,CAAC,UAAU,SAAS,OAAO,WAAW,KAAK,CAAC;AAAA,EACzF;AACA,MAAI,2BAA2B,4BAA4B,QAAW;AACpE,+BAA2B,wBAAwB;AAAA,MAAQ,CAAC,UAC1D,SAAS,OAAO,2BAA2B,KAAK;AAAA,IAClD;AAAA,EACF;AACA,MACE,2BAA2B,WAAW,UACtC,2BAA2B,WAAW,MACtC;AACA,aAAS,OAAO,UAAU,2BAA2B,OAAO,SAAS,CAAC;AAAA,EACxE;AACA,MACE,2BAA2B,sBAAsB,UACjD,2BAA2B,sBAAsB,MACjD;AACA,aAAS;AAAA,MACP;AAAA,MACA,OAAO,2BAA2B,sBAAsB,WACpD,KAAK,UAAU,2BAA2B,iBAAiB,IAC3D,2BAA2B;AAAA,IACjC;AAAA,EACF;AACA,MAAI,2BAA2B,wBAAwB,QAAW;AAChE,+BAA2B,oBAAoB;AAAA,MAAQ,CAAC,UACtD,SAAS,OAAO,uBAAuB,KAAK;AAAA,IAC9C;AAAA,EACF;AACA,MAAI,2BAA2B,6BAA6B,QAAW;AACrE,+BAA2B,yBAAyB;AAAA,MAAQ,CAAC,UAC3D,SAAS,OAAO,4BAA4B,KAAK;AAAA,IACnD;AAAA,EACF;AAEA,SAAO,cAAAC,QAAM,KAAK,yBAAyB,UAAU,OAAO;AAC9D;;;ADMO,IAAM,uBAAN,cAAmC,YAAY;AAAA,EAA/C;AAAA;AACL,SAAS,OAAO;AAChB,SAAS,eAAqC;AAAA,MAC5C,WAAW;AAAA;AAAA,MACX,aAAa;AAAA;AAAA,MACb,gBAAgB;AAAA,MAChB,mBAAmB;AAAA;AAAA,MACnB,kBAAkB;AAAA;AAAA,MAClB,eAAe;AAAA,MACf,mBAAmB;AAAA,MACnB,iBAAiB;AAAA,MACjB,cAAc;AAAA,IAChB;AAEA,SAAU,UAAU;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMV,iBAAiB;AACzB,WAAO,MAAM,eAAe,iBAAiB,CAAC,WAAW,UAAU,MAAM,EAAE;AAAA,EAC7E;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAgBA,MAAM,WACJ,OACA,SACoC;AACpC,SAAK,eAAe;AAEpB,QAAI;AAEF,UAAI;AACJ,UAAI,WAAW;AAEf,UAAI,MAAM,SAAS,OAAO;AACxB,cAAMC,YAAW,MAAM,cAAAC,QAAM,IAAI,MAAM,KAAK;AAAA,UAC1C,cAAc;AAAA,QAChB,CAAC;AACD,oBAAY,OAAO,KAAKD,UAAS,IAAI;AAGrC,cAAM,UAAU,IAAI,IAAI,MAAM,GAAG,EAAE;AACnC,cAAM,gBAAgB,QAAQ,MAAM,GAAG,EAAE,IAAI;AAC7C,YAAI,eAAe;AACjB,qBAAW;AAAA,QACb;AAAA,MACF,WAAW,MAAM,SAAS,QAAQ;AAChC,oBAAY,MAAM;AAClB,mBAAW,MAAM,YAAY;AAAA,MAC/B,OAAO;AACL,eAAO;AAAA,UACL,SAAS;AAAA,UACT,UAAU,KAAK;AAAA,UACf,OAAO;AAAA,YACL,MAAM;AAAA,YACN,SAAS;AAAA,UACX;AAAA,QACF;AAAA,MACF;AAGA,YAAM,QAAQ,KAAK,YAAY,OAAO;AAGtC,YAAM,gBAAgB,UAAU;AAChC,YAAM,aAAa,SAAS,mBAAmB;AAG/C,YAAM,UAAsC;AAAA,QAC1C,MAAM;AAAA;AAAA,QACN;AAAA,MACF;AAGA,UAAI,SAAS,UAAU;AACrB,gBAAQ,WAAW,QAAQ;AAAA,MAC7B;AAEA,UAAI,SAA
S,UAAU,QAAQ;AAC7B,gBAAQ,SAAS,QAAQ,SAAS;AAAA,MACpC;AAEA,UAAI,SAAS,UAAU,gBAAgB,QAAW;AAChD,gBAAQ,cAAc,QAAQ,SAAS;AAAA,MACzC;AAEA,UAAI,eAAe;AAEjB,gBAAQ,kBAAkB;AAG1B,YAAI,SAAS,UAAU,mBAAmB;AACxC,kBAAQ,sBAAsB,QAAQ,SAAS;AAAA,QACjD;AAEA,YAAI,SAAS,UAAU,wBAAwB;AAC7C,kBAAQ,2BAA2B,QAAQ,SAAS;AAAA,QACtD;AAAA,MACF,WAAW,cAAc,SAAS,aAAa;AAE7C,gBAAQ,kBAAkB;AAG1B,YAAI,YAAY;AACd,kBAAQ,0BAA0B,CAAC,QAAQ,SAAS;AAAA,QACtD;AAAA,MACF,OAAO;AAEL,gBAAQ,kBAAkB;AAAA,MAC5B;AAGA,YAAM,WAAW,MAAM,oBAAoB,SAAS,KAAK,eAAe,CAAC;AAEzE,aAAO,KAAK,kBAAkB,SAAS,MAAa,OAAO,aAAa;AAAA,IAC1E,SAAS,OAAO;AACd,aAAO,KAAK,oBAAoB,KAAK;AAAA,IACvC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,cAAc,cAA0D;AAC5E,WAAO;AAAA,MACL,SAAS;AAAA,MACT,UAAU,KAAK;AAAA,MACf,OAAO;AAAA,QACL,MAAM;AAAA,QACN,SACE;AAAA,MACJ;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,YAAY,SAAsD;AAExE,QAAI,SAAS,UAAU,OAAO;AAC5B,aAAO,QAAQ,SAAS;AAAA,IAC1B;AAGA,QAAI,SAAS,aAAa;AACxB,aAAO;AAAA,IACT;AAGA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,kBACN,UAIA,OACA,eAC2B;AAE3B,QAAI,UAAU,YAAY,OAAO,KAAK,QAAQ,EAAE,WAAW,GAAG;AAC5D,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,KAAK;AAAA,QACf,MAAM;AAAA,UACJ,IAAI,UAAU,KAAK,IAAI,CAAC;AAAA,UACxB,MAAM,SAAS;AAAA,UACf,QAAQ;AAAA,UACR,UAAU;AAAA,UACV,YAAY;AAAA,QACd;AAAA,QACA,KAAK;AAAA,MACP;AAAA,IACF;AAGA,QAAI,iBAAiB,cAAc,UAAU;AAC3C,YAAM,mBAAmB;AAGzB,YAAM,aAAa,IAAI,IAAI,iBAAiB,SAAS,IAAI,CAAC,QAAQ,IAAI,OAAO,CAAC;AAC9E,YAAM,WAAW,MAAM,KAAK,UAAU,EAAE,IAAI,CAAC,aAAa;AAAA,QACxD,IAAI;AAAA,QACJ,OAAO;AAAA;AAAA,MACT,EAAE;AAGF,YAAM,aAAa,iBAAiB,SAAS,IAAI,CAAC,aAAa;AAAA,QAC7D,SAAS,QAAQ;AAAA,QACjB,MAAM,QAAQ;AAAA,QACd,OAAO,QAAQ;AAAA,QACf,KAAK,QAAQ;AAAA,QACb,YAAY;AAAA,MACd,EAAE;AAEF,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,KAAK;AAAA,QACf,MAAM;AAAA,UACJ,IAAI,UAAU,KAAK,IAAI,CAAC;AAAA,UACxB,MAAM,iBAAiB;AAAA,UACvB,QAAQ;AAAA,UACR,UAAU;AAAA,UACV,UAAU,iBAAiB;AAAA,UAC3B;AAAA,UACA;AAAA,QACF;AAAA,QACA,KAAK;AAAA,MACP;AAAA,IACF;AAGA,QAAI,cAAc,YAAY,cAAc,UAAU;AACpD,YAAM,kBAAkB;AAGxB,YAAM,QAAQ,gBAAgB,OAAO,IAAI,CAAC,UAAU;AAAA,QAClD,MAAM,KAAK;AAAA,QACX,OAAO,KAAK;AAAA,QACZ,KAAK,KAAK;AAAA,QACV,YAAY;AAAA,MACd,EAAE;AAEF,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,KAAK;AAAA,QACf,MAAM;AAAA,UACJ,IAAI,UAAU,KAAK,IAAI,CAAC;AAAA,UACxB,MAAM,gBAAgB;AAAA,UACtB,QAAQ;AAAA,UACR,UAAU,gBAAgB;AAAA,UAC1B,UAAU,gBAAgB;AAAA,UAC1B;AAAA,QACF;AAAA,QACA,KAAK;AAAA,MACP;AAAA,IACF;AAGA,WAAO;AAAA,MACL,SAAS;AAAA,MACT,UAAU,KAAK;AAAA,MACf,MAAM;AAAA,QACJ,IAAI,UAAU,KAAK,IAAI,CAAC;AAAA,QACxB,MAAM,UAAU,WAAW,SAAS,OAAO;AAAA,QAC3C,QAAQ;AAAA,MACV;AAAA,MACA,KAAK;AAAA,IACP;AAAA,EACF;AACF;AAKO,SAAS,2BAA2B,QAA8C;AACvF,QAAM,UAAU,IAAI,qBAAqB;AACzC,UAAQ,WAAW,MAAM;AACzB,SAAO;AACT;;;AE/XA,IAAAE,gBAA0C;AAkGnC,IAAM,sBAAN,cAAkC,YAAY;AAAA,EAA9C;AAAA;AACL,SAAS,OAAO;AAChB,SAAS,eAAqC;AAAA,MAC5C,WAAW;AAAA;AAAA,MACX,aAAa;AAAA,MACb,gBAAgB;AAAA,MAChB,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,MAClB,eAAe;AAAA,MACf,mBAAmB;AAAA,MACnB,iBAAiB;AAAA,MACjB,cAAc;AAAA,IAChB;AAGA,SAAU,UAAU;AAAA;AAAA,EAEpB,WAAW,QAA8B;AACvC,UAAM,WAAW,MAAM;AAEvB,SAAK,UAAU,OAAO,WAAW,KAAK;AAEtC,SAAK,SAAS,cAAAC,QAAM,OAAO;AAAA,MACzB,SAAS,KAAK;AAAA,MACd,SAAS,OAAO,WAAW;AAAA,MAC3B,SAAS;AAAA,QACP,eAAe,UAAU,OAAO,MAAM;AAAA,QACtC,GAAG,OAAO;AAAA,MACZ;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAM,WACJ,OACA,SACoC;AACpC,SAAK,eAAe;AAEpB,QAAI;AAEF,YAAM,YAAuB;AAAA,QAC3B,MAAM;AAAA,QACN,sBAAsB;AAAA,UACpB,UAAU,SAAS,YAAY;AAAA,UAC/B,iBACG,SAAS,UAAU,mBAA+C;AAAA,QACvE;AAAA,MACF;AAGA,UAAI,SAAS,aAAa;AACxB,YAAI,CAAC,UAAU,sBAAsB;AACnC,oBAAU,uBAAuB,CAAC;AAAA,QACpC;AACA,kBAAU,qBAAqB,cAAc;AAC7C,YAAI,QAAQ,kBAAkB;AAC5B,oBAAU,qBAAqB,6BAA6B;AAAA,YAC1D,cAAc,QAAQ;AAAA,UACxB;AAAA,QACF;AAAA,MACF;AAGA,UAAI,SAAS,mBAAmB;AAC9B,YAAI,CA
AC,UAAU,sBAAsB;AACnC,oBAAU,uBAAuB,CAAC;AAAA,QACpC;AACA,kBAAU,qBAAqB,4BAA4B;AAAA,MAC7D;AAGA,UAAI,SAAS,iBAAiB,SAAS,UAAU,cAAc;AAC7D,YAAI,CAAC,UAAU,sBAAsB;AACnC,oBAAU,uBAAuB,CAAC;AAAA,QACpC;AACA,kBAAU,qBAAqB,uBAAuB;AAAA,UACpD,MAAM,QAAQ,SAAS;AAAA,UACvB,QAAS,QAAQ,SAAS,kBAAkD;AAAA,QAC9E;AAAA,MACF;AAGA,UAAI,SAAS,oBAAoB,QAAQ,iBAAiB,SAAS,GAAG;AACpE,YAAI,CAAC,UAAU,sBAAsB;AACnC,oBAAU,uBAAuB,CAAC;AAAA,QACpC;AACA,kBAAU,qBAAqB,mBAAmB,QAAQ;AAAA,MAC5D;AAGA,UAAI;AACJ,UAAI,UAAkC,CAAC;AAEvC,UAAI,MAAM,SAAS,OAAO;AAExB,kBAAU,aAAa;AAAA,UACrB,KAAK,MAAM;AAAA,QACb;AACA,sBAAc,EAAE,QAAQ,KAAK,UAAU,SAAS,EAAE;AAClD,kBAAU,EAAE,gBAAgB,mBAAmB;AAAA,MACjD,WAAW,MAAM,SAAS,QAAQ;AAEhC,sBAAc;AAAA,UACZ,QAAQ,KAAK,UAAU,SAAS;AAAA,UAChC,WAAW,MAAM;AAAA,QACnB;AACA,kBAAU,EAAE,gBAAgB,sBAAsB;AAAA,MACpD,OAAO;AACL,eAAO;AAAA,UACL,SAAS;AAAA,UACT,UAAU,KAAK;AAAA,UACf,OAAO;AAAA,YACL,MAAM;AAAA,YACN,SAAS;AAAA,UACX;AAAA,QACF;AAAA,MACF;AAGA,YAAM,WAAW,MAAM,KAAK,OAAQ,KAAwB,SAAS,aAAa,EAAE,QAAQ,CAAC;AAE7F,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,KAAK;AAAA,QACf,MAAM;AAAA,UACJ,IAAI,SAAS,KAAK;AAAA,UAClB,MAAM;AAAA,UACN,QAAQ;AAAA,UACR,WAAW,SAAS,KAAK;AAAA,QAC3B;AAAA,QACA,KAAK,SAAS;AAAA,MAChB;AAAA,IACF,SAAS,OAAO;AACd,aAAO,KAAK,oBAAoB,KAAK;AAAA,IACvC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,cAAc,cAA0D;AAC5E,SAAK,eAAe;AAEpB,QAAI;AAEF,YAAM,iBAAiB,MAAM,KAAK,OAAQ,IAAwB,SAAS,YAAY,EAAE;AAEzF,YAAM,SAAS,KAAK,gBAAgB,eAAe,KAAK,IAAI,MAAM;AAElE,UAAI,WAAW,aAAa;AAC1B,eAAO;AAAA,UACL,SAAS;AAAA,UACT,UAAU,KAAK;AAAA,UACf,MAAM;AAAA,YACJ,IAAI;AAAA,YACJ,MAAM;AAAA,YACN;AAAA,YACA,WAAW,eAAe,KAAK,IAAI;AAAA,UACrC;AAAA,UACA,KAAK,eAAe;AAAA,QACtB;AAAA,MACF;AAGA,YAAM,qBAAqB,MAAM,KAAK,OAAQ;AAAA,QAC5C,SAAS,YAAY;AAAA,MACvB;AAEA,aAAO,KAAK,kBAAkB,mBAAmB,IAAI;AAAA,IACvD,SAAS,OAAO;AACd,aAAO,KAAK,oBAAoB,KAAK;AAAA,IACvC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,gBAAgB,QAAiE;AACvF,YAAQ,QAAQ;AAAA,MACd,KAAK;AACH,eAAO;AAAA,MACT,KAAK;AACH,eAAO;AAAA,MACT,KAAK;AAAA,MACL,KAAK;AACH,eAAO;AAAA,MACT;AACE,eAAO;AAAA,IACX;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,kBAAkB,UAA4D;AAEpF,UAAM,OAAO,SAAS,QACnB,OAAO,CAAC,MAAM,EAAE,SAAS,UAAU,EAAE,YAAY,EACjD,IAAI,CAAC,MAAM,EAAE,aAAc,CAAC,GAAG,WAAW,EAAE,EAC5C,KAAK,GAAG;AAGX,UAAM,QAAQ,SAAS,QACpB,OAAO,CAAC,MAAM,EAAE,SAAS,UAAU,EAAE,eAAe,UAAa,EAAE,aAAa,MAAS,EACzF,IAAI,CAAC,YAAY;AAAA,MAChB,MAAM,OAAO,eAAe,CAAC,GAAG,WAAW;AAAA,MAC3C,OAAO,OAAO;AAAA,MACd,KAAK,OAAO;AAAA,MACZ,YAAY,OAAO,eAAe,CAAC,GAAG;AAAA,MACtC,SAAS,OAAO,eAAe,CAAC,GAAG;AAAA,IACrC,EAAE;AAGJ,UAAM,aAAa,oBAAI,IAAY;AACnC,aAAS,QAAQ,QAAQ,CAAC,MAAM;AAC9B,UAAI,EAAE,cAAc;AAClB,cAAM,UAAU,EAAE,aAAa,CAAC,GAAG;AACnC,YAAI,QAAS,YAAW,IAAI,OAAO;AAAA,MACrC;AAAA,IACF,CAAC;AAED,UAAM,WACJ,WAAW,OAAO,IACd,MAAM,KAAK,UAAU,EAAE,IAAI,CAAC,QAAQ;AAAA,MAClC;AAAA,MACA,OAAO,WAAW,EAAE;AAAA,IACtB,EAAE,IACF;AAGN,UAAM,aAKD,CAAC;AAEN,QAAI,UAAU;AACZ,UAAI;AACJ,UAAI,mBAA6B,CAAC;AAClC,UAAI,iBAAiB;AAErB,eAAS,QACN,OAAO,CAAC,MAAM,EAAE,SAAS,UAAU,EAAE,YAAY,EACjD,QAAQ,CAAC,QAAQ,QAAQ;AACxB,cAAM,UAAU,OAAO,aAAc,CAAC,GAAG;AACzC,cAAM,OAAO,OAAO,aAAc,CAAC,GAAG,WAAW;AAEjD,YAAI,YAAY,gBAAgB;AAE9B,cAAI,kBAAkB,iBAAiB,SAAS,GAAG;AACjD,kBAAM,aAAa,SAAS,QAAQ,OAAO,CAAC,MAAM,EAAE,SAAS,MAAM,EAAE,MAAM,CAAC;AAC5E,uBAAW,KAAK;AAAA,cACd,SAAS;AAAA,cACT,MAAM,iBAAiB,KAAK,GAAG;AAAA,cAC/B,OAAO,kBAAkB;AAAA,cACzB,KAAK,YAAY,YAAY,OAAO,cAAc;AAAA,YACpD,CAAC;AAAA,UACH;AAGA,2BAAiB;AACjB,6BAAmB,CAAC,IAAI;AACxB,2BAAiB,OAAO,cAAc;AAAA,QACxC,OAAO;AACL,2BAAiB,KAAK,IAAI;AAAA,QAC5B;AAAA,MACF,CAAC;AAGH,UAAI,kBAAkB,iBAAiB,SAAS,GAAG;AACjD,cAAM,WAAW,SAAS,QAAQ,OAAO,CAAC,MAAM,EAAE,SAAS,MAAM,EAAE,IAAI;AACvE,mBAAW,KAAK;AAAA,UACd,SAAS;AAAA,UACT,MAAM,iBAAiB,KAAK,GAAG;AAAA,UAC/B,OAAO;AAAA,UACP,KAAK,UAAU,YAAY;AAAA,QAC7B,CAAC
;AAAA,MACH;AAAA,IACF;AAEA,WAAO;AAAA,MACL,SAAS;AAAA,MACT,UAAU,KAAK;AAAA,MACf,MAAM;AAAA,QACJ,IAAI,SAAS,IAAI;AAAA,QACjB;AAAA,QACA,QAAQ;AAAA,QACR,UAAU,SAAS,SAAS,sBAAsB;AAAA,QAClD,UAAU,SAAS,IAAI;AAAA,QACvB;AAAA,QACA,OAAO,MAAM,SAAS,IAAI,QAAQ;AAAA,QAClC,YAAY,WAAW,SAAS,IAAI,aAAa;AAAA,QACjD,SAAS,SAAS,SAAS;AAAA,QAC3B,WAAW,SAAS,IAAI;AAAA,MAC1B;AAAA,MACA,KAAK;AAAA,IACP;AAAA,EACF;AACF;AAKO,SAAS,0BAA0B,QAA6C;AACrF,QAAM,UAAU,IAAI,oBAAoB;AACxC,UAAQ,WAAW,MAAM;AACzB,SAAO;AACT;;;AC7ZO,IAAe,qBAAf,MAAkC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA2FvC,SACE,SACA,SACmB;AACnB,QAAI;AAEF,UAAI,CAAC,KAAK,QAAQ,SAAS,OAAO,GAAG;AACnC,eAAO;AAAA,UACL,OAAO;AAAA,UACP,OAAO,0BAA0B,KAAK,QAAQ;AAAA,QAChD;AAAA,MACF;AAGA,YAAM,QAAQ,KAAK,MAAM,SAAS,OAAO;AAGzC,UAAI,CAAC,MAAM,YAAY,CAAC,MAAM,WAAW;AACvC,eAAO;AAAA,UACL,OAAO;AAAA,UACP,OAAO;AAAA,QACT;AAAA,MACF;AAEA,aAAO;AAAA,QACL,OAAO;AAAA,QACP,UAAU,KAAK;AAAA,QACf,SAAS;AAAA,UACP,WAAW,MAAM;AAAA,UACjB,SAAS,MAAM;AAAA,QACjB;AAAA,MACF;AAAA,IACF,SAAS,OAAO;AACd,aAAO;AAAA,QACL,OAAO;AAAA,QACP,OAAO,iBAAiB,QAAQ,MAAM,UAAU;AAAA,QAChD,SAAS,EAAE,MAAM;AAAA,MACnB;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKU,iBAAiB,SAAkB,cAA2C;AACtF,WAAO;AAAA,MACL,SAAS;AAAA,MACT,UAAU,KAAK;AAAA,MACf,WAAW;AAAA,MACX,MAAM;AAAA,QACJ,IAAI;AAAA,QACJ,QAAQ;AAAA,QACR,OAAO;AAAA,MACT;AAAA,MACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,MAClC,KAAK;AAAA,IACP;AAAA,EACF;AACF;;;ACxHO,IAAM,uBAAN,cAAmC,mBAAmB;AAAA,EAAtD;AAAA;AACL,SAAS,WAAkC;AAAA;AAAA;AAAA;AAAA;AAAA,EAK3C,QACE,SACA,UACS;AACT,QAAI,CAAC,WAAW,OAAO,YAAY,UAAU;AAC3C,aAAO;AAAA,IACT;AAEA,UAAM,MAAM;AAGZ,QAAI,EAAE,WAAW,QAAQ,EAAE,aAAa,MAAM;AAC5C,aAAO;AAAA,IACT;AAGA,QAAI,OAAO,IAAI,UAAU,UAAU;AACjC,aAAO;AAAA,IACT;AAEA,QAAI,CAAC,IAAI,MAAM,WAAW,gBAAgB,GAAG;AAC3C,aAAO;AAAA,IACT;AAGA,QAAI,CAAC,IAAI,WAAW,OAAO,IAAI,YAAY,UAAU;AACnD,aAAO;AAAA,IACT;AAEA,UAAM,aAAa,IAAI;AACvB,WAAO,OAAO,WAAW,OAAO;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA,EAKA,MACE,SACA,UACqB;AACrB,QAAI,CAAC,KAAK,QAAQ,OAAO,GAAG;AAC1B,aAAO,KAAK,iBAAiB,SAAS,gCAAgC;AAAA,IACxE;AAEA,UAAM,iBAAiB;AAKvB,UAAM,QAAQ,eAAe,QAAQ;AACrC,UAAM,QAAQ,eAAe;AAG7B,QAAI,UAAU,yBAAyB;AACrC,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,KAAK;AAAA,QACf,WAAW;AAAA,QACX,MAAM;AAAA,UACJ,IAAI;AAAA,UACJ,QAAQ;AAAA,QACV;AAAA,QACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,QAClC,KAAK;AAAA,MACP;AAAA,IACF;AAEA,QAAI,UAAU,yBAAyB;AAGrC,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,KAAK;AAAA,QACf,WAAW;AAAA,QACX,MAAM;AAAA,UACJ,IAAI;AAAA,UACJ,QAAQ;AAAA;AAAA;AAAA,QAGV;AAAA,QACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,QAClC,KAAK;AAAA,MACP;AAAA,IACF;AAEA,QAAI,UAAU,uBAAuB;AACnC,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,KAAK;AAAA,QACf,WAAW;AAAA,QACX,MAAM;AAAA,UACJ,IAAI;AAAA,UACJ,QAAQ;AAAA,UACR,OAAO;AAAA,QACT;AAAA,QACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,QAClC,KAAK;AAAA,MACP;AAAA,IACF;AAGA,WAAO,KAAK,iBAAiB,SAAS,iCAAiC,KAAK,EAAE;AAAA,EAChF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,SAAkB;AAGhB,WAAO;AAAA,EACT;AACF;AAKO,SAAS,6BAAmD;AACjE,SAAO,IAAI,qBAAqB;AAClC;;;ACtKA,yBAAmB;AA4CZ,IAAM,2BAAN,cAAuC,mBAAmB;AAAA,EAA1D;AAAA;AACL,SAAS,WAAkC;AAAA;AAAA;AAAA;AAAA;AAAA,EAK3C,QACE,SACA,UACS;AACT,QAAI,CAAC,WAAW,OAAO,YAAY,UAAU;AAC3C,aAAO;AAAA,IACT;AAEA,UAAM,MAAM;AAGZ,QAAI,EAAE,mBAAmB,QAAQ,EAAE,YAAY,MAAM;AACnD,aAAO;AAAA,IACT;AAGA,QAAI,OAAO,IAAI,kBAAkB,UAAU;AACzC,aAAO;AAAA,IACT;AAGA,QAAI,IAAI,WAAW,eAAe,IAAI,WAAW,SAAS;AACxD,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MACE,SACA,UACqB;AACrB,QAAI,CAAC,KAAK,QAAQ,OAAO,GAAG;AAC1B,aAAO,KAAK,iBAAiB,SAAS,oCAAoC;AAAA,IAC5E;AAEA,UAAM,eAAe;AACrB,UAAM,eAAe,aAAa;AAClC,UAAM,SAAS,aAAa;AAE5B,QAAI,WAAW,aAAa;AAC1B,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,KAAK;AAAA,QACf,WAAW;AAAA,QACX,MAAM;AAAA,UACJ,IAAI;AAAA,UACJ,Q
AAQ;AAAA;AAAA;AAAA,QAGV;AAAA,QACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,QAClC,KAAK;AAAA,MACP;AAAA,IACF;AAEA,QAAI,WAAW,SAAS;AACtB,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,KAAK;AAAA,QACf,WAAW;AAAA,QACX,MAAM;AAAA,UACJ,IAAI;AAAA,UACJ,QAAQ;AAAA,UACR,OAAO;AAAA,QACT;AAAA,QACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,QAClC,KAAK;AAAA,MACP;AAAA,IACF;AAGA,WAAO,KAAK,iBAAiB,SAAS,8BAA8B,MAAM,EAAE;AAAA,EAC9E;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAqBA,OAAO,SAAkB,SAA8C;AAErE,QAAI,CAAC,QAAQ,aAAa,CAAC,QAAQ,QAAQ;AACzC,aAAO;AAAA,IACT;AAEA,QAAI;AAEF,YAAM,OACJ,QAAQ,YAAY,OAAO,YAAY,WAAW,UAAU,KAAK,UAAU,OAAO;AAGpF,YAAM,OAAO,mBAAAC,QAAO,WAAW,UAAU,QAAQ,MAAM;AACvD,YAAM,aAAa,OAAO,SAAS,WAAW,OAAO,KAAK,IAAI,IAAI;AAClE,WAAK,OAAO,UAAU;AACtB,YAAM,oBAAoB,KAAK,OAAO,KAAK;AAG3C,aAAO,mBAAAA,QAAO,gBAAgB,OAAO,KAAK,QAAQ,SAAS,GAAG,OAAO,KAAK,iBAAiB,CAAC;AAAA,IAC9F,SAAS,OAAO;AAEd,aAAO;AAAA,IACT;AAAA,EACF;AACF;AAKO,SAAS,iCAA2D;AACzE,SAAO,IAAI,yBAAyB;AACtC;;;AC9HO,IAAM,yBAAN,cAAqC,mBAAmB;AAAA,EAAxD;AAAA;AACL,SAAS,WAAkC;AAAA;AAAA;AAAA;AAAA;AAAA,EAK3C,QACE,SACA,UACS;AACT,QAAI,CAAC,WAAW,OAAO,YAAY,UAAU;AAC3C,aAAO;AAAA,IACT;AAEA,UAAM,MAAM;AAGZ,QAAI,EAAE,cAAc,QAAQ,EAAE,aAAa,MAAM;AAC/C,aAAO;AAAA,IACT;AAGA,QAAI,CAAC,IAAI,YAAY,OAAO,IAAI,aAAa,UAAU;AACrD,aAAO;AAAA,IACT;AAEA,UAAM,WAAW,IAAI;AACrB,QAAI,EAAE,gBAAgB,WAAW;AAC/B,aAAO;AAAA,IACT;AAGA,QAAI,CAAC,IAAI,WAAW,OAAO,IAAI,YAAY,UAAU;AACnD,aAAO;AAAA,IACT;AAEA,UAAM,UAAU,IAAI;AACpB,WAAO,cAAc;AAAA,EACvB;AAAA;AAAA;AAAA;AAAA,EAKA,MACE,SACA,UACqB;AACrB,QAAI,CAAC,KAAK,QAAQ,OAAO,GAAG;AAC1B,aAAO,KAAK,iBAAiB,SAAS,kCAAkC;AAAA,IAC1E;AAEA,UAAM,WAAW;AAEjB,QAAI;AAEF,YAAM,YAAY,SAAS,SAAS;AACpC,YAAM,WAAW,SAAS,SAAS;AACnC,YAAM,WAAW,SAAS,QAAQ,YAAY,CAAC;AAG/C,UAAI,SAAS,WAAW,GAAG;AACzB,eAAO;AAAA,UACL,SAAS;AAAA,UACT,UAAU,KAAK;AAAA,UACf,WAAW;AAAA,UACX,MAAM;AAAA,YACJ,IAAI,aAAa;AAAA,YACjB,QAAQ;AAAA,YACR,OAAO;AAAA,UACT;AAAA,UACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,UAClC,KAAK;AAAA,QACP;AAAA,MACF;AAEA,YAAM,UAAU,SAAS,CAAC;AAC1B,YAAM,eAAe,QAAQ,gBAAgB,CAAC;AAE9C,UAAI,aAAa,WAAW,GAAG;AAC7B,eAAO;AAAA,UACL,SAAS;AAAA,UACT,UAAU,KAAK;AAAA,UACf,WAAW;AAAA,UACX,MAAM;AAAA,YACJ,IAAI,aAAa;AAAA,YACjB,QAAQ;AAAA,YACR,OAAO;AAAA,UACT;AAAA,UACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,UAClC,KAAK;AAAA,QACP;AAAA,MACF;AAEA,YAAM,cAAc,aAAa,CAAC;AAClC,YAAM,aAAa,YAAY;AAG/B,UAAI,CAAC,YAAY;AACf,eAAO;AAAA,UACL,SAAS;AAAA,UACT,UAAU,KAAK;AAAA,UACf,WAAW;AAAA,UACX,MAAM;AAAA,YACJ,IAAI,aAAa;AAAA,YACjB,QAAQ;AAAA,YACR,OAAO;AAAA,UACT;AAAA,UACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,UAClC,KAAK;AAAA,QACP;AAAA,MACF;AAGA,YAAM,QACJ,YAAY,SAAS,YAAY,MAAM,SAAS,IAC5C,YAAY,MAAM,IAAI,CAAC,UAAU;AAAA,QAC/B,MAAM,KAAK,QAAQ;AAAA,QACnB,OAAO,KAAK,SAAS;AAAA,QACrB,KAAK,KAAK,OAAO;AAAA,QACjB,YAAY,KAAK;AAAA,MACnB,EAAE,IACF;AAGN,YAAM,WACJ,SAAS,QAAQ,cAAc,SAAS,QAAQ,WAAW,SAAS,IAChE,SAAS,QAAQ,WAAW,IAAI,CAAC,eAAe;AAAA,QAC9C,IAAI,UAAU,SAAS,SAAS,KAAK;AAAA,QACrC,SAAS,UAAU,SAAS,SAAS,KAAK;AAAA,QAC1C,MAAM,UAAU,cAAc;AAAA,QAC9B,YAAY,UAAU;AAAA,MACxB,EAAE,IACF;AAGN,YAAM,aACJ,SAAS,QAAQ,cAAc,SAAS,QAAQ,WAAW,SAAS,IAChE,SAAS,QAAQ,WAAW,IAAI,CAAC,eAAe;AAAA,QAC9C,MAAM,UAAU,cAAc;AAAA,QAC9B,OAAO,UAAU,SAAS;AAAA,QAC1B,KAAK,UAAU,OAAO;AAAA,QACtB,SAAS,UAAU,SAAS,SAAS;AAAA,QACrC,YAAY,UAAU;AAAA,QACtB,OACE,UAAU,SAAS,UAAU,MAAM,SAAS,IACxC,UAAU,MAAM,IAAI,CAAC,UAAU;AAAA,UAC7B,MAAM,KAAK,QAAQ;AAAA,UACnB,OAAO,KAAK,SAAS;AAAA,UACrB,KAAK,KAAK,OAAO;AAAA,UACjB,YAAY,KAAK;AAAA,QACnB,EAAE,IACF;AAAA,MACR,EAAE,IACF;AAGN,YAAM,UAAU,YAAY,YAAY,CAAC,GAAG;AAE5C,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,KAAK;AAAA,QACf,WAAW;AAAA,QACX,MAAM;AAAA,UACJ,IAAI,aAAa;AAAA,UACjB,QAAQ;AAAA,UACR,MAAM;AAAA,UACN,YAAY,YAAY;AAAA,UACxB;AAAA,UACA,UAAU,SAAS,SAAS,SA
AS,CAAC,KAAK;AAAA,UAC3C,UAAU,YAAY,SAAS,SAAS,IAAI,WAAW;AAAA,UACvD,OAAO,SAAS,MAAM,SAAS,IAAI,QAAQ;AAAA,UAC3C,YAAY,cAAc,WAAW,SAAS,IAAI,aAAa;AAAA,UAC/D;AAAA,UACA,UAAU;AAAA,YACR,UAAU,SAAS,SAAS;AAAA,YAC5B,SAAS,SAAS,SAAS;AAAA,YAC3B,QAAQ,SAAS,SAAS;AAAA,UAC5B;AAAA,QACF;AAAA,QACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,QAClC,KAAK;AAAA,MACP;AAAA,IACF,SAAS,OAAO;AACd,aAAO,KAAK;AAAA,QACV;AAAA,QACA,qCAAqC,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,MAC/F;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,SAAkB;AAGhB,WAAO;AAAA,EACT;AACF;AAKO,SAAS,+BAAuD;AACrE,SAAO,IAAI,uBAAuB;AACpC;;;ACvQA,IAAAC,sBAAmB;AA8EZ,IAAM,sBAAN,cAAkC,mBAAmB;AAAA,EAArD;AAAA;AACL,SAAS,WAAkC;AAAA;AAAA;AAAA;AAAA;AAAA,EAK3C,QACE,SACA,UACS;AACT,QAAI,CAAC,WAAW,OAAO,YAAY,UAAU;AAC3C,aAAO;AAAA,IACT;AAEA,UAAM,MAAM;AAGZ,QAAI,EAAE,YAAY,QAAQ,EAAE,eAAe,MAAM;AAC/C,aAAO;AAAA,IACT;AAGA,QAAI,OAAO,IAAI,WAAW,UAAU;AAClC,aAAO;AAAA,IACT;AAGA,QAAI,CAAC,IAAI,OAAO,WAAW,eAAe,GAAG;AAC3C,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MACE,SACA,UACqB;AACrB,QAAI,CAAC,KAAK,QAAQ,OAAO,GAAG;AAC1B,aAAO,KAAK,iBAAiB,SAAS,+BAA+B;AAAA,IACvE;AAEA,UAAM,iBAAiB;AACvB,UAAM,SAAS,eAAe;AAC9B,UAAM,YAAY,eAAe;AAIjC,QAAI,kBAAkB;AACtB,QAAI,eAAe,MAAM;AACvB,YAAM,QAAQ,eAAe,KAAK,MAAM,4BAA4B;AACpE,UAAI,OAAO;AACT,0BAAkB,MAAM,CAAC;AAAA,MAC3B;AAAA,IACF;AAGA,QAAI,WAAW,wBAAwB;AACrC,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,KAAK;AAAA,QACf,WAAW;AAAA,QACX,MAAM;AAAA,UACJ,IAAI;AAAA,UACJ,QAAQ;AAAA,UACR,WAAW;AAAA,QACb;AAAA,QACA;AAAA,QACA,KAAK;AAAA,MACP;AAAA,IACF;AAEA,QAAI,WAAW,wBAAwB;AACrC,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,KAAK;AAAA,QACf,WAAW;AAAA,QACX,MAAM;AAAA,UACJ,IAAI;AAAA,UACJ,QAAQ;AAAA,QACV;AAAA,QACA;AAAA,QACA,KAAK;AAAA,MACP;AAAA,IACF;AAEA,QAAI,WAAW,0BAA0B;AACvC,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,KAAK;AAAA,QACf,WAAW;AAAA,QACX,MAAM;AAAA,UACJ,IAAI;AAAA,UACJ,QAAQ;AAAA,UACR,aAAa;AAAA;AAAA;AAAA,QAGf;AAAA,QACA;AAAA,QACA,KAAK;AAAA,MACP;AAAA,IACF;AAEA,QAAI,WAAW,uBAAuB;AACpC,aAAO;AAAA,QACL,SAAS;AAAA,QACT,UAAU,KAAK;AAAA,QACf,WAAW;AAAA,QACX,MAAM;AAAA,UACJ,IAAI;AAAA,UACJ,QAAQ;AAAA,UACR,OAAO,eAAe,OAAO,WAAW;AAAA,UACxC,UAAU;AAAA,YACR,WAAW,eAAe,OAAO;AAAA,UACnC;AAAA,QACF;AAAA,QACA;AAAA,QACA,KAAK;AAAA,MACP;AAAA,IACF;AAGA,WAAO,KAAK,iBAAiB,SAAS,iCAAiC,MAAM,EAAE;AAAA,EACjF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAwBA,OAAO,SAAkB,SAA8C;AAGrE,QAAI,CAAC,QAAQ,WAAW;AACtB,aAAO;AAAA,IACT;AAGA,QAAI,CAAC,QAAQ,QAAQ;AACnB,aAAO;AAAA,IACT;AAEA,QAAI;AAEF,YAAM,OACJ,QAAQ,YAAY,OAAO,YAAY,WAAW,UAAU,KAAK,UAAU,OAAO;AAGpF,YAAM,OAAO,oBAAAC,QAAO,WAAW,UAAU,QAAQ,MAAM;AACvD,YAAM,aAAa,OAAO,SAAS,WAAW,OAAO,KAAK,IAAI,IAAI;AAClE,WAAK,OAAO,UAAU;AACtB,YAAM,oBAAoB,KAAK,OAAO,KAAK;AAG3C,aAAO,oBAAAA,QAAO,gBAAgB,OAAO,KAAK,QAAQ,SAAS,GAAG,OAAO,KAAK,iBAAiB,CAAC;AAAA,IAC9F,SAAS,OAAO;AAEd,aAAO;AAAA,IACT;AAAA,EACF;AACF;AAKO,SAAS,4BAAiD;AAC/D,SAAO,IAAI,oBAAoB;AACjC;;;AClOO,IAAM,6BAAN,cAAyC,mBAAmB;AAAA,EAA5D;AAAA;AACL,SAAS,WAAkC;AAAA;AAAA;AAAA;AAAA;AAAA,EAK3C,QACE,SACA,SACS;AAET,QAAI,SAAS,WAAW;AACtB,UAAI,CAAC,QAAQ,UAAU,SAAS,kBAAkB,GAAG;AACnD,eAAO;AAAA,MACT;AAAA,IACF;AAGA,QAAI,SAAS,aAAa;AACxB,YAAM,EAAE,IAAI,OAAO,IAAI,QAAQ;AAC/B,UAAI,CAAC,MAAM,CAAC,QAAQ;AAClB,eAAO;AAAA,MACT;AAAA,IACF;AAIA,QAAI,WAAW,OAAO,YAAY,UAAU;AAC1C,YAAM,MAAM;AAGZ,UAAI,YAAY,OAAO,SAAS,OAAO,cAAc,KAAK;AACxD,eAAO;AAAA,MACT;AAGA,UAAI,SAAS,OAAO,QAAQ,KAAK;AAC/B,eAAO;AAAA,MACT;AAAA,IACF;AAGA,WAAO,CAAC,CAAC,SAAS,aAAa,MAAM,CAAC,CAAC,SAAS,aAAa;AAAA,EAC/D;AAAA;AAAA;AAAA;AAAA,EAKA,SACE,SACA,SACoC;AAEpC,QAAI,CAAC,SAAS,aAAa,IAAI;AAC7B,aAAO;AAAA,QACL,OAAO;AAAA,QACP,OAAO;AAAA,MACT;AAAA,IACF;AAEA,QAAI,CAAC,SAAS,aAAa,QAAQ;AACjC,aAAO;AA
AA,QACL,OAAO;AAAA,QACP,OAAO;AAAA,MACT;AAAA,IACF;AAGA,UAAM,gBAAgB,CAAC,WAAW,SAAS,eAAe,YAAY;AACtE,QAAI,CAAC,cAAc,SAAS,QAAQ,YAAY,MAAM,GAAG;AACvD,aAAO;AAAA,QACL,OAAO;AAAA,QACP,OAAO,yBAAyB,QAAQ,YAAY,MAAM;AAAA,MAC5D;AAAA,IACF;AAGA,QAAI,SAAS,aAAa,CAAC,QAAQ,UAAU,SAAS,kBAAkB,GAAG;AACzE,aAAO;AAAA,QACL,OAAO;AAAA,QACP,OAAO;AAAA,MACT;AAAA,IACF;AAEA,WAAO,EAAE,OAAO,KAAK;AAAA,EACvB;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,SAAkB,SAAyE;AAC/F,UAAM,cAAc,SAAS,eAAe,CAAC;AAC7C,UAAM,QAAQ,YAAY;AAC1B,UAAM,SAAS,YAAY;AAG3B,QAAI;AACJ,QAAI,WAAW,WAAW;AACxB,kBAAY;AAAA,IACd,WAAW,WAAW,WAAW,WAAW,iBAAiB,WAAW,cAAc;AACpF,kBAAY;AAAA,IACd,OAAO;AACL,kBAAY;AAAA,IACd;AAGA,QAAI,WAAW,aAAa,WAAW,OAAO,YAAY,UAAU;AAClE,YAAM,aAAa;AAEnB,UAAI,WAAW,WAAW,WAAW,KAAK;AAExC,cAAM,OAAO,WAAW,QACrB,OAAO,CAAC,MAAM,EAAE,SAAS,UAAU,EAAE,YAAY,EACjD,IAAI,CAAC,MAAM,EAAE,aAAc,CAAC,GAAG,WAAW,EAAE,EAC5C,KAAK,GAAG;AAGX,cAAM,aAAa,oBAAI,IAAY;AACnC,mBAAW,QAAQ,QAAQ,CAAC,MAAM;AAChC,cAAI,EAAE,cAAc;AAClB,kBAAM,UAAU,EAAE,aAAa,CAAC,GAAG;AACnC,gBAAI,QAAS,YAAW,IAAI,OAAO;AAAA,UACrC;AAAA,QACF,CAAC;AAED,cAAM,WACJ,WAAW,OAAO,IACd,MAAM,KAAK,UAAU,EAAE,IAAI,CAAC,QAAQ;AAAA,UAClC;AAAA,UACA,OAAO,WAAW,EAAE;AAAA,QACtB,EAAE,IACF;AAEN,eAAO;AAAA,UACL,SAAS;AAAA,UACT,UAAU,KAAK;AAAA,UACf;AAAA,UACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,UAClC,MAAM;AAAA,YACJ,IAAI;AAAA,YACJ;AAAA,YACA,QAAQ;AAAA,YACR,UAAU,WAAW,SAAS,sBAAsB;AAAA,YACpD,UAAU,WAAW,IAAI;AAAA,YACzB;AAAA,YACA,WAAW,WAAW,IAAI;AAAA,UAC5B;AAAA,UACA,KAAK;AAAA,QACP;AAAA,MACF;AAAA,IACF;AAGA,WAAO;AAAA,MACL,SAAS,WAAW;AAAA,MACpB,UAAU,KAAK;AAAA,MACf;AAAA,MACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,MAClC,MAAM;AAAA,QACJ,IAAI;AAAA,QACJ,MAAM;AAAA,QACN,QAAQ,WAAW,YAAY,cAAc;AAAA,MAC/C;AAAA,MACA,KAAK;AAAA,IACP;AAAA,EACF;AACF;;;ACrDO,IAAM,gBAAN,MAAoB;AAAA,EAGzB,cAAc;AAEZ,SAAK,WAAW,oBAAI,IAAI;AAAA,MACtB,CAAC,UAAU,IAAI,qBAAqB,CAAC;AAAA,MACrC,CAAC,cAAc,IAAI,yBAAyB,CAAC;AAAA,MAC7C,CAAC,YAAY,IAAI,uBAAuB,CAAC;AAAA,MACzC,CAAC,aAAa,IAAI,oBAAoB,CAAC;AAAA,MACvC,CAAC,gBAAgB,IAAI,2BAA2B,CAAC;AAAA,IACnD,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAM,SAAkB,SAAqD;AAE3E,QAAI,SAAS,UAAU;AACrB,aAAO,KAAK,gBAAgB,SAAS,QAAQ,UAAU,OAAO;AAAA,IAChE;AAGA,UAAM,mBAAmB,KAAK,eAAe,SAAS;AAAA,MACpD,aAAa,SAAS;AAAA,MACtB,WAAW,SAAS;AAAA,IACtB,CAAC;AAED,QAAI,CAAC,kBAAkB;AACrB,aAAO;AAAA,QACL,SAAS;AAAA,QACT,OAAO;AAAA,MACT;AAAA,IACF;AAEA,WAAO,KAAK,gBAAgB,SAAS,kBAAkB,OAAO;AAAA,EAChE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,eACE,SACA,SACmC;AAEnC,eAAW,CAAC,UAAU,OAAO,KAAK,KAAK,UAAU;AAC/C,UAAI,QAAQ,QAAQ,SAAS,OAAO,GAAG;AACrC,eAAO;AAAA,MACT;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,SAAS,SAAkB,SAAmD;AAE5E,QAAI,SAAS,UAAU;AACrB,YAAMC,WAAU,KAAK,SAAS,IAAI,QAAQ,QAAQ;AAClD,UAAI,CAACA,UAAS;AACZ,eAAO;AAAA,UACL,OAAO;AAAA,UACP,OAAO,qBAAqB,QAAQ,QAAQ;AAAA,QAC9C;AAAA,MACF;AACA,aAAOA,SAAQ,SAAS,SAAS;AAAA,QAC/B,aAAa,QAAQ;AAAA,QACrB,WAAW,QAAQ;AAAA,MACrB,CAAC;AAAA,IACH;AAGA,UAAM,mBAAmB,KAAK,eAAe,SAAS;AAAA,MACpD,aAAa,SAAS;AAAA,MACtB,WAAW,SAAS;AAAA,IACtB,CAAC;AAED,QAAI,CAAC,kBAAkB;AACrB,aAAO;AAAA,QACL,OAAO;AAAA,QACP,OAAO;AAAA,MACT;AAAA,IACF;AAEA,UAAM,UAAU,KAAK,SAAS,IAAI,gBAAgB;AAClD,QAAI,CAAC,SAAS;AACZ,aAAO;AAAA,QACL,OAAO;AAAA,QACP,OAAO,mCAAmC,gBAAgB;AAAA,MAC5D;AAAA,IACF;AAEA,WAAO,QAAQ,SAAS,SAAS;AAAA,MAC/B,aAAa,SAAS;AAAA,MACtB,WAAW,SAAS;AAAA,IACtB,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,OACE,SACA,UACA,SACS;AACT,UAAM,UAAU,KAAK,SAAS,IAAI,QAAQ;AAC1C,QAAI,CAAC,WAAW,CAAC,QAAQ,QAAQ;AAE/B,aAAO;AAAA,IACT;AAEA,WAAO,QAAQ,OAAO,SAAS,OAAO;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA,EAKQ,gBACN,SACA,UACA,SACqB;AACrB,UAAM,UAAU,KAAK,SAAS,IAAI,QAAQ;AAE1C,QAAI,CAAC,SAAS;AACZ,aAAO;AAAA,QACL,SAAS;AAAA,QACT,O
AAO,mCAAmC,QAAQ;AAAA,MACpD;AAAA,IACF;AAGA,QAAI,WAAW;AACf,QAAI,SAAS,oBAAoB,SAAS,SAAS,gBAAgB,QAAQ,QAAQ;AACjF,iBAAW,QAAQ,OAAO,SAAS,QAAQ,YAAY;AACvD,UAAI,CAAC,UAAU;AACb,eAAO;AAAA,UACL,SAAS;AAAA,UACT;AAAA,UACA,OAAO;AAAA,UACP,UAAU;AAAA,QACZ;AAAA,MACF;AAAA,IACF;AAGA,UAAM,aAAa,QAAQ,SAAS,SAAS;AAAA,MAC3C,aAAa,SAAS;AAAA,MACtB,WAAW,SAAS;AAAA,IACtB,CAAC;AACD,QAAI,CAAC,WAAW,OAAO;AACrB,aAAO;AAAA,QACL,SAAS;AAAA,QACT;AAAA,QACA,OAAO,WAAW;AAAA,QAClB;AAAA,MACF;AAAA,IACF;AAGA,QAAI;AACF,YAAM,QAAQ,QAAQ,MAAM,SAAS;AAAA,QACnC,aAAa,SAAS;AAAA,MACxB,CAAC;AAED,aAAO;AAAA,QACL,SAAS;AAAA,QACT;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS,OAAO;AACd,aAAO;AAAA,QACL,SAAS;AAAA,QACT;AAAA,QACA,OAAO,4BAA4B,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,QAC3F;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,WAAW,UAAiE;AAC1E,WAAO,KAAK,SAAS,IAAI,QAAQ;AAAA,EACnC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,eAAwC;AACtC,WAAO,MAAM,KAAK,KAAK,SAAS,KAAK,CAAC;AAAA,EACxC;AACF;AAKO,SAAS,sBAAqC;AACnD,SAAO,IAAI,cAAc;AAC3B;","names":["schema_exports","axios","axios","axios","WebSocket","import_ws","import_axios","schema_exports","axios","axios","WebSocket","import_axios","import_ws","axios","WebSocket","import_axios","import_axios","axios","axios","axios","import_axios","import_axios","axios","response","axios","import_axios","axios","crypto","import_node_crypto","crypto","handler"]}