@dataclouder/nest-vertex 0.0.68 → 0.0.70

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -32,7 +32,7 @@ let VertexImageVideoAdapterController = class VertexImageVideoAdapterController
  if (provider === 'local') {
  }
  else {
- return this.vertexImageVideoService.listModels();
+ return this.vertexImageVideoService.listModelsVertex();
  }
  }
  async generateImage(generateImageDto) {
@@ -60,7 +60,7 @@ __decorate([
  ], VertexImageVideoAdapterController.prototype, "generateImage", null);
  exports.VertexImageVideoAdapterController = VertexImageVideoAdapterController = __decorate([
  (0, swagger_1.ApiTags)('[Image] Adapter'),
- (0, common_1.Controller)('api/ai-services/image/adapter'),
+ (0, common_1.Controller)('api/ai-services/adapter/image'),
  (0, common_2.UseFilters)(nest_core_1.AllExceptionsHandler),
  __metadata("design:paramtypes", [vertex_image_video_service_1.VertexImageVideoService,
  adapter_image_gen_service_1.ImageGenAdapterService])
@@ -1,4 +1,5 @@
  import { type MulterFile } from '@webundsoehne/nest-fastify-file-upload';
+ import { TranscribeUrlDto } from '../../dto/transcribe-url.dto';
  import { GroqService } from '../../services/whisper/groq.service';
  import { LocalSttService } from '../../services/whisper/local-stt.service';
  export declare class AdapterSttController {
@@ -6,5 +7,6 @@ export declare class AdapterSttController {
  private readonly localSttService;
  private readonly logger;
  constructor(groqService: GroqService, localSttService: LocalSttService);
- processAudio(file: MulterFile, provider?: 'local' | 'groq'): Promise<any>;
+ processAudio(file: MulterFile, provider?: 'local' | 'groq' | 'default'): Promise<any>;
+ transcribe(body: TranscribeUrlDto, provider?: 'local' | 'groq'): Promise<any>;
  }
@@ -17,6 +17,7 @@ exports.AdapterSttController = void 0;
  const common_1 = require("@nestjs/common");
  const nest_fastify_file_upload_1 = require("@webundsoehne/nest-fastify-file-upload");
  const swagger_1 = require("@nestjs/swagger");
+ const transcribe_url_dto_1 = require("../../dto/transcribe-url.dto");
  const nest_core_1 = require("@dataclouder/nest-core");
  const groq_service_1 = require("../../services/whisper/groq.service");
  const local_stt_service_1 = require("../../services/whisper/local-stt.service");
@@ -28,7 +29,7 @@ let AdapterSttController = AdapterSttController_1 = class AdapterSttController {
  this.groqService = groqService;
  this.localSttService = localSttService;
  }
- async processAudio(file, provider = 'local') {
+ async processAudio(file, provider = 'default') {
  this.logger.log(`Receive request with provider: ${provider}`);
  if (!file || !file.buffer) {
  this.logger.error('No file buffer received.');
@@ -36,6 +37,7 @@ let AdapterSttController = AdapterSttController_1 = class AdapterSttController {
  }
  this.logger.log(`Received file: ${file.originalname}, mimetype: ${file.mimetype}, size: ${file.size}`);
  try {
+ provider = provider == 'default' ? 'groq' : provider;
  let result;
  if (provider === 'groq') {
  result = await this.groqService.transcribeAudio(file.buffer, file.originalname, file.mimetype);
@@ -50,6 +52,23 @@ let AdapterSttController = AdapterSttController_1 = class AdapterSttController {
  return { error: 'Failed to transcribe audio.', details: error.message };
  }
  }
+ async transcribe(body, provider = 'local') {
+ this.logger.log(`Receive request to transcribe from URL: ${body.url} with provider: ${provider}`);
+ try {
+ let result;
+ if (provider === 'groq') {
+ result = await this.groqService.transcribeUrl(body.url);
+ }
+ else {
+ result = await this.localSttService.transcribeUrl(body.url);
+ }
+ return result;
+ }
+ catch (error) {
+ this.logger.error('Error during transcription process from URL:', error);
+ return { error: 'Failed to transcribe audio from URL.', details: error.message };
+ }
+ }
  };
  exports.AdapterSttController = AdapterSttController;
  __decorate([
@@ -59,7 +78,7 @@ __decorate([
  (0, nest_fastify_file_upload_1.ApiFileBody)('file'),
  (0, swagger_1.ApiQuery)({
  name: 'provider',
- enum: ['local', 'groq'],
+ enum: ['local', 'groq', 'default'],
  required: false,
  description: 'The provider to use for transcription. Defaults to "local".',
  }),
@@ -69,6 +88,21 @@ __decorate([
  __metadata("design:paramtypes", [Object, String]),
  __metadata("design:returntype", Promise)
  ], AdapterSttController.prototype, "processAudio", null);
+ __decorate([
+ (0, common_1.Post)('transcribe'),
+ (0, swagger_1.ApiBody)({ type: transcribe_url_dto_1.TranscribeUrlDto }),
+ (0, swagger_1.ApiQuery)({
+ name: 'provider',
+ enum: ['local', 'groq'],
+ required: false,
+ description: 'The provider to use for transcription. Defaults to "local".',
+ }),
+ __param(0, (0, common_1.Body)()),
+ __param(1, (0, common_1.Query)('provider')),
+ __metadata("design:type", Function),
+ __metadata("design:paramtypes", [transcribe_url_dto_1.TranscribeUrlDto, String]),
+ __metadata("design:returntype", Promise)
+ ], AdapterSttController.prototype, "transcribe", null);
  exports.AdapterSttController = AdapterSttController = AdapterSttController_1 = __decorate([
  (0, swagger_1.ApiTags)('[STT] Adapter'),
  (0, common_1.Controller)('api/ai-services/adapter/stt'),
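
Usage note: the adapter STT controller now exposes POST /api/ai-services/adapter/stt/transcribe, which accepts a TranscribeUrlDto body and an optional provider query parameter ('local' | 'groq'). A minimal client sketch in TypeScript; the host http://localhost:3000 is a hypothetical placeholder, not part of the package.

    // Minimal sketch: call the URL-transcription endpoint added in 0.0.70.
    // Host and port are assumptions; the route and body shape come from the diff above.
    async function transcribeFromUrl(audioUrl: string, provider: 'local' | 'groq' = 'groq'): Promise<any> {
      const res = await fetch(`http://localhost:3000/api/ai-services/adapter/stt/transcribe?provider=${provider}`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ url: audioUrl }), // matches TranscribeUrlDto: { url: string }
      });
      return res.json(); // transcription result, or { error, details } on failure
    }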
@@ -1,8 +1,10 @@
  import { type MulterFile } from '@webundsoehne/nest-fastify-file-upload';
+ import { TranscribeUrlDto } from '../../dto/transcribe-url.dto';
  import { GroqService } from '../../services/whisper/groq.service';
  export declare class GroqSttController {
  private readonly groqService;
  private readonly logger;
  constructor(groqService: GroqService);
  processAudio(file: MulterFile): Promise<any>;
+ transcribe(body: TranscribeUrlDto): Promise<any>;
  }
@@ -17,6 +17,7 @@ exports.GroqSttController = void 0;
  const common_1 = require("@nestjs/common");
  const nest_fastify_file_upload_1 = require("@webundsoehne/nest-fastify-file-upload");
  const swagger_1 = require("@nestjs/swagger");
+ const transcribe_url_dto_1 = require("../../dto/transcribe-url.dto");
  const groq_service_1 = require("../../services/whisper/groq.service");
  const nest_core_1 = require("@dataclouder/nest-core");
  let GroqSttController = GroqSttController_1 = class GroqSttController {
@@ -41,6 +42,17 @@ let GroqSttController = GroqSttController_1 = class GroqSttController {
  return { error: 'Failed to transcribe audio.', details: error.message };
  }
  }
+ async transcribe(body) {
+ this.logger.log(`Receive request to transcribe from URL: ${body.url}`);
+ try {
+ const result = await this.groqService.transcribeUrl(body.url);
+ return result;
+ }
+ catch (error) {
+ this.logger.error('Error during transcription process from URL:', error);
+ return { error: 'Failed to transcribe audio from URL.', details: error.message };
+ }
+ }
  };
  exports.GroqSttController = GroqSttController;
  __decorate([
@@ -53,6 +65,14 @@ __decorate([
  __metadata("design:paramtypes", [Object]),
  __metadata("design:returntype", Promise)
  ], GroqSttController.prototype, "processAudio", null);
+ __decorate([
+ (0, common_1.Post)('transcribe'),
+ (0, swagger_1.ApiBody)({ type: transcribe_url_dto_1.TranscribeUrlDto }),
+ __param(0, (0, common_1.Body)()),
+ __metadata("design:type", Function),
+ __metadata("design:paramtypes", [transcribe_url_dto_1.TranscribeUrlDto]),
+ __metadata("design:returntype", Promise)
+ ], GroqSttController.prototype, "transcribe", null);
  exports.GroqSttController = GroqSttController = GroqSttController_1 = __decorate([
  (0, swagger_1.ApiTags)('[STT] Groq'),
  (0, common_1.Controller)('api/ai-services/groq/stt'),
@@ -1,8 +1,10 @@
  import { type MulterFile } from '@webundsoehne/nest-fastify-file-upload';
+ import { TranscribeUrlDto } from '../../dto/transcribe-url.dto';
  import { LocalSttService } from '../../services/whisper/local-stt.service';
  export declare class LocalSttController {
  private readonly localSttService;
  private readonly logger;
  constructor(localSttService: LocalSttService);
  processAudio(file: MulterFile): Promise<any>;
+ transcribe(body: TranscribeUrlDto): Promise<any>;
  }
@@ -17,6 +17,7 @@ exports.LocalSttController = void 0;
  const common_1 = require("@nestjs/common");
  const nest_fastify_file_upload_1 = require("@webundsoehne/nest-fastify-file-upload");
  const swagger_1 = require("@nestjs/swagger");
+ const transcribe_url_dto_1 = require("../../dto/transcribe-url.dto");
  const nest_core_1 = require("@dataclouder/nest-core");
  const local_stt_service_1 = require("../../services/whisper/local-stt.service");
  let LocalSttController = LocalSttController_1 = class LocalSttController {
@@ -41,6 +42,17 @@ let LocalSttController = LocalSttController_1 = class LocalSttController {
  return { error: 'Failed to transcribe audio.', details: error.message };
  }
  }
+ async transcribe(body) {
+ this.logger.log(`Receive request to transcribe from URL: ${body.url}`);
+ try {
+ const result = await this.localSttService.transcribeUrl(body.url);
+ return result;
+ }
+ catch (error) {
+ this.logger.error('Error during transcription process from URL:', error);
+ return { error: 'Failed to transcribe audio from URL.', details: error.message };
+ }
+ }
  };
  exports.LocalSttController = LocalSttController;
  __decorate([
@@ -53,6 +65,14 @@ __decorate([
  __metadata("design:paramtypes", [Object]),
  __metadata("design:returntype", Promise)
  ], LocalSttController.prototype, "processAudio", null);
+ __decorate([
+ (0, common_1.Post)('transcribe'),
+ (0, swagger_1.ApiBody)({ type: transcribe_url_dto_1.TranscribeUrlDto }),
+ __param(0, (0, common_1.Body)()),
+ __metadata("design:type", Function),
+ __metadata("design:paramtypes", [transcribe_url_dto_1.TranscribeUrlDto]),
+ __metadata("design:returntype", Promise)
+ ], LocalSttController.prototype, "transcribe", null);
  exports.LocalSttController = LocalSttController = LocalSttController_1 = __decorate([
  (0, swagger_1.ApiTags)('[STT] Local Speaches'),
  (0, common_1.Controller)('api/ai-services/local/stt'),
@@ -52,14 +52,8 @@ let VideoAdapterController = VideoAdapterController_1 = class VideoAdapterContro
  async generateVideo(generateVideoDto, provider) {
  this.logger.log(`Received request to generate video: ${JSON.stringify(generateVideoDto)}`);
  this.logger.log(`Generate video with provider: ${provider}`);
- try {
- const operation = await this.vertexImageVideoService.generateContentFromMLDev();
- return { operation };
- }
- catch (error) {
- this.logger.error('Error in generateVideo endpoint:', error);
- throw error;
- }
+ const operation = await this.vertexImageVideoService.generateContentFromMLDev();
+ return { operation };
  }
  async generateVideoFromAssetId(id) {
  this.logger.log(`Received request to generate video from asset with id: ${id}`);
@@ -69,8 +63,9 @@ let VideoAdapterController = VideoAdapterController_1 = class VideoAdapterContro
  throw new common_1.NotFoundException(`Asset with id ${id} Does not have FIRST FRAME image url.`);
  }
  if (asset.provider === 'comfy') {
+ this.logger.log(`Starting COMFY Generation for asset ID ${id}`);
  const videoResponse = await this.comfySDKService.runVideoGenerationForAssetId(id);
- this.logger.log(`ComfyUI video generation started. Response: ${JSON.stringify(videoResponse)}`);
+ this.logger.log(`ComfyUI video generation finished for asset ${id}. Response: ${JSON.stringify(videoResponse)}`);
  return videoResponse;
  }
  else {
@@ -203,7 +198,7 @@ __decorate([
  ], VideoAdapterController.prototype, "getVeoVideoStatus", null);
  exports.VideoAdapterController = VideoAdapterController = VideoAdapterController_1 = __decorate([
  (0, swagger_1.ApiTags)('[VIDEO] Adapter'),
- (0, common_1.Controller)('api/vertex-adapter/video'),
+ (0, common_1.Controller)('api/adapter/video'),
  (0, common_1.UseFilters)(nest_core_1.AllExceptionsHandler),
  __metadata("design:paramtypes", [vertex_image_video_service_1.VertexImageVideoService,
  vertex_veo_gcp_service_1.VertexVeoGcpService,
@@ -1,3 +1,7 @@
  export declare class PromptDto {
  prompt: string;
+ aspectRatio?: string;
+ model?: string;
+ numberOfImages?: number;
+ provider?: string;
  }
package/dto/prompt.dto.js CHANGED
@@ -14,15 +14,63 @@ const swagger_1 = require("@nestjs/swagger");
  const class_validator_1 = require("class-validator");
  class PromptDto {
  prompt;
+ aspectRatio = '9:16';
+ model = 'imagen-4.0-generate-001';
+ numberOfImages = 1;
+ provider = 'google';
  }
  exports.PromptDto = PromptDto;
  __decorate([
  (0, swagger_1.ApiProperty)({
  description: 'The text prompt for image generation.',
- example: 'a futuristic cityscape at sunset',
+ example: 'Sunlight streams through a window. Low-key photograph with speed photography motion blur, beautiful bokeh, and natural lighting.',
  }),
  (0, class_validator_1.IsNotEmpty)(),
  (0, class_validator_1.IsString)(),
  __metadata("design:type", String)
  ], PromptDto.prototype, "prompt", void 0);
+ __decorate([
+ (0, swagger_1.ApiProperty)({
+ description: 'The aspect ratio of the generated image.',
+ example: '9:16',
+ default: '9:16',
+ required: false,
+ }),
+ (0, class_validator_1.IsOptional)(),
+ (0, class_validator_1.IsString)(),
+ __metadata("design:type", String)
+ ], PromptDto.prototype, "aspectRatio", void 0);
+ __decorate([
+ (0, swagger_1.ApiProperty)({
+ description: 'The model to use for image generation.',
+ example: 'imagen-4.0-generate-001',
+ default: 'imagen-4.0-generate-001',
+ required: false,
+ }),
+ (0, class_validator_1.IsOptional)(),
+ (0, class_validator_1.IsString)(),
+ __metadata("design:type", String)
+ ], PromptDto.prototype, "model", void 0);
+ __decorate([
+ (0, swagger_1.ApiProperty)({
+ description: 'The number of images to generate.',
+ example: 1,
+ default: 1,
+ required: false,
+ }),
+ (0, class_validator_1.IsOptional)(),
+ (0, class_validator_1.IsNumber)(),
+ __metadata("design:type", Number)
+ ], PromptDto.prototype, "numberOfImages", void 0);
+ __decorate([
+ (0, swagger_1.ApiProperty)({
+ description: 'The provider for image generation.',
+ example: 'google',
+ default: 'google',
+ required: false,
+ }),
+ (0, class_validator_1.IsOptional)(),
+ (0, class_validator_1.IsString)(),
+ __metadata("design:type", String)
+ ], PromptDto.prototype, "provider", void 0);
  //# sourceMappingURL=prompt.dto.js.map
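
For reference, PromptDto now carries four optional fields with in-class defaults (aspectRatio '9:16', model 'imagen-4.0-generate-001', numberOfImages 1, provider 'google'). A TypeScript sketch of the request payload shape, mirroring the DTO shown in the hunk above; the PromptPayload name is illustrative only.

    // Sketch of the payload accepted by PromptDto after this change.
    // Only `prompt` is required; the other fields fall back to the class defaults.
    interface PromptPayload {
      prompt: string;
      aspectRatio?: string;    // default '9:16'
      model?: string;          // default 'imagen-4.0-generate-001'
      numberOfImages?: number; // default 1
      provider?: string;       // default 'google'
    }

    const payload: PromptPayload = {
      prompt: 'Sunlight streams through a window. Low-key photograph with natural lighting.',
      numberOfImages: 1,
    };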
@@ -0,0 +1,3 @@
+ export declare class TranscribeUrlDto {
+ url: string;
+ }
@@ -0,0 +1,27 @@
+ "use strict";
+ var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
+ var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
+ if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
+ else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
+ return c > 3 && r && Object.defineProperty(target, key, r), r;
+ };
+ var __metadata = (this && this.__metadata) || function (k, v) {
+ if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.TranscribeUrlDto = void 0;
+ const swagger_1 = require("@nestjs/swagger");
+ const class_validator_1 = require("class-validator");
+ class TranscribeUrlDto {
+ url;
+ }
+ exports.TranscribeUrlDto = TranscribeUrlDto;
+ __decorate([
+ (0, swagger_1.ApiProperty)({
+ description: 'The URL of the audio file to transcribe.',
+ example: 'https://storage.googleapis.com/gtv-videos-bucket/sample/ForBiggerFun.mp4',
+ }),
+ (0, class_validator_1.IsUrl)(),
+ __metadata("design:type", String)
+ ], TranscribeUrlDto.prototype, "url", void 0);
+ //# sourceMappingURL=transcribe-url.dto.js.map
@@ -20,7 +20,6 @@ export interface ChatJsonResponse extends MessageLLM {
  }
  export interface ChatLLMRequestAdapter {
  messages: MessageLLM[];
- provider?: string;
  model?: Partial<IAIModel>;
  returnJson?: boolean;
  tierType?: TierType;
@@ -21,7 +21,8 @@ export declare const ModelLevel: {
  export declare enum GeminiVideoModels {
  Veo2 = "veo-2.0-generate-001",
  Veo3 = "veo-3.0-generate-001",
- Veo3Fast = "veo-3.0-fast-generate-001"
+ Veo3_1Fast = "veo-3.1-fast-generate-001",
+ Veo3_1 = "veo-3.1-generate-001"
  }
  export declare const GeminiImageModelsList: {
  name: string;
@@ -28,7 +28,8 @@ var GeminiVideoModels;
  (function (GeminiVideoModels) {
  GeminiVideoModels["Veo2"] = "veo-2.0-generate-001";
  GeminiVideoModels["Veo3"] = "veo-3.0-generate-001";
- GeminiVideoModels["Veo3Fast"] = "veo-3.0-fast-generate-001";
+ GeminiVideoModels["Veo3_1Fast"] = "veo-3.1-fast-generate-001";
+ GeminiVideoModels["Veo3_1"] = "veo-3.1-generate-001";
  })(GeminiVideoModels || (exports.GeminiVideoModels = GeminiVideoModels = {}));
  exports.GeminiImageModelsList = [
  { name: 'imagen 4 Ultra Preview', id: GeminiImageModels.Image4_0Ultra },
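
Migration note: GeminiVideoModels.Veo3Fast ("veo-3.0-fast-generate-001") is removed from the enum, so any caller referencing it must switch to one of the new Veo 3.1 members. A small TypeScript sketch of the updated enum usage; the import path below is an assumption and may differ from how the enum is actually re-exported by the package.

    // Members of GeminiVideoModels after 0.0.70; Veo3Fast no longer exists.
    // Import path is an assumption; adjust to wherever your app imports GeminiVideoModels from.
    import { GeminiVideoModels } from '@dataclouder/nest-vertex';

    const fastVideoModel = GeminiVideoModels.Veo3_1Fast; // 'veo-3.1-fast-generate-001'
    const fullVideoModel = GeminiVideoModels.Veo3_1;     // 'veo-3.1-generate-001'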
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@dataclouder/nest-vertex",
- "version": "0.0.68",
+ "version": "0.0.70",
  "description": "NestJS Vertex AI library for Dataclouder",
  "author": "dataclouder",
  "license": "MIT",
@@ -92,7 +92,7 @@ let LLMAdapterService = LLMAdapterService_1 = class LLMAdapterService {
  }
  }
  getQualityModel(model) {
- if (model.provider === 'google') {
+ if (model.provider === 'google' || !model?.provider) {
  return this.geminiChatService.getDefaultQualityModel(model?.quality || adapter_models_1.EModelQuality.FAST);
  }
  throw new nest_core_1.AppException({
@@ -73,7 +73,7 @@ class Video extends BaseService {
  return this.post(endpoint, dto);
  }
  async generateFromAssetId(id) {
- const endpoint = '/api/vertex-adapter/video/generate-video-from-asset-id/' + id;
+ const endpoint = '/api/adapter/video/generate-video-from-asset-id/' + id;
  return this.post(endpoint, {});
  }
  async generateVeo(dto) {
@@ -92,7 +92,7 @@ let VertexVeoGcpService = class VertexVeoGcpService {
  projectId = 'elite-ethos-467218-u1';
  locationId = 'us-central1';
  apiEndpoint = 'us-central1-aiplatform.googleapis.com';
- modelId = gemini_models_1.GeminiVideoModels.Veo3Fast;
+ modelId = gemini_models_1.GeminiVideoModels.Veo3_1Fast;
  constructor(httpService, mongoService, generatedAssetModel, cloudStorageService) {
  this.httpService = httpService;
  this.mongoService = mongoService;
@@ -142,7 +142,7 @@ let VertexVeoGcpService = class VertexVeoGcpService {
  const videoModel = params?.model || this.modelId;
  const url = `https://${this.apiEndpoint}/v1/projects/${this.projectId}/locations/${this.locationId}/publishers/google/models/${videoModel}:predictLongRunning`;
  if (!accessToken) {
- accessToken = await getAccessToken() || undefined;
+ accessToken = (await getAccessToken()) || undefined;
  this.logger.log(`Generated access token: ${accessToken}`);
  }
  const defaultParams = {
@@ -209,7 +209,7 @@ let VertexVeoGcpService = class VertexVeoGcpService {
  }
  const url = `https://${this.apiEndpoint}/v1/projects/${this.projectId}/locations/${this.locationId}/publishers/google/models/${this.modelId}:fetchPredictOperation`;
  if (!accessToken) {
- accessToken = await getAccessToken();
+ accessToken = (await getAccessToken());
  }
  const requestBody = {
  operationName,
@@ -291,7 +291,7 @@ let VertexVeoGcpService = class VertexVeoGcpService {
  this.logger.error(`Error polling for ${operationName}`, error);
  }
  const delay = elapsedTime < initialPhaseDuration ? initialPhaseDelay : finalPhaseDelay;
- await new Promise((resolve) => setTimeout(resolve, delay));
+ await new Promise(resolve => setTimeout(resolve, delay));
  }
  this.logger.warn(`Polling for ${operationName} timed out after 5 minutes.`);
  const asset = await this.generatedAssetModel.findOne({ operationId: operationName });
@@ -1,6 +1,6 @@
  import { Model } from 'mongoose';
  import { GeneratedAsset } from '../models/generated-asset.entity';
- import { CloudStorageService } from '@dataclouder/nest-storage';
+ import { CloudFileStorage, CloudStorageService } from '@dataclouder/nest-storage';
  import { GeneratedAssetService } from './generated-asset.service';
  import { HttpService } from '@nestjs/axios';
  import { GoogleGenaiService } from './google-genai.service';
@@ -15,7 +15,9 @@ export declare class VertexVeoGenaiService {
  generateVideo(): Promise<any>;
  generateVideoFromAssetId(assetId: string, metadata?: any): Promise<{
  url: string | undefined;
- result: Partial<import("@dataclouder/nest-storage").CloudFileStorage>;
+ result: Partial<CloudFileStorage>;
  }>;
  private _generateVideo;
+ private _generateThumbnail;
+ private _generateGif;
  }
@@ -44,6 +44,9 @@ var __metadata = (this && this.__metadata) || function (k, v) {
  var __param = (this && this.__param) || function (paramIndex, decorator) {
  return function (target, key) { decorator(target, key, paramIndex); }
  };
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.VertexVeoGenaiService = void 0;
  const common_1 = require("@nestjs/common");
@@ -55,6 +58,8 @@ const generated_asset_service_1 = require("./generated-asset.service");
  const axios_1 = require("@nestjs/axios");
  const uuid_1 = require("uuid");
  const fs = __importStar(require("fs/promises"));
+ const fluent_ffmpeg_1 = __importDefault(require("fluent-ffmpeg"));
+ const path = __importStar(require("path"));
  const google_genai_service_1 = require("./google-genai.service");
  const gemini_models_1 = require("../models/gemini-models");
  const key_balancer_models_1 = require("../models/key-balancer.models");
@@ -76,18 +81,18 @@ let VertexVeoGenaiService = class VertexVeoGenaiService {
  async generateVideo() {
  const prompt = 'Panning wide shot of a calico kitten sleeping in the sunshine';
  const { client, balancedKey } = await this.googleGenaiService.getGoogleGenAIClient({
- model: gemini_models_1.GeminiVideoModels.Veo3Fast,
+ model: gemini_models_1.GeminiVideoModels.Veo3_1Fast,
  keyTierType: key_balancer_models_1.TierType.TIER_1,
  aiType: key_balancer_models_1.ModelType.VIDEO,
  });
- console.log('the balance key for video is ', balancedKey);
+ console.log(`key balancer for video is ${balancedKey.name} ${balancedKey.service} , ${balancedKey.totalRequest}`);
  const imagenResponse = await client.models.generateImages({
  model: 'imagen-3.0-generate-002',
  prompt: prompt,
  });
  this.logger.log(imagenResponse);
  const videoRequest = {
- model: gemini_models_1.GeminiVideoModels.Veo3Fast,
+ model: gemini_models_1.GeminiVideoModels.Veo3_1Fast,
  prompt: prompt,
  image: {
  imageBytes: imagenResponse.generatedImages[0].image.imageBytes,
@@ -107,7 +112,7 @@ let VertexVeoGenaiService = class VertexVeoGenaiService {
  const response = await this.httpService.axiosRef.get(imageUrl, { responseType: 'arraybuffer' });
  const imageBufferBytes = Buffer.from(response.data, 'binary');
  const videoRequest = {
- model: gemini_models_1.GeminiVideoModels.Veo3Fast,
+ model: gemini_models_1.GeminiVideoModels.Veo3_1Fast,
  prompt: genAsset.prompt || genAsset?.description || 'Random movement for video',
  config: {
  aspectRatio: '9:16',
@@ -129,12 +134,12 @@ let VertexVeoGenaiService = class VertexVeoGenaiService {
  async _generateVideo(videoRequest) {
  console.log('Generating video...', videoRequest);
  const { client, balancedKey } = await this.googleGenaiService.getGoogleGenAIClient({
- model: gemini_models_1.GeminiVideoModels.Veo3Fast,
+ model: gemini_models_1.GeminiVideoModels.Veo3_1Fast,
  keyTierType: key_balancer_models_1.TierType.TIER_1,
  aiType: key_balancer_models_1.ModelType.VIDEO,
  });
  videoRequest.model = 'veo-3.1-fast-generate-preview';
- console.log('the balance key for video is ', balancedKey);
+ console.log(`the balance key for video is ${balancedKey.name} ${balancedKey.service}`);
  let operation = await client.models.generateVideos(videoRequest);
  const filename = `veo_video_${(0, uuid_1.v4)()}.mp4`;
  let seconds = 0;
@@ -159,6 +164,13 @@ let VertexVeoGenaiService = class VertexVeoGenaiService {
  console.log('Subiendo video a storage', filename, process.env.STORAGE_BUCKET);
  const uploadResult = await this.cloudStorageService.uploadFileAndMakePublic(process.env.STORAGE_BUCKET, `generated-videos/${filename}`, videoBuffer, 'video/mp4');
  this.logger.log(`Generated video uploaded to ${uploadResult.url}`);
+ const thumbnail = await this._generateThumbnail(filename);
+ this.logger.log(`Generated thumbnail uploaded to ${thumbnail.url}`);
+ const gif = await this._generateGif(filename);
+ this.logger.log(`Generated gif uploaded to ${gif.url}`);
+ uploadResult['thumbnail'] = thumbnail;
+ uploadResult['gif'] = gif;
+ this.logger.log(`Final Result is ${JSON.stringify(uploadResult)}`);
  return {
  url: uploadResult.url,
  result: uploadResult,
@@ -168,12 +180,67 @@ let VertexVeoGenaiService = class VertexVeoGenaiService {
  try {
  await fs.unlink(filename);
  this.logger.log(`Successfully deleted temporary video file: ${filename}`);
+ const thumbnailFilename = filename.replace('.mp4', '.jpg');
+ await fs.unlink(thumbnailFilename).catch(() => { });
+ const gifFilename = filename.replace('.mp4', '.gif');
+ await fs.unlink(gifFilename).catch(() => { });
  }
  catch (error) {
  this.logger.error(`Failed to delete temporary video file: ${filename}`, error);
  }
  }
  }
+ _generateThumbnail(videoPath) {
+ return new Promise((resolve, reject) => {
+ const thumbnailFilename = videoPath.replace('.mp4', '.jpg');
+ (0, fluent_ffmpeg_1.default)(videoPath)
+ .on('end', async () => {
+ this.logger.log(`Thumbnail generated: ${thumbnailFilename}`);
+ const thumbnailBuffer = await fs.readFile(thumbnailFilename);
+ const uploadResult = await this.cloudStorageService.uploadFileAndMakePublic(process.env.STORAGE_BUCKET, `generated-videos/${path.basename(thumbnailFilename)}`, thumbnailBuffer, 'image/jpeg');
+ if (uploadResult.url) {
+ resolve(uploadResult);
+ }
+ else {
+ reject(new Error('Thumbnail upload failed, URL not returned.'));
+ }
+ })
+ .on('error', err => {
+ this.logger.error(`Error generating thumbnail: ${err.message}`);
+ reject(err);
+ })
+ .screenshots({
+ count: 1,
+ folder: path.dirname(videoPath),
+ filename: path.basename(thumbnailFilename),
+ timemarks: ['50%'],
+ });
+ });
+ }
+ _generateGif(videoPath) {
+ return new Promise((resolve, reject) => {
+ const gifFilename = videoPath.replace('.mp4', '.gif');
+ (0, fluent_ffmpeg_1.default)(videoPath)
+ .on('end', async () => {
+ this.logger.log(`GIF generated: ${gifFilename}`);
+ const gifBuffer = await fs.readFile(gifFilename);
+ const uploadResult = await this.cloudStorageService.uploadFileAndMakePublic(process.env.STORAGE_BUCKET, `generated-videos/${path.basename(gifFilename)}`, gifBuffer, 'image/gif');
+ if (uploadResult.url) {
+ resolve(uploadResult);
+ }
+ else {
+ reject(new Error('GIF upload failed, URL not returned.'));
+ }
+ })
+ .on('error', err => {
+ this.logger.error(`Error generating GIF: ${err.message}`);
+ reject(err);
+ })
+ .outputOptions(['-vf', 'fps=5,scale=180:-1:flags=lanczos', '-ss', '0', '-t', '5'])
+ .toFormat('gif')
+ .save(gifFilename);
+ });
+ }
  };
  exports.VertexVeoGenaiService = VertexVeoGenaiService;
  exports.VertexVeoGenaiService = VertexVeoGenaiService = __decorate([
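
As the _generateThumbnail and _generateGif additions above show, the video upload result is now enriched with a JPEG thumbnail (taken at the 50% timemark) and a short GIF preview (5 s at 5 fps, scaled to 180 px wide), both produced with fluent-ffmpeg, which requires an ffmpeg binary on the host. A rough TypeScript sketch of the returned shape, assuming only that uploadFileAndMakePublic resolves to a Partial<CloudFileStorage> with at least a url field; the interface name is illustrative.

    // Rough shape of the value resolved by generateVideoFromAssetId / _generateVideo after 0.0.70.
    // CloudFileStorage comes from @dataclouder/nest-storage; fields beyond `url` are assumed.
    import type { CloudFileStorage } from '@dataclouder/nest-storage';

    interface GeneratedVideoResult {
      url: string | undefined;
      result: Partial<CloudFileStorage> & {
        thumbnail?: Partial<CloudFileStorage>; // JPEG frame at the 50% timemark
        gif?: Partial<CloudFileStorage>;       // 5 s preview, fps=5, 180 px wide
      };
    }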
@@ -8,4 +8,5 @@ export declare class GroqService {
  private getClientWithKey;
  private getExtensionFromMimeType;
  transcribeAudio(fileBuffer: Buffer, originalFileName: string, mimeType: string): Promise<Groq.Audio.Transcriptions.Transcription>;
+ transcribeUrl(url: string): Promise<any>;
  }
@@ -41,11 +41,15 @@ var __importStar = (this && this.__importStar) || (function () {
  var __metadata = (this && this.__metadata) || function (k, v) {
  if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
  };
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
  var GroqService_1;
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.GroqService = void 0;
  const common_1 = require("@nestjs/common");
  const groq_sdk_1 = __importStar(require("groq-sdk"));
+ const axios_1 = __importDefault(require("axios"));
  const path = __importStar(require("path"));
  const key_balancer_api_service_1 = require("../key-balancer-api.service");
  const key_balancer_models_1 = require("../../models/key-balancer.models");
@@ -129,6 +133,21 @@ let GroqService = GroqService_1 = class GroqService {
  throw new Error(`Failed to transcribe audio: ${errorMessage}`);
  }
  }
+ async transcribeUrl(url) {
+ this.logger.log(`Fetching audio from URL: ${url}`);
+ try {
+ const response = await axios_1.default.get(url, { responseType: 'arraybuffer' });
+ const fileBuffer = Buffer.from(response.data);
+ const mimeType = response.headers['content-type'];
+ const fileName = path.basename(new URL(url).pathname);
+ this.logger.log(`Downloaded file: ${fileName}, mime: ${mimeType}, size: ${fileBuffer.length}`);
+ return this.transcribeAudio(fileBuffer, fileName, mimeType);
+ }
+ catch (error) {
+ this.logger.error(`Error fetching or processing audio from URL ${url}:`, error);
+ throw new Error(`Failed to process audio from URL: ${error.message}`);
+ }
+ }
  };
  exports.GroqService = GroqService;
  exports.GroqService = GroqService = GroqService_1 = __decorate([
@@ -4,4 +4,5 @@ export declare class LocalSttService {
  constructor();
  private getExtensionFromMimeType;
  transcribeAudio(fileBuffer: Buffer, originalFileName: string, mimeType: string): Promise<any>;
+ transcribeUrl(url: string): Promise<any>;
  }
@@ -41,12 +41,16 @@ var __importStar = (this && this.__importStar) || (function () {
  var __metadata = (this && this.__metadata) || function (k, v) {
  if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
  };
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
  var LocalSttService_1;
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.LocalSttService = void 0;
  const common_1 = require("@nestjs/common");
  const openai_1 = __importStar(require("openai"));
  const path = __importStar(require("path"));
+ const axios_1 = __importDefault(require("axios"));
  let LocalSttService = LocalSttService_1 = class LocalSttService {
  logger = new common_1.Logger(LocalSttService_1.name);
  openai;
@@ -54,7 +58,7 @@ let LocalSttService = LocalSttService_1 = class LocalSttService {
  const aiServerHost = process.env.AI_LAB_HOST;
  console.log('Connecting to host AI_LAB_HOST', aiServerHost);
  this.openai = new openai_1.default({
- baseURL: `${aiServerHost}:3171/v1`,
+ baseURL: `${aiServerHost}:3337/v1`,
  apiKey: '',
  });
  }
@@ -129,6 +133,21 @@ let LocalSttService = LocalSttService_1 = class LocalSttService {
  throw new Error(`Failed to transcribe audio: ${errorMessage}`);
  }
  }
+ async transcribeUrl(url) {
+ this.logger.log(`Fetching audio from URL: ${url}`);
+ try {
+ const response = await axios_1.default.get(url, { responseType: 'arraybuffer' });
+ const fileBuffer = Buffer.from(response.data);
+ const mimeType = response.headers['content-type'];
+ const fileName = path.basename(new URL(url).pathname);
+ this.logger.log(`Downloaded file: ${fileName}, mime: ${mimeType}, size: ${fileBuffer.length}`);
+ return this.transcribeAudio(fileBuffer, fileName, mimeType);
+ }
+ catch (error) {
+ this.logger.error(`Error fetching or processing audio from URL ${url}:`, error);
+ throw new Error(`Failed to process audio from URL: ${error.message}`);
+ }
+ }
  };
  exports.LocalSttService = LocalSttService;
  exports.LocalSttService = LocalSttService = LocalSttService_1 = __decorate([
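
Both GroqService and LocalSttService now expose transcribeUrl(url), which downloads the file with axios and forwards the buffer to transcribeAudio; LocalSttService also moves its OpenAI-compatible base URL from port 3171 to 3337 on AI_LAB_HOST. A minimal sketch of calling the new method from application code, assuming GroqService is injectable in your app and re-exported from the package root (the import path is an assumption).

    // Minimal sketch: using GroqService.transcribeUrl from a consumer service.
    // Import path and module wiring are assumptions; only the method signature comes from the diff.
    import { Injectable } from '@nestjs/common';
    import { GroqService } from '@dataclouder/nest-vertex';

    @Injectable()
    export class EpisodeTranscriptionService {
      constructor(private readonly groqService: GroqService) {}

      async transcribeEpisode(audioUrl: string): Promise<any> {
        // Downloads the audio from the URL and runs Groq transcription on the resulting buffer.
        return this.groqService.transcribeUrl(audioUrl);
      }
    }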