@dataclouder/nest-vertex 0.0.57 → 0.0.59

This diff compares publicly available package versions as published to one of the supported registries. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registry.
@@ -59,7 +59,7 @@ __decorate([
59
59
  __metadata("design:returntype", Promise)
60
60
  ], VertexImageVideoAdapterController.prototype, "generateImage", null);
61
61
  exports.VertexImageVideoAdapterController = VertexImageVideoAdapterController = __decorate([
62
- (0, swagger_1.ApiTags)('Image Generation Adapter'),
62
+ (0, swagger_1.ApiTags)('[Image] Adapter'),
63
63
  (0, common_1.Controller)('api/ai-services/image/adapter'),
64
64
  (0, common_2.UseFilters)(nest_core_1.AllExceptionsHandler),
65
65
  __metadata("design:paramtypes", [vertex_image_video_service_1.VertexImageVideoService,
@@ -44,7 +44,7 @@ __decorate([
44
44
  __metadata("design:returntype", Promise)
45
45
  ], GroqLlmController.prototype, "generateText", null);
46
46
  exports.GroqLlmController = GroqLlmController = GroqLlmController_1 = __decorate([
47
- (0, swagger_1.ApiTags)('LLM Groq'),
47
+ (0, swagger_1.ApiTags)('[LLM] Groq'),
48
48
  (0, common_1.Controller)('api/ai-services/groq/llm'),
49
49
  __metadata("design:paramtypes", [groq_llm_service_1.GroqLlmService])
50
50
  ], GroqLlmController);
@@ -66,7 +66,7 @@ __decorate([
66
66
  __metadata("design:returntype", Promise)
67
67
  ], VertexAdapterLLMController.prototype, "continueConversation", null);
68
68
  exports.VertexAdapterLLMController = VertexAdapterLLMController = VertexAdapterLLMController_1 = __decorate([
69
- (0, swagger_1.ApiTags)('AI Services Adapter LLM'),
69
+ (0, swagger_1.ApiTags)('[LLM] Adapter'),
70
70
  (0, common_1.Controller)('api/ai-services/adapter/llm'),
71
71
  (0, common_2.UseFilters)(nest_core_1.AllExceptionsHandler),
72
72
  __metadata("design:paramtypes", [adapter_llm_service_1.LLMAdapterService])
@@ -61,7 +61,7 @@ __decorate([
61
61
  __metadata("design:returntype", Promise)
62
62
  ], GeminiChatController.prototype, "continueConversation", null);
63
63
  exports.GeminiChatController = GeminiChatController = GeminiChatController_1 = __decorate([
64
- (0, swagger_1.ApiTags)('LLM Gemini '),
64
+ (0, swagger_1.ApiTags)('[LLM] Gemini'),
65
65
  (0, common_1.Controller)('api/ai-services/gemini'),
66
66
  (0, common_1.UseFilters)(nest_core_1.AllExceptionsHandler),
67
67
  __metadata("design:paramtypes", [vertex_gemini_chat_service_1.GeminiChatService])
@@ -0,0 +1,10 @@
1
+ import { type MulterFile } from '@webundsoehne/nest-fastify-file-upload';
2
+ import { GroqService } from '../../services/whisper/groq.service';
3
+ import { LocalSttService } from '../../services/whisper/local-stt.service';
4
+ export declare class AdapterSttController {
5
+ private readonly groqService;
6
+ private readonly localSttService;
7
+ private readonly logger;
8
+ constructor(groqService: GroqService, localSttService: LocalSttService);
9
+ processAudio(file: MulterFile, provider?: 'local' | 'groq'): Promise<any>;
10
+ }
@@ -0,0 +1,79 @@
1
+ "use strict";
2
+ var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
3
+ var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
4
+ if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
5
+ else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
6
+ return c > 3 && r && Object.defineProperty(target, key, r), r;
7
+ };
8
+ var __metadata = (this && this.__metadata) || function (k, v) {
9
+ if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
10
+ };
11
+ var __param = (this && this.__param) || function (paramIndex, decorator) {
12
+ return function (target, key) { decorator(target, key, paramIndex); }
13
+ };
14
+ var AdapterSttController_1;
15
+ Object.defineProperty(exports, "__esModule", { value: true });
16
+ exports.AdapterSttController = void 0;
17
+ const common_1 = require("@nestjs/common");
18
+ const nest_fastify_file_upload_1 = require("@webundsoehne/nest-fastify-file-upload");
19
+ const swagger_1 = require("@nestjs/swagger");
20
+ const nest_core_1 = require("@dataclouder/nest-core");
21
+ const groq_service_1 = require("../../services/whisper/groq.service");
22
+ const local_stt_service_1 = require("../../services/whisper/local-stt.service");
23
+ let AdapterSttController = AdapterSttController_1 = class AdapterSttController {
24
+ groqService;
25
+ localSttService;
26
+ logger = new common_1.Logger(AdapterSttController_1.name);
27
+ constructor(groqService, localSttService) {
28
+ this.groqService = groqService;
29
+ this.localSttService = localSttService;
30
+ }
31
+ async processAudio(file, provider = 'local') {
32
+ this.logger.log(`Receive request with provider: ${provider}`);
33
+ if (!file || !file.buffer) {
34
+ this.logger.error('No file buffer received.');
35
+ return { error: 'No file uploaded or file buffer is missing.' };
36
+ }
37
+ this.logger.log(`Received file: ${file.originalname}, mimetype: ${file.mimetype}, size: ${file.size}`);
38
+ try {
39
+ let result;
40
+ if (provider === 'groq') {
41
+ result = await this.groqService.transcribeAudio(file.buffer, file.originalname, file.mimetype);
42
+ }
43
+ else {
44
+ result = await this.localSttService.transcribeAudio(file.buffer, file.originalname, file.mimetype);
45
+ }
46
+ return result;
47
+ }
48
+ catch (error) {
49
+ this.logger.error('Error during transcription process:', error);
50
+ return { error: 'Failed to transcribe audio.', details: error.message };
51
+ }
52
+ }
53
+ };
54
+ exports.AdapterSttController = AdapterSttController;
55
+ __decorate([
56
+ (0, common_1.Post)('transcribe-bytes'),
57
+ (0, common_1.UseInterceptors)((0, nest_fastify_file_upload_1.FileInterceptor)('file')),
58
+ (0, swagger_1.ApiConsumes)('multipart/form-data'),
59
+ (0, nest_fastify_file_upload_1.ApiFileBody)('file'),
60
+ (0, swagger_1.ApiQuery)({
61
+ name: 'provider',
62
+ enum: ['local', 'groq'],
63
+ required: false,
64
+ description: 'The provider to use for transcription. Defaults to "local".',
65
+ }),
66
+ __param(0, (0, common_1.UploadedFile)('file')),
67
+ __param(1, (0, common_1.Query)('provider')),
68
+ __metadata("design:type", Function),
69
+ __metadata("design:paramtypes", [Object, String]),
70
+ __metadata("design:returntype", Promise)
71
+ ], AdapterSttController.prototype, "processAudio", null);
72
+ exports.AdapterSttController = AdapterSttController = AdapterSttController_1 = __decorate([
73
+ (0, swagger_1.ApiTags)('[STT] Adapter'),
74
+ (0, common_1.Controller)('api/ai-services/adapter/stt'),
75
+ (0, common_1.UseFilters)(nest_core_1.AllExceptionsHandler),
76
+ __metadata("design:paramtypes", [groq_service_1.GroqService,
77
+ local_stt_service_1.LocalSttService])
78
+ ], AdapterSttController);
79
+ //# sourceMappingURL=adapter-stt.controller.js.map
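The new `AdapterSttController` exposes `POST api/ai-services/adapter/stt/transcribe-bytes`, which accepts a multipart `file` field plus an optional `provider` query parameter (`local` or `groq`, defaulting to `local`). A minimal caller sketch, assuming a runtime with global `fetch`/`FormData`/`Blob` (Node 18+ or a browser); `baseUrl`, the audio bytes, and the MIME type are placeholders, not part of the package:

```ts
// Sketch only: the field name 'file' matches FileInterceptor('file') in AdapterSttController.
async function transcribeBytes(baseUrl: string, audio: Uint8Array, provider: 'local' | 'groq' = 'local') {
  const form = new FormData();
  form.append('file', new Blob([audio], { type: 'audio/wav' }), 'sample.wav');

  const res = await fetch(`${baseUrl}/api/ai-services/adapter/stt/transcribe-bytes?provider=${provider}`, {
    method: 'POST',
    body: form,
  });
  // Resolves to the transcription result, or { error, details } on failure (see the controller above).
  return res.json();
}
```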
@@ -54,8 +54,8 @@ __decorate([
54
54
  __metadata("design:returntype", Promise)
55
55
  ], GroqSttController.prototype, "processAudio", null);
56
56
  exports.GroqSttController = GroqSttController = GroqSttController_1 = __decorate([
57
- (0, swagger_1.ApiTags)('Speech-to-Text'),
58
- (0, common_1.Controller)('api/ai-services/groq-stt'),
57
+ (0, swagger_1.ApiTags)('[STT] Groq'),
58
+ (0, common_1.Controller)('api/ai-services/groq/stt'),
59
59
  (0, common_1.UseFilters)(nest_core_1.AllExceptionsHandler),
60
60
  __metadata("design:paramtypes", [groq_service_1.GroqService])
61
61
  ], GroqSttController);
@@ -54,8 +54,8 @@ __decorate([
54
54
  __metadata("design:returntype", Promise)
55
55
  ], LocalSttController.prototype, "processAudio", null);
56
56
  exports.LocalSttController = LocalSttController = LocalSttController_1 = __decorate([
57
- (0, swagger_1.ApiTags)('Speech-to-Text'),
58
- (0, common_1.Controller)('api/ai-services/local-stt'),
57
+ (0, swagger_1.ApiTags)('[STT] Local Speaches'),
58
+ (0, common_1.Controller)('api/ai-services/local/stt'),
59
59
  (0, common_1.UseFilters)(nest_core_1.AllExceptionsHandler),
60
60
  __metadata("design:paramtypes", [local_stt_service_1.LocalSttService])
61
61
  ], LocalSttController);
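Note that the provider-specific STT routes moved from `api/ai-services/groq-stt` and `api/ai-services/local-stt` to `api/ai-services/groq/stt` and `api/ai-services/local/stt`. A small sketch of how a consumer might centralize the 0.0.59 paths; only the path strings come from the diff, the constant itself is illustrative:

```ts
// Illustrative route map for 0.0.59; not exported by the package.
const STT_ROUTES = {
  adapter: 'api/ai-services/adapter/stt', // new in 0.0.59
  groq: 'api/ai-services/groq/stt',       // was api/ai-services/groq-stt
  local: 'api/ai-services/local/stt',     // was api/ai-services/local-stt
} as const;
```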
@@ -79,7 +79,7 @@ __decorate([
79
79
  __metadata("design:returntype", Promise)
80
80
  ], VertexGeminiTtsController.prototype, "synthesizeSpeech", null);
81
81
  exports.VertexGeminiTtsController = VertexGeminiTtsController = __decorate([
82
- (0, swagger_1.ApiTags)('Vertex Gemini TTS'),
82
+ (0, swagger_1.ApiTags)('Text To Speech Gemini'),
83
83
  (0, common_1.Controller)('api/vertex-gemini/tts'),
84
84
  (0, common_1.UseFilters)(nest_core_1.AllExceptionsHandler),
85
85
  __metadata("design:paramtypes", [vertex_gemini_tts_service_1.VertexGeminiTtsService])
@@ -104,7 +104,7 @@ __decorate([
104
104
  __metadata("design:returntype", Promise)
105
105
  ], VertexTtsAdapterController.prototype, "listVoices", null);
106
106
  exports.VertexTtsAdapterController = VertexTtsAdapterController = __decorate([
107
- (0, swagger_1.ApiTags)('Vertex TTS Adapter'),
107
+ (0, swagger_1.ApiTags)('[TTS] Adapter'),
108
108
  (0, common_1.Controller)('api/ai-services/adapter/tts'),
109
109
  (0, common_2.UseFilters)(nest_core_1.AllExceptionsHandler),
110
110
  __metadata("design:paramtypes", [vertex_tts_service_1.VertextTtsService])
@@ -105,7 +105,7 @@ __decorate([
105
105
  __metadata("design:returntype", Promise)
106
106
  ], VertexComfyController.prototype, "getVideo", null);
107
107
  exports.VertexComfyController = VertexComfyController = VertexComfyController_1 = __decorate([
108
- (0, swagger_1.ApiTags)('Vertex AI'),
108
+ (0, swagger_1.ApiTags)('[Comfy] Vertex AI Pending to categorize...'),
109
109
  (0, common_1.Controller)('api/vertex-comfy'),
110
110
  (0, common_1.UseFilters)(nest_core_1.AllExceptionsHandler),
111
111
  __metadata("design:paramtypes", [comfy_image_service_1.ComfyImageService,
@@ -40,7 +40,7 @@ __decorate([
40
40
  __metadata("design:returntype", Promise)
41
41
  ], VertexVeoGenerationController.prototype, "generateVeoVideo", null);
42
42
  exports.VertexVeoGenerationController = VertexVeoGenerationController = VertexVeoGenerationController_1 = __decorate([
43
- (0, swagger_1.ApiTags)('Vertex Image/video Adapter'),
43
+ (0, swagger_1.ApiTags)('[Video] VEO'),
44
44
  (0, common_1.Controller)('api/vertex-veo/video'),
45
45
  __metadata("design:paramtypes", [vertex_veo_genai_service_1.VertexVeoGenaiService])
46
46
  ], VertexVeoGenerationController);
@@ -8,7 +8,7 @@ import { GeneratedAssetService } from '../../services/generated-asset.service';
8
8
  import { ComfyVideoService } from '../../comfyui/services/comfy-video.service';
9
9
  import { VertexVeoGenaiService } from '../../services/vertex-veo-genai.service';
10
10
  import { ComfySDKService } from '../../comfyui/services/comfy-sdk.service';
11
- export declare class VertexVideoImageAdapterController {
11
+ export declare class VideoAdapterController {
12
12
  private readonly vertexImageVideoService;
13
13
  private readonly vertexVeoGcpService;
14
14
  private readonly generatedAssetService;
@@ -22,7 +22,7 @@ export declare class VertexVideoImageAdapterController {
22
22
  id: string;
23
23
  }[] | undefined>;
24
24
  generateVideo(generateVideoDto: GenerateVideoDto, provider?: string): Promise<any>;
25
- generateVideoFromAsset(id: string): Promise<import("../../models/generated-asset.entity").GeneratedAssetDocument | {
25
+ generateVideoFromAssetId(id: string): Promise<import("../../models/generated-asset.entity").GeneratedAssetDocument | {
26
26
  status: string;
27
27
  message: string;
28
28
  } | null>;
@@ -11,9 +11,9 @@ var __metadata = (this && this.__metadata) || function (k, v) {
11
11
  var __param = (this && this.__param) || function (paramIndex, decorator) {
12
12
  return function (target, key) { decorator(target, key, paramIndex); }
13
13
  };
14
- var VertexVideoImageAdapterController_1;
14
+ var VideoAdapterController_1;
15
15
  Object.defineProperty(exports, "__esModule", { value: true });
16
- exports.VertexVideoImageAdapterController = void 0;
16
+ exports.VideoAdapterController = void 0;
17
17
  const common_1 = require("@nestjs/common");
18
18
  const vertex_image_video_service_1 = require("../../services/vertex-image-video.service");
19
19
  const generate_video_dto_1 = require("../../dto/generate-video.dto");
@@ -25,14 +25,14 @@ const comfy_video_service_1 = require("../../comfyui/services/comfy-video.servic
25
25
  const vertex_veo_genai_service_1 = require("../../services/vertex-veo-genai.service");
26
26
  const comfy_sdk_service_1 = require("../../comfyui/services/comfy-sdk.service");
27
27
  const nest_core_1 = require("@dataclouder/nest-core");
28
- let VertexVideoImageAdapterController = VertexVideoImageAdapterController_1 = class VertexVideoImageAdapterController {
28
+ let VideoAdapterController = VideoAdapterController_1 = class VideoAdapterController {
29
29
  vertexImageVideoService;
30
30
  vertexVeoGcpService;
31
31
  generatedAssetService;
32
32
  comfyVideoService;
33
33
  vertexVeoGenaiService;
34
34
  comfySDKService;
35
- logger = new common_1.Logger(VertexVideoImageAdapterController_1.name);
35
+ logger = new common_1.Logger(VideoAdapterController_1.name);
36
36
  constructor(vertexImageVideoService, vertexVeoGcpService, generatedAssetService, comfyVideoService, vertexVeoGenaiService, comfySDKService) {
37
37
  this.vertexImageVideoService = vertexImageVideoService;
38
38
  this.vertexVeoGcpService = vertexVeoGcpService;
@@ -61,7 +61,7 @@ let VertexVideoImageAdapterController = VertexVideoImageAdapterController_1 = cl
61
61
  throw error;
62
62
  }
63
63
  }
64
- async generateVideoFromAsset(id) {
64
+ async generateVideoFromAssetId(id) {
65
65
  this.logger.log(`Received request to generate video from asset with id: ${id}`);
66
66
  try {
67
67
  const asset = await this.generatedAssetService.findOne(id);
@@ -139,7 +139,7 @@ let VertexVideoImageAdapterController = VertexVideoImageAdapterController_1 = cl
139
139
  }
140
140
  }
141
141
  };
142
- exports.VertexVideoImageAdapterController = VertexVideoImageAdapterController;
142
+ exports.VideoAdapterController = VideoAdapterController;
143
143
  __decorate([
144
144
  (0, common_1.Get)('list-models'),
145
145
  (0, swagger_1.ApiQuery)({ name: 'provider', required: false, type: String, description: 'The provider for the service (e.g., "local", "google")' }),
@@ -147,7 +147,7 @@ __decorate([
147
147
  __metadata("design:type", Function),
148
148
  __metadata("design:paramtypes", [String]),
149
149
  __metadata("design:returntype", Promise)
150
- ], VertexVideoImageAdapterController.prototype, "listModels", null);
150
+ ], VideoAdapterController.prototype, "listModels", null);
151
151
  __decorate([
152
152
  (0, common_1.Post)('generate-video'),
153
153
  (0, swagger_1.ApiQuery)({ name: 'provider', required: false, type: String, description: 'The provider for the video generation service (e.g., "google")' }),
@@ -156,35 +156,35 @@ __decorate([
156
156
  __metadata("design:type", Function),
157
157
  __metadata("design:paramtypes", [generate_video_dto_1.GenerateVideoDto, String]),
158
158
  __metadata("design:returntype", Promise)
159
- ], VertexVideoImageAdapterController.prototype, "generateVideo", null);
159
+ ], VideoAdapterController.prototype, "generateVideo", null);
160
160
  __decorate([
161
- (0, common_1.Post)('generate-video-from-asset'),
162
- __param(0, (0, common_1.Body)('id')),
161
+ (0, common_1.Post)('generate-video-from-asset-id/:id'),
162
+ __param(0, (0, common_1.Param)('id')),
163
163
  __metadata("design:type", Function),
164
164
  __metadata("design:paramtypes", [String]),
165
165
  __metadata("design:returntype", Promise)
166
- ], VertexVideoImageAdapterController.prototype, "generateVideoFromAsset", null);
166
+ ], VideoAdapterController.prototype, "generateVideoFromAssetId", null);
167
167
  __decorate([
168
168
  (0, common_1.Get)('list-models-vertex'),
169
169
  (0, swagger_1.ApiQuery)({ name: 'provider', required: false, type: String, description: 'The provider for the service (e.g., "local", "google")' }),
170
170
  __metadata("design:type", Function),
171
171
  __metadata("design:paramtypes", []),
172
172
  __metadata("design:returntype", Promise)
173
- ], VertexVideoImageAdapterController.prototype, "listModelsVertex", null);
173
+ ], VideoAdapterController.prototype, "listModelsVertex", null);
174
174
  __decorate([
175
175
  (0, common_1.Post)('test-video'),
176
176
  (0, swagger_1.ApiQuery)({ name: 'provider', required: false, type: String, description: 'The provider for the video generation service (e.g., "google")' }),
177
177
  __metadata("design:type", Function),
178
178
  __metadata("design:paramtypes", []),
179
179
  __metadata("design:returntype", Promise)
180
- ], VertexVideoImageAdapterController.prototype, "testVideo", null);
180
+ ], VideoAdapterController.prototype, "testVideo", null);
181
181
  __decorate([
182
182
  (0, common_1.Post)('generate-veo-video'),
183
183
  __param(0, (0, common_1.Body)()),
184
184
  __metadata("design:type", Function),
185
185
  __metadata("design:paramtypes", [generate_video_dto_1.GenerateVideoDto]),
186
186
  __metadata("design:returntype", Promise)
187
- ], VertexVideoImageAdapterController.prototype, "generateVeoVideo", null);
187
+ ], VideoAdapterController.prototype, "generateVeoVideo", null);
188
188
  __decorate([
189
189
  (0, common_1.Get)('video-status/:operationName'),
190
190
  (0, swagger_1.ApiQuery)({ name: 'provider', required: false, type: String, description: 'The provider for the video status service (e.g., "google")' }),
@@ -193,16 +193,16 @@ __decorate([
193
193
  __metadata("design:type", Function),
194
194
  __metadata("design:paramtypes", [String, String]),
195
195
  __metadata("design:returntype", Promise)
196
- ], VertexVideoImageAdapterController.prototype, "getVideoStatus", null);
196
+ ], VideoAdapterController.prototype, "getVideoStatus", null);
197
197
  __decorate([
198
198
  (0, common_1.Post)('veo-video-status'),
199
199
  __param(0, (0, common_1.Body)(new common_1.ValidationPipe({ transform: true, whitelist: true }))),
200
200
  __metadata("design:type", Function),
201
201
  __metadata("design:paramtypes", [get_veo_video_status_dto_1.GetVeoVideoStatusDto]),
202
202
  __metadata("design:returntype", Promise)
203
- ], VertexVideoImageAdapterController.prototype, "getVeoVideoStatus", null);
204
- exports.VertexVideoImageAdapterController = VertexVideoImageAdapterController = VertexVideoImageAdapterController_1 = __decorate([
205
- (0, swagger_1.ApiTags)('Vertex Image/video Adapter'),
203
+ ], VideoAdapterController.prototype, "getVeoVideoStatus", null);
204
+ exports.VideoAdapterController = VideoAdapterController = VideoAdapterController_1 = __decorate([
205
+ (0, swagger_1.ApiTags)('[VIDEO] Adapter'),
206
206
  (0, common_1.Controller)('api/vertex-adapter/video'),
207
207
  (0, common_1.UseFilters)(nest_core_1.AllExceptionsHandler),
208
208
  __metadata("design:paramtypes", [vertex_image_video_service_1.VertexImageVideoService,
@@ -211,5 +211,5 @@ exports.VertexVideoImageAdapterController = VertexVideoImageAdapterController =
211
211
  comfy_video_service_1.ComfyVideoService,
212
212
  vertex_veo_genai_service_1.VertexVeoGenaiService,
213
213
  comfy_sdk_service_1.ComfySDKService])
214
- ], VertexVideoImageAdapterController);
215
- //# sourceMappingURL=video-gen-adapter.controller.js.map
214
+ ], VideoAdapterController);
215
+ //# sourceMappingURL=video-adapter.controller.js.map
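The asset-based video endpoint now reads the asset id from the path (`@Param('id')` on `generate-video-from-asset-id/:id`) instead of the request body. A hedged sketch of the updated call; `baseUrl` and `assetId` are placeholders and the runtime is assumed to provide global `fetch`:

```ts
// Sketch: POST with the id in the path, matching @Post('generate-video-from-asset-id/:id')
// on the renamed VideoAdapterController (controller route: api/vertex-adapter/video).
async function generateVideoFromAssetId(baseUrl: string, assetId: string) {
  const res = await fetch(`${baseUrl}/api/vertex-adapter/video/generate-video-from-asset-id/${assetId}`, {
    method: 'POST',
  });
  return res.json();
}
```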
@@ -0,0 +1,3 @@
1
+ export declare class GenerateVideoFromAssetIdDto {
2
+ id: string;
3
+ }
@@ -0,0 +1,28 @@
1
+ "use strict";
2
+ var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
3
+ var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
4
+ if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
5
+ else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
6
+ return c > 3 && r && Object.defineProperty(target, key, r), r;
7
+ };
8
+ var __metadata = (this && this.__metadata) || function (k, v) {
9
+ if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
10
+ };
11
+ Object.defineProperty(exports, "__esModule", { value: true });
12
+ exports.GenerateVideoFromAssetIdDto = void 0;
13
+ const swagger_1 = require("@nestjs/swagger");
14
+ const class_validator_1 = require("class-validator");
15
+ class GenerateVideoFromAssetIdDto {
16
+ id;
17
+ }
18
+ exports.GenerateVideoFromAssetIdDto = GenerateVideoFromAssetIdDto;
19
+ __decorate([
20
+ (0, swagger_1.ApiProperty)({
21
+ description: 'The ID of the asset to generate the video from.',
22
+ default: '69012cbccce4e10953f4b070',
23
+ }),
24
+ (0, class_validator_1.IsString)(),
25
+ (0, class_validator_1.IsNotEmpty)(),
26
+ __metadata("design:type", String)
27
+ ], GenerateVideoFromAssetIdDto.prototype, "id", void 0);
28
+ //# sourceMappingURL=generate-video-from-asset-id.dto.js.map
@@ -21,7 +21,7 @@ export interface ChatJsonResponse extends MessageLLM {
21
21
  export interface ChatLLMRequestAdapter {
22
22
  messages: MessageLLM[];
23
23
  provider?: string;
24
- model?: IAIModel;
24
+ model?: Partial<IAIModel>;
25
25
  returnJson?: boolean;
26
26
  keyType?: TierType;
27
27
  }
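With `model` now typed as `Partial<IAIModel>`, a chat request can carry only the fields the caller cares about, for example a `quality` hint alone, or a `provider`/`modelName` pair. A sketch under that assumption; the message contents, quality value, and model name below are illustrative, not values defined by the package:

```ts
// Shapes follow ChatLLMRequestAdapter; enum members and the exact MessageLLM fields are not shown in this diff.
const byQuality = {
  messages: [{ role: 'user', content: 'Summarize this release.' }],
  model: { quality: 'high' },            // quality-only hint; getBestModel maps it to a default Gemini model
  returnJson: false,
};

const byName = {
  messages: [{ role: 'user', content: 'Hello' }],
  model: { provider: 'google', modelName: 'gemini-2.5-flash-lite' }, // explicit pair is used as-is
};
```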
@@ -1,2 +1,9 @@
1
+ import { DynamicModule } from '@nestjs/common';
2
+ import { AiServicesClientOptions } from './services/ai-services.client';
3
+ interface NestVertexModuleOptions extends AiServicesClientOptions {
4
+ registerControllers?: boolean;
5
+ }
1
6
  export declare class NestVertexModule {
7
+ static forRoot(options: NestVertexModuleOptions): DynamicModule;
2
8
  }
9
+ export {};
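`NestVertexModule` is now registered through a `forRoot` dynamic module that takes the `AiServicesClient` options plus an optional `registerControllers` flag (defaulting to `true`). A minimal registration sketch, assuming the module is exported from the package root; the URL and the environment variable name are placeholders:

```ts
import { Module } from '@nestjs/common';
import { NestVertexModule } from '@dataclouder/nest-vertex'; // assumed export path

@Module({
  imports: [
    NestVertexModule.forRoot({
      apiBaseUrl: 'https://ai-services.example.com', // placeholder
      apiKey: process.env.AI_SERVICES_API_KEY,       // optional; env var name is illustrative
      registerControllers: true,                     // set false to consume only the services and client
    }),
  ],
})
export class AppModule {}
```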
@@ -5,6 +5,7 @@ var __decorate = (this && this.__decorate) || function (decorators, target, key,
5
5
  else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
6
6
  return c > 3 && r && Object.defineProperty(target, key, r), r;
7
7
  };
8
+ var NestVertexModule_1;
8
9
  Object.defineProperty(exports, "__esModule", { value: true });
9
10
  exports.NestVertexModule = void 0;
10
11
  const common_1 = require("@nestjs/common");
@@ -44,41 +45,30 @@ const vertex_gemini_tts_service_1 = require("./services/vertex-gemini-tts.servic
44
45
  const nest_auth_1 = require("@dataclouder/nest-auth");
45
46
  const groq_llm_service_1 = require("./services/llm/groq-llm.service");
46
47
  const groq_llm_controller_1 = require("./controllers/llm/groq-llm.controller");
47
- const video_gen_adapter_controller_1 = require("./controllers/video/video-gen-adapter.controller");
48
- let NestVertexModule = class NestVertexModule {
49
- };
50
- exports.NestVertexModule = NestVertexModule;
51
- exports.NestVertexModule = NestVertexModule = __decorate([
52
- (0, common_1.Module)({
53
- imports: [
54
- nest_storage_1.NestStorageModule,
55
- axios_1.HttpModule,
56
- nest_mongo_1.DCMongoDBModule,
57
- mongoose_1.MongooseModule.forFeature([{ name: generated_asset_entity_1.GeneratedAsset.name, schema: generated_asset_entity_1.GeneratedAssetSchema }]),
58
- comfyui_module_1.ComfyUIModule,
59
- nest_auth_1.NestAuthModule,
60
- ],
61
- providers: [
62
- vertex_image_video_service_1.VertexImageVideoService,
63
- vertex_tts_service_1.VertextTtsService,
64
- vertex_gemini_chat_service_1.GeminiChatService,
65
- vertex_image_service_1.ImageVertexService,
66
- ai_sdk_chat_service_1.AIChatService,
67
- adapter_llm_service_1.LLMAdapterService,
68
- adapter_image_gen_service_1.ImageGenAdapterService,
69
- adapter_audio_gen_service_1.AudioGenAdapterService,
70
- vertex_veo_gcp_service_1.VertexVeoGcpService,
71
- generated_asset_service_1.GeneratedAssetService,
72
- key_balancer_api_service_1.KeyBalancerClientService,
73
- vertex_veo_genai_service_1.VertexVeoGenaiService,
74
- google_genai_service_1.GoogleGenaiService,
75
- groq_service_1.GroqService,
76
- local_stt_service_1.LocalSttService,
77
- comfy_sdk_service_1.ComfySDKService,
78
- vertex_gemini_tts_service_1.VertexGeminiTtsService,
79
- groq_llm_service_1.GroqLlmService,
80
- ],
81
- exports: [
48
+ const video_adapter_controller_1 = require("./controllers/video/video-adapter.controller");
49
+ const adapter_stt_controller_1 = require("./controllers/stt/adapter-stt.controller");
50
+ const ai_services_client_1 = require("./services/ai-services.client");
51
+ let NestVertexModule = NestVertexModule_1 = class NestVertexModule {
52
+ static forRoot(options) {
53
+ const { registerControllers = true, ...clientOptions } = options;
54
+ const controllers = registerControllers
55
+ ? [
56
+ vertex_image_adapter_controller_1.VertexImageVideoAdapterController,
57
+ llm_gemini_chat_controller_1.GeminiChatController,
58
+ groq_llm_controller_1.GroqLlmController,
59
+ video_adapter_controller_1.VideoAdapterController,
60
+ vertex_tts_adapter_controller_1.VertexTtsAdapterController,
61
+ llm_adapter_controller_1.VertexAdapterLLMController,
62
+ generated_assets_controller_1.GeneratedAssetsController,
63
+ vertex_comfy_controller_1.VertexComfyController,
64
+ veo_video_controller_1.VertexVeoGenerationController,
65
+ groq_stt_controller_1.GroqSttController,
66
+ local_stt_controller_1.LocalSttController,
67
+ vertex_gemini_tts_controller_1.VertexGeminiTtsController,
68
+ adapter_stt_controller_1.AdapterSttController,
69
+ ]
70
+ : [];
71
+ const providers = [
82
72
  vertex_image_video_service_1.VertexImageVideoService,
83
73
  vertex_tts_service_1.VertextTtsService,
84
74
  vertex_gemini_chat_service_1.GeminiChatService,
@@ -94,25 +84,60 @@ exports.NestVertexModule = NestVertexModule = __decorate([
94
84
  google_genai_service_1.GoogleGenaiService,
95
85
  groq_service_1.GroqService,
96
86
  local_stt_service_1.LocalSttService,
97
- comfyui_module_1.ComfyUIModule,
98
87
  comfy_sdk_service_1.ComfySDKService,
99
88
  vertex_gemini_tts_service_1.VertexGeminiTtsService,
100
89
  groq_llm_service_1.GroqLlmService,
101
- ],
102
- controllers: [
103
- vertex_image_adapter_controller_1.VertexImageVideoAdapterController,
104
- llm_gemini_chat_controller_1.GeminiChatController,
105
- groq_llm_controller_1.GroqLlmController,
106
- video_gen_adapter_controller_1.VertexVideoImageAdapterController,
107
- vertex_tts_adapter_controller_1.VertexTtsAdapterController,
108
- llm_adapter_controller_1.VertexAdapterLLMController,
109
- generated_assets_controller_1.GeneratedAssetsController,
110
- vertex_comfy_controller_1.VertexComfyController,
111
- veo_video_controller_1.VertexVeoGenerationController,
112
- groq_stt_controller_1.GroqSttController,
113
- local_stt_controller_1.LocalSttController,
114
- vertex_gemini_tts_controller_1.VertexGeminiTtsController,
115
- ],
116
- })
90
+ {
91
+ provide: ai_services_client_1.AI_SERVICES_CLIENT_OPTIONS,
92
+ useValue: clientOptions,
93
+ },
94
+ {
95
+ provide: ai_services_client_1.AiServicesClient,
96
+ useFactory: (httpService, opts) => {
97
+ return new ai_services_client_1.AiServicesClient(httpService, opts);
98
+ },
99
+ inject: [axios_1.HttpService, ai_services_client_1.AI_SERVICES_CLIENT_OPTIONS],
100
+ },
101
+ ];
102
+ return {
103
+ module: NestVertexModule_1,
104
+ imports: [
105
+ nest_storage_1.NestStorageModule,
106
+ axios_1.HttpModule,
107
+ nest_mongo_1.DCMongoDBModule,
108
+ mongoose_1.MongooseModule.forFeature([{ name: generated_asset_entity_1.GeneratedAsset.name, schema: generated_asset_entity_1.GeneratedAssetSchema }]),
109
+ comfyui_module_1.ComfyUIModule,
110
+ nest_auth_1.NestAuthModule,
111
+ ],
112
+ providers,
113
+ exports: [
114
+ vertex_image_video_service_1.VertexImageVideoService,
115
+ vertex_tts_service_1.VertextTtsService,
116
+ vertex_gemini_chat_service_1.GeminiChatService,
117
+ vertex_image_service_1.ImageVertexService,
118
+ ai_sdk_chat_service_1.AIChatService,
119
+ adapter_llm_service_1.LLMAdapterService,
120
+ adapter_image_gen_service_1.ImageGenAdapterService,
121
+ adapter_audio_gen_service_1.AudioGenAdapterService,
122
+ vertex_veo_gcp_service_1.VertexVeoGcpService,
123
+ generated_asset_service_1.GeneratedAssetService,
124
+ key_balancer_api_service_1.KeyBalancerClientService,
125
+ vertex_veo_genai_service_1.VertexVeoGenaiService,
126
+ google_genai_service_1.GoogleGenaiService,
127
+ groq_service_1.GroqService,
128
+ local_stt_service_1.LocalSttService,
129
+ comfyui_module_1.ComfyUIModule,
130
+ comfy_sdk_service_1.ComfySDKService,
131
+ vertex_gemini_tts_service_1.VertexGeminiTtsService,
132
+ groq_llm_service_1.GroqLlmService,
133
+ ai_services_client_1.AiServicesClient,
134
+ ],
135
+ controllers,
136
+ };
137
+ }
138
+ };
139
+ exports.NestVertexModule = NestVertexModule;
140
+ exports.NestVertexModule = NestVertexModule = NestVertexModule_1 = __decorate([
141
+ (0, common_1.Module)({})
117
142
  ], NestVertexModule);
118
143
  //# sourceMappingURL=nest-vertex.module.js.map
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@dataclouder/nest-vertex",
3
- "version": "0.0.57",
3
+ "version": "0.0.59",
4
4
  "description": "NestJS Vertex AI library for Dataclouder",
5
5
  "author": "dataclouder",
6
6
  "license": "MIT",
@@ -17,7 +17,9 @@
17
17
  "peerDependencies": {
18
18
  "@nestjs/common": ">=11.0.0",
19
19
  "@nestjs/core": ">=11.0.0",
20
- "rxjs": ">=7.0.0"
20
+ "rxjs": ">=7.0.0",
21
+ "@nestjs/axios": ">=1.0.0",
22
+ "axios": ">=1.0.0"
21
23
  },
22
24
  "publishConfig": {
23
25
  "access": "public"
@@ -0,0 +1,46 @@
1
+ import { HttpService } from '@nestjs/axios';
2
+ import { GenerateTextDto } from '../dto/generate-text.dto';
3
+ import { GenerateVideoDto } from '../dto/generate-video.dto';
4
+ import { PromptDto } from '../dto/prompt.dto';
5
+ export declare const AI_SERVICES_CLIENT_OPTIONS = "AI_SERVICES_CLIENT_OPTIONS";
6
+ export interface AiServicesClientOptions {
7
+ apiBaseUrl: string;
8
+ apiKey?: string;
9
+ }
10
+ export declare class AiServicesClient {
11
+ private readonly httpService;
12
+ private readonly options;
13
+ video: Video;
14
+ llm: Llm;
15
+ image: Image;
16
+ tts: Tts;
17
+ stt: Stt;
18
+ constructor(httpService: HttpService, options: AiServicesClientOptions);
19
+ }
20
+ declare class BaseService {
21
+ protected readonly httpService: HttpService;
22
+ protected readonly options: AiServicesClientOptions;
23
+ constructor(httpService: HttpService, options: AiServicesClientOptions);
24
+ protected post<T>(endpoint: string, data: any): Promise<T>;
25
+ protected get<T>(endpoint: string): Promise<T>;
26
+ }
27
+ declare class Video extends BaseService {
28
+ generate(dto: GenerateVideoDto): Promise<any>;
29
+ generateFromAsset(id: string): Promise<any>;
30
+ generateVeo(dto: GenerateVideoDto): Promise<any>;
31
+ getStatus(operationName: string): Promise<any>;
32
+ getVeoStatus(operationName: string): Promise<any>;
33
+ }
34
+ declare class Llm extends BaseService {
35
+ chat(dto: GenerateTextDto): Promise<any>;
36
+ }
37
+ declare class Image extends BaseService {
38
+ generate(dto: PromptDto): Promise<any>;
39
+ }
40
+ declare class Tts extends BaseService {
41
+ synthesize(dto: any): Promise<any>;
42
+ }
43
+ declare class Stt extends BaseService {
44
+ transcribe(dto: any): Promise<any>;
45
+ }
46
+ export {};
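The new `AiServicesClient` groups the HTTP wrappers by capability (`video`, `llm`, `image`, `tts`, `stt`) and is wired up as a provider by `forRoot`, so it can be injected in the host application. A usage sketch, assuming the class is exported from the package root; the service name and DTO contents are illustrative:

```ts
import { Injectable } from '@nestjs/common';
import { AiServicesClient } from '@dataclouder/nest-vertex'; // assumed export path

@Injectable()
export class ReleaseAssetsService {
  constructor(private readonly aiClient: AiServicesClient) {}

  async process(assetId: string) {
    // Start a video generation for an existing asset through the remote adapter.
    const video = await this.aiClient.video.generateFromAsset(assetId);

    // Ask the LLM adapter for text; the exact GenerateTextDto shape is not shown in this diff.
    const summary = await this.aiClient.llm.chat({ prompt: 'Summarize the result' } as any);

    return { video, summary };
  }
}
```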
@@ -0,0 +1,114 @@
1
+ "use strict";
2
+ var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
3
+ var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
4
+ if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
5
+ else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
6
+ return c > 3 && r && Object.defineProperty(target, key, r), r;
7
+ };
8
+ var __metadata = (this && this.__metadata) || function (k, v) {
9
+ if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
10
+ };
11
+ var __param = (this && this.__param) || function (paramIndex, decorator) {
12
+ return function (target, key) { decorator(target, key, paramIndex); }
13
+ };
14
+ Object.defineProperty(exports, "__esModule", { value: true });
15
+ exports.AiServicesClient = exports.AI_SERVICES_CLIENT_OPTIONS = void 0;
16
+ const common_1 = require("@nestjs/common");
17
+ const axios_1 = require("@nestjs/axios");
18
+ const rxjs_1 = require("rxjs");
19
+ exports.AI_SERVICES_CLIENT_OPTIONS = 'AI_SERVICES_CLIENT_OPTIONS';
20
+ let AiServicesClient = class AiServicesClient {
21
+ httpService;
22
+ options;
23
+ video;
24
+ llm;
25
+ image;
26
+ tts;
27
+ stt;
28
+ constructor(httpService, options) {
29
+ this.httpService = httpService;
30
+ this.options = options;
31
+ this.video = new Video(this.httpService, this.options);
32
+ this.llm = new Llm(this.httpService, this.options);
33
+ this.image = new Image(this.httpService, this.options);
34
+ this.tts = new Tts(this.httpService, this.options);
35
+ this.stt = new Stt(this.httpService, this.options);
36
+ }
37
+ };
38
+ exports.AiServicesClient = AiServicesClient;
39
+ exports.AiServicesClient = AiServicesClient = __decorate([
40
+ (0, common_1.Injectable)(),
41
+ __param(1, (0, common_1.Inject)(exports.AI_SERVICES_CLIENT_OPTIONS)),
42
+ __metadata("design:paramtypes", [axios_1.HttpService, Object])
43
+ ], AiServicesClient);
44
+ class BaseService {
45
+ httpService;
46
+ options;
47
+ constructor(httpService, options) {
48
+ this.httpService = httpService;
49
+ this.options = options;
50
+ }
51
+ async post(endpoint, data) {
52
+ const headers = {};
53
+ if (this.options.apiKey) {
54
+ headers['Authorization'] = `Bearer ${this.options.apiKey}`;
55
+ }
56
+ const observable = this.httpService.post(`${this.options.apiBaseUrl}${endpoint}`, data, { headers });
57
+ return (0, rxjs_1.lastValueFrom)(observable.pipe((0, rxjs_1.map)(response => response.data)));
58
+ }
59
+ async get(endpoint) {
60
+ const headers = {};
61
+ if (this.options.apiKey) {
62
+ headers['Authorization'] = `Bearer ${this.options.apiKey}`;
63
+ }
64
+ const observable = this.httpService.get(`${this.options.apiBaseUrl}${endpoint}`, { headers });
65
+ return (0, rxjs_1.lastValueFrom)(observable.pipe((0, rxjs_1.map)(response => response.data)));
66
+ }
67
+ }
68
+ class Video extends BaseService {
69
+ async generate(dto) {
70
+ const endpoint = '/api/vertex-adapter/video/generate-video';
71
+ return this.post(endpoint, dto);
72
+ }
73
+ async generateFromAsset(id) {
74
+ const endpoint = '/api/vertex-adapter/video/generate-video-from-asset-id';
75
+ return this.post(endpoint, { id });
76
+ }
77
+ async generateVeo(dto) {
78
+ const endpoint = '/api/vertex-adapter/video/generate-veo-video';
79
+ return this.post(endpoint, dto);
80
+ }
81
+ async getStatus(operationName) {
82
+ const endpoint = `/api/vertex-adapter/video/video-status/${operationName}`;
83
+ return this.get(endpoint);
84
+ }
85
+ async getVeoStatus(operationName) {
86
+ const endpoint = '/api/vertex-adapter/video/veo-video-status';
87
+ return this.post(endpoint, { operationName });
88
+ }
89
+ }
90
+ class Llm extends BaseService {
91
+ async chat(dto) {
92
+ const endpoint = '/api/ai-services/adapter/llm/chat';
93
+ return this.post(endpoint, dto);
94
+ }
95
+ }
96
+ class Image extends BaseService {
97
+ async generate(dto) {
98
+ const endpoint = '/api/ai-services/adapter/image/generate-image';
99
+ return this.post(endpoint, dto);
100
+ }
101
+ }
102
+ class Tts extends BaseService {
103
+ async synthesize(dto) {
104
+ const endpoint = '/api/ai-services/adapter/tts/synthesize';
105
+ return this.post(endpoint, dto);
106
+ }
107
+ }
108
+ class Stt extends BaseService {
109
+ async transcribe(dto) {
110
+ const endpoint = '/api/ai-services/adapter/stt/transcribe';
111
+ return this.post(endpoint, dto);
112
+ }
113
+ }
114
+ //# sourceMappingURL=ai-services.client.js.map
@@ -15,6 +15,7 @@ export declare class GeminiChatService {
15
15
  chatStream(messages: MessageLLM[], model?: string, keyType?: TierType): Promise<AsyncIterable<ChatMessageDict>>;
16
16
  listModels(): Promise<Record<string, string>[]>;
17
17
  getDefaultQualityModel(quality: EModelQuality): string;
18
+ private getBestModel;
18
19
  private _extractJsonWithRecovery;
19
20
  chatAndExtractJson(messages: MessageLLM[], model?: string, keyType?: TierType): Promise<ChatJsonResponse>;
20
21
  describeImageByUrl(dto: DescribeImageRequestAdapter): Promise<any>;
@@ -93,8 +93,8 @@ let GeminiChatService = GeminiChatService_1 = class GeminiChatService {
93
93
  this.logger.debug(`Received Gemini response text. ${response?.text.slice(0, 50).replace(/\n/g, '')} ...`);
94
94
  const responseText = response?.text ?? '';
95
95
  const tokens = {
96
- input: response?.usageMetadata?.candidatesTokenCount,
97
- output: response?.usageMetadata?.promptTokenCount,
96
+ input: response?.usageMetadata?.promptTokenCount,
97
+ output: response?.usageMetadata?.candidatesTokenCount,
98
98
  total: response?.usageMetadata?.totalTokenCount,
99
99
  };
100
100
  return {
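For reference, the corrected mapping matches the Google GenAI usage metadata: `promptTokenCount` counts the input (prompt) tokens and `candidatesTokenCount` counts the generated output tokens. A tiny sketch of the resulting shape; the field names come from the diff, the numbers are invented:

```ts
// Example only; counts are made up.
const usageMetadata = { promptTokenCount: 128, candidatesTokenCount: 512, totalTokenCount: 640 };
const tokens = {
  input: usageMetadata.promptTokenCount,      // prompt → input
  output: usageMetadata.candidatesTokenCount, // candidates → output
  total: usageMetadata.totalTokenCount,
};
```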
@@ -201,6 +201,19 @@ let GeminiChatService = GeminiChatService_1 = class GeminiChatService {
201
201
  return gemini_models_1.GeminiModels.Gemini2_5Pro;
202
202
  }
203
203
  }
204
+ getBestModel(conversation) {
205
+ if (!conversation.model) {
206
+ return { provider: 'google', modelName: gemini_models_1.GeminiModels.Gemini2_5Lite };
207
+ }
208
+ const { quality, modelName, provider } = conversation.model;
209
+ if (quality) {
210
+ return { provider: 'google', modelName: this.getDefaultQualityModel(quality) };
211
+ }
212
+ if (modelName && provider) {
213
+ return { provider, modelName };
214
+ }
215
+ return { provider: 'google', modelName: gemini_models_1.GeminiModels.Gemini2_5Lite };
216
+ }
204
217
  async _extractJsonWithRecovery(responseText, model, keyType) {
205
218
  try {
206
219
  const json = (0, llm_models_1.extractJsonFromResponse)(responseText);
@@ -286,36 +299,30 @@ let GeminiChatService = GeminiChatService_1 = class GeminiChatService {
286
299
  }
287
300
  async chatWithConversation(conversation) {
288
301
  const startTime = Date.now();
289
- if (!conversation?.model) {
290
- conversation.model = { provider: 'google', modelName: gemini_models_1.GeminiModels.Gemini2_5Lite, id: 'no-id' };
291
- }
302
+ const { provider, modelName } = this.getBestModel(conversation);
292
303
  const tierType = conversation.tierType || key_balancer_models_1.TierType.TIER_1;
293
- if (conversation?.model?.quality) {
294
- conversation.model.provider = 'google';
295
- conversation.model.modelName = this.getDefaultQualityModel(conversation.model.quality);
296
- }
297
304
  const returnJson = conversation.returnJson;
298
305
  if (returnJson) {
299
- const obj = await this.chatAndExtractJson(conversation.messages, conversation.model.modelName, tierType);
306
+ const obj = await this.chatAndExtractJson(conversation.messages, modelName, tierType);
300
307
  const endTime = Date.now();
301
308
  const processTime = (endTime - startTime) / 1000;
302
309
  const metadata = {
303
310
  type: 'json',
304
- provider: conversation.model.provider,
305
- model: conversation.model.modelName,
311
+ provider,
312
+ model: modelName,
306
313
  processTime,
307
314
  ...obj?.metadata,
308
315
  };
309
316
  return { content: obj.json, role: adapter_models_1.ChatRole.Assistant, metadata };
310
317
  }
311
318
  else {
312
- const response = await this.chat(conversation.messages, conversation.model.modelName, tierType);
319
+ const response = await this.chat(conversation.messages, modelName, tierType);
313
320
  const endTime = Date.now();
314
321
  const processTime = (endTime - startTime) / 1000;
315
322
  return {
316
323
  content: response.content,
317
324
  role: response.role,
318
- metadata: { provider: conversation.model.provider, model: conversation.model.modelName, processTime, tokens: response.metadata.tokens },
325
+ metadata: { provider, model: modelName, processTime, tokens: response.metadata.tokens },
319
326
  };
320
327
  }
321
328
  }
@@ -51,8 +51,10 @@ let LocalSttService = LocalSttService_1 = class LocalSttService {
51
51
  logger = new common_1.Logger(LocalSttService_1.name);
52
52
  openai;
53
53
  constructor() {
54
+ const aiServerHost = process.env.AI_LAB_HOST;
55
+ console.log('Connecting to host AI_LAB_HOST', aiServerHost);
54
56
  this.openai = new openai_1.default({
55
- baseURL: 'http://192.168.2.224:8000/v1',
57
+ baseURL: `${aiServerHost}:3171/v1`,
56
58
  apiKey: '',
57
59
  });
58
60
  }
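`LocalSttService` no longer hard-codes the Whisper server address; it builds the OpenAI-compatible base URL from the `AI_LAB_HOST` environment variable and port 3171. A hedged sketch of the expected configuration; the host value and fallback are placeholders:

```ts
// AI_LAB_HOST is expected to carry the scheme and host but not the port, e.g. AI_LAB_HOST=http://192.168.2.224
const aiLabHost = process.env.AI_LAB_HOST ?? 'http://localhost'; // fallback is illustrative
const localSttBaseUrl = `${aiLabHost}:3171/v1`;                  // matches the baseURL built in the constructor
```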
@@ -103,13 +105,22 @@ let LocalSttService = LocalSttService_1 = class LocalSttService {
103
105
  this.logger.warn(`Could not determine valid extension for mime type ${mimeType}. Using original filename: ${originalFileName}. Transcription may fail if the filename lacks a supported extension.`);
104
106
  }
105
107
  try {
108
+ console.log(' -> Request to...', this.openai.baseURL);
106
109
  const result = await this.openai.audio.transcriptions.create({
107
110
  model: 'rtlingo/mobiuslabsgmbh-faster-whisper-large-v3-turbo',
108
111
  file: await (0, openai_1.toFile)(fileBuffer, effectiveFileName, { type: mimeType }),
109
112
  response_format: 'verbose_json',
110
113
  timestamp_granularities: ['word'],
111
114
  });
112
- this.logger.log(`Transcription successful for file: ${effectiveFileName}`);
115
+ if (result?.segments) {
116
+ delete result.segments;
117
+ }
118
+ if (result?.words?.length) {
119
+ result.words.forEach((word) => {
120
+ delete word?.probability;
121
+ });
122
+ }
123
+ this.logger.log(`Transcription successful for file: ${result.text}, lang: ${result.language}, duration: ${result.duration}`);
113
124
  return result;
114
125
  }
115
126
  catch (error) {