django-cfg 1.5.14__py3-none-any.whl → 1.5.20__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of django-cfg has been flagged as possibly problematic by the registry scanner.
Files changed (53)
  1. django_cfg/__init__.py +1 -1
  2. django_cfg/apps/business/accounts/serializers/profile.py +42 -0
  3. django_cfg/apps/business/support/serializers.py +3 -2
  4. django_cfg/apps/integrations/centrifugo/apps.py +2 -1
  5. django_cfg/apps/integrations/centrifugo/codegen/generators/typescript_thin/templates/rpc-client.ts.j2 +151 -12
  6. django_cfg/apps/integrations/centrifugo/management/commands/generate_centrifugo_clients.py +2 -2
  7. django_cfg/apps/integrations/centrifugo/services/__init__.py +6 -0
  8. django_cfg/apps/integrations/centrifugo/services/client/__init__.py +6 -1
  9. django_cfg/apps/integrations/centrifugo/services/client/direct_client.py +282 -0
  10. django_cfg/apps/integrations/centrifugo/services/publisher.py +371 -0
  11. django_cfg/apps/integrations/centrifugo/services/token_generator.py +122 -0
  12. django_cfg/apps/integrations/centrifugo/urls.py +8 -0
  13. django_cfg/apps/integrations/centrifugo/views/__init__.py +2 -0
  14. django_cfg/apps/integrations/centrifugo/views/testing_api.py +0 -79
  15. django_cfg/apps/integrations/centrifugo/views/token_api.py +101 -0
  16. django_cfg/apps/integrations/centrifugo/views/wrapper.py +257 -0
  17. django_cfg/apps/integrations/grpc/centrifugo/__init__.py +29 -0
  18. django_cfg/apps/integrations/grpc/centrifugo/bridge.py +277 -0
  19. django_cfg/apps/integrations/grpc/centrifugo/config.py +167 -0
  20. django_cfg/apps/integrations/grpc/centrifugo/demo.py +626 -0
  21. django_cfg/apps/integrations/grpc/centrifugo/test_publish.py +229 -0
  22. django_cfg/apps/integrations/grpc/centrifugo/transformers.py +89 -0
  23. django_cfg/apps/integrations/grpc/interceptors/__init__.py +3 -1
  24. django_cfg/apps/integrations/grpc/interceptors/centrifugo.py +541 -0
  25. django_cfg/apps/integrations/grpc/management/commands/compile_proto.py +105 -0
  26. django_cfg/apps/integrations/grpc/management/commands/generate_protos.py +55 -0
  27. django_cfg/apps/integrations/grpc/management/commands/rungrpc.py +311 -7
  28. django_cfg/apps/integrations/grpc/management/proto/__init__.py +3 -0
  29. django_cfg/apps/integrations/grpc/management/proto/compiler.py +194 -0
  30. django_cfg/apps/integrations/grpc/services/discovery.py +7 -1
  31. django_cfg/apps/integrations/grpc/utils/SERVER_LOGGING.md +164 -0
  32. django_cfg/apps/integrations/grpc/utils/streaming_logger.py +206 -5
  33. django_cfg/apps/system/dashboard/serializers/config.py +95 -9
  34. django_cfg/apps/system/dashboard/serializers/statistics.py +9 -4
  35. django_cfg/apps/system/frontend/views.py +87 -6
  36. django_cfg/core/builders/security_builder.py +1 -0
  37. django_cfg/core/generation/integration_generators/api.py +2 -0
  38. django_cfg/modules/django_client/core/generator/typescript/generator.py +26 -0
  39. django_cfg/modules/django_client/core/generator/typescript/hooks_generator.py +7 -1
  40. django_cfg/modules/django_client/core/generator/typescript/models_generator.py +5 -0
  41. django_cfg/modules/django_client/core/generator/typescript/schemas_generator.py +11 -0
  42. django_cfg/modules/django_client/core/generator/typescript/templates/fetchers/fetchers.ts.jinja +1 -0
  43. django_cfg/modules/django_client/core/generator/typescript/templates/fetchers/function.ts.jinja +29 -1
  44. django_cfg/modules/django_client/core/generator/typescript/templates/hooks/hooks.ts.jinja +4 -0
  45. django_cfg/modules/django_client/core/ir/schema.py +15 -1
  46. django_cfg/modules/django_client/core/parser/base.py +12 -0
  47. django_cfg/pyproject.toml +1 -1
  48. django_cfg/static/frontend/admin.zip +0 -0
  49. {django_cfg-1.5.14.dist-info → django_cfg-1.5.20.dist-info}/METADATA +1 -1
  50. {django_cfg-1.5.14.dist-info → django_cfg-1.5.20.dist-info}/RECORD +53 -37
  51. {django_cfg-1.5.14.dist-info → django_cfg-1.5.20.dist-info}/WHEEL +0 -0
  52. {django_cfg-1.5.14.dist-info → django_cfg-1.5.20.dist-info}/entry_points.txt +0 -0
  53. {django_cfg-1.5.14.dist-info → django_cfg-1.5.20.dist-info}/licenses/LICENSE +0 -0
django_cfg/apps/integrations/grpc/interceptors/centrifugo.py (new file)
@@ -0,0 +1,541 @@
+"""
+Centrifugo Publishing Interceptor for gRPC.
+
+Automatically publishes gRPC call metadata to Centrifugo WebSocket channels.
+Works alongside CentrifugoBridgeMixin for complete event visibility.
+"""
+
+from __future__ import annotations
+
+import logging
+import time
+from datetime import datetime, timezone as tz
+from typing import Callable, Optional, Any, Dict
+
+import grpc
+import grpc.aio
+
+logger = logging.getLogger(__name__)
+
+
+class CentrifugoInterceptor(grpc.aio.ServerInterceptor):
+    """
+    Async gRPC interceptor that publishes call metadata to Centrifugo.
+
+    Automatically publishes:
+    - RPC method invocations (start/end)
+    - Timing information
+    - Status codes
+    - Message counts
+    - Error information
+    - Client peer information
+
+    Works in parallel with CentrifugoBridgeMixin:
+    - Interceptor: Publishes RPC-level metadata (method, timing, status)
+    - Mixin: Publishes message-level data (protobuf field contents)
+
+    Example:
+        ```python
+        # In Django settings
+        GRPC_FRAMEWORK = {
+            "SERVER_INTERCEPTORS": [
+                "django_cfg.apps.integrations.grpc.interceptors.CentrifugoInterceptor",
+            ]
+        }
+        ```
+
+    Channel naming:
+        - RPC calls: `grpc#{service}#{method}#meta`
+        - Errors: `grpc#{service}#{method}#errors`
+
+    Published metadata:
+        {
+            "event_type": "rpc_start" | "rpc_end" | "rpc_error",
+            "method": "/service.Service/Method",
+            "service": "service.Service",
+            "method_name": "Method",
+            "peer": "ipv4:127.0.0.1:12345",
+            "timestamp": "2025-11-05T...",
+            "duration_ms": 123.45,  # Only on rpc_end
+            "status": "OK" | "ERROR",
+            "message_count": 10,  # For streaming
+            "error": {...},  # Only on error
+        }
+    """
+
+    def __init__(
+        self,
+        enabled: bool = True,
+        publish_start: bool = False,
+        publish_end: bool = True,
+        publish_errors: bool = True,
+        publish_stream_messages: bool = False,
+        channel_template: str = "grpc#{service}#{method}#meta",
+        error_channel_template: str = "grpc#{service}#{method}#errors",
+        metadata: Optional[Dict[str, Any]] = None,
+    ):
+        """
+        Initialize Centrifugo interceptor.
+
+        Args:
+            enabled: Enable/disable publishing
+            publish_start: Publish RPC start events
+            publish_end: Publish RPC completion events
+            publish_errors: Publish RPC error events
+            publish_stream_messages: Publish each streaming message (can be noisy)
+            channel_template: Channel name template for metadata
+            error_channel_template: Channel name template for errors
+            metadata: Additional metadata to include in all events
+        """
+        self.enabled = enabled
+        self.publish_start = publish_start
+        self.publish_end = publish_end
+        self.publish_errors = publish_errors
+        self.publish_stream_messages = publish_stream_messages
+        self.channel_template = channel_template
+        self.error_channel_template = error_channel_template
+        self.metadata = metadata or {}
+
+        self._centrifugo_publisher: Optional[Any] = None
+        self._initialize_publisher()
+
+    def _initialize_publisher(self):
+        """Initialize Centrifugo publisher lazily with direct client."""
+        if not self.enabled:
+            logger.debug("CentrifugoInterceptor disabled")
+            return
+
+        try:
+            from django_cfg.apps.integrations.centrifugo.services import get_centrifugo_publisher
+            # Use Publisher with DirectClient (use_direct=True by default)
+            # This bypasses wrapper and goes directly to Centrifugo
+            self._centrifugo_publisher = get_centrifugo_publisher()
+            logger.info("CentrifugoInterceptor initialized with DirectCentrifugoClient")
+        except Exception as e:
+            logger.warning(
+                f"Failed to initialize Centrifugo publisher in interceptor: {e}. "
+                f"Interceptor will continue without publishing."
+            )
+            self.enabled = False
+
+    async def intercept_service(
+        self,
+        continuation: Callable,
+        handler_call_details: grpc.HandlerCallDetails,
+    ) -> grpc.RpcMethodHandler:
+        """
+        Intercept async gRPC service call for Centrifugo publishing.
+
+        Args:
+            continuation: Function to invoke the next interceptor or handler
+            handler_call_details: Details about the RPC call
+
+        Returns:
+            RPC method handler with Centrifugo publishing
+        """
+        if not self.enabled or not self._centrifugo_publisher:
+            # Pass through without interception
+            return await continuation(handler_call_details)
+
+        method_name = handler_call_details.method
+        peer = self._extract_peer(handler_call_details.invocation_metadata)
+        service_name, method_short = self._parse_method_name(method_name)
+
+        # Publish start event
+        if self.publish_start:
+            await self._publish_event(
+                event_type="rpc_start",
+                method=method_name,
+                service=service_name,
+                method_name=method_short,
+                peer=peer,
+            )
+
+        # Get handler and wrap it
+        handler = await continuation(handler_call_details)
+
+        if handler is None:
+            logger.warning(f"[CentrifugoInterceptor] No handler found for {method_name}")
+            return None
+
+        # Wrap handler methods to publish events
+        return self._wrap_handler(handler, method_name, service_name, method_short, peer)
+
+    def _wrap_handler(
+        self,
+        handler: grpc.RpcMethodHandler,
+        method_name: str,
+        service_name: str,
+        method_short: str,
+        peer: str,
+    ) -> grpc.RpcMethodHandler:
+        """
+        Wrap handler to add Centrifugo publishing.
+
+        Args:
+            handler: Original RPC method handler
+            method_name: Full gRPC method name
+            service_name: Service name
+            method_short: Short method name
+            peer: Client peer information
+
+        Returns:
+            Wrapped RPC method handler
+        """
+        # Determine handler type and wrap accordingly
+        if handler.unary_unary:
+            wrapped = self._wrap_unary_unary(
+                handler.unary_unary, method_name, service_name, method_short, peer
+            )
+            return grpc.unary_unary_rpc_method_handler(
+                wrapped,
+                request_deserializer=handler.request_deserializer,
+                response_serializer=handler.response_serializer,
+            )
+
+        if handler.unary_stream:
+            wrapped = self._wrap_unary_stream(
+                handler.unary_stream, method_name, service_name, method_short, peer
+            )
+            return grpc.unary_stream_rpc_method_handler(
+                wrapped,
+                request_deserializer=handler.request_deserializer,
+                response_serializer=handler.response_serializer,
+            )
+
+        if handler.stream_unary:
+            wrapped = self._wrap_stream_unary(
+                handler.stream_unary, method_name, service_name, method_short, peer
+            )
+            return grpc.stream_unary_rpc_method_handler(
+                wrapped,
+                request_deserializer=handler.request_deserializer,
+                response_serializer=handler.response_serializer,
+            )
+
+        if handler.stream_stream:
+            wrapped = self._wrap_stream_stream(
+                handler.stream_stream, method_name, service_name, method_short, peer
+            )
+            return grpc.stream_stream_rpc_method_handler(
+                wrapped,
+                request_deserializer=handler.request_deserializer,
+                response_serializer=handler.response_serializer,
+            )
+
+        return handler
+
+    def _wrap_unary_unary(self, behavior, method_name, service_name, method_short, peer):
+        """Wrap unary-unary RPC."""
+        async def wrapper(request, context):
+            start_time = time.time()
+            try:
+                response = await behavior(request, context)
+                duration = (time.time() - start_time) * 1000
+
+                if self.publish_end:
+                    await self._publish_event(
+                        event_type="rpc_end",
+                        method=method_name,
+                        service=service_name,
+                        method_name=method_short,
+                        peer=peer,
+                        duration_ms=duration,
+                        status="OK",
+                    )
+
+                return response
+            except Exception as e:
+                duration = (time.time() - start_time) * 1000
+
+                if self.publish_errors:
+                    await self._publish_error(
+                        method=method_name,
+                        service=service_name,
+                        method_name=method_short,
+                        peer=peer,
+                        duration_ms=duration,
+                        error=e,
+                    )
+                raise
+
+        return wrapper
+
+    def _wrap_unary_stream(self, behavior, method_name, service_name, method_short, peer):
+        """Wrap unary-stream RPC."""
+        async def wrapper(request, context):
+            start_time = time.time()
+            message_count = 0
+            try:
+                async for response in behavior(request, context):
+                    message_count += 1
+
+                    if self.publish_stream_messages:
+                        await self._publish_event(
+                            event_type="stream_message",
+                            method=method_name,
+                            service=service_name,
+                            method_name=method_short,
+                            peer=peer,
+                            message_count=message_count,
+                            direction="server_to_client",
+                        )
+
+                    yield response
+
+                duration = (time.time() - start_time) * 1000
+
+                if self.publish_end:
+                    await self._publish_event(
+                        event_type="rpc_end",
+                        method=method_name,
+                        service=service_name,
+                        method_name=method_short,
+                        peer=peer,
+                        duration_ms=duration,
+                        status="OK",
+                        message_count=message_count,
+                    )
+
+            except Exception as e:
+                duration = (time.time() - start_time) * 1000
+
+                if self.publish_errors:
+                    await self._publish_error(
+                        method=method_name,
+                        service=service_name,
+                        method_name=method_short,
+                        peer=peer,
+                        duration_ms=duration,
+                        error=e,
+                        message_count=message_count,
+                    )
+                raise
+
+        return wrapper
+
+    def _wrap_stream_unary(self, behavior, method_name, service_name, method_short, peer):
+        """Wrap stream-unary RPC."""
+        async def wrapper(request_iterator, context):
+            start_time = time.time()
+            message_count = 0
+            try:
+                # Count incoming messages
+                requests = []
+                async for req in request_iterator:
+                    message_count += 1
+
+                    if self.publish_stream_messages:
+                        await self._publish_event(
+                            event_type="stream_message",
+                            method=method_name,
+                            service=service_name,
+                            method_name=method_short,
+                            peer=peer,
+                            message_count=message_count,
+                            direction="client_to_server",
+                        )
+
+                    requests.append(req)
+
+                # Process
+                async def request_iter():
+                    for r in requests:
+                        yield r
+
+                response = await behavior(request_iter(), context)
+                duration = (time.time() - start_time) * 1000
+
+                if self.publish_end:
+                    await self._publish_event(
+                        event_type="rpc_end",
+                        method=method_name,
+                        service=service_name,
+                        method_name=method_short,
+                        peer=peer,
+                        duration_ms=duration,
+                        status="OK",
+                        message_count=message_count,
+                    )
+
+                return response
+            except Exception as e:
+                duration = (time.time() - start_time) * 1000
+
+                if self.publish_errors:
+                    await self._publish_error(
+                        method=method_name,
+                        service=service_name,
+                        method_name=method_short,
+                        peer=peer,
+                        duration_ms=duration,
+                        error=e,
+                        message_count=message_count,
+                    )
+                raise
+
+        return wrapper
+
+    def _wrap_stream_stream(self, behavior, method_name, service_name, method_short, peer):
+        """Wrap bidirectional streaming RPC."""
+        async def wrapper(request_iterator, context):
+            start_time = time.time()
+            in_count = 0
+            out_count = 0
+            try:
+                # Wrap request iterator to count messages
+                async def counting_iterator():
+                    nonlocal in_count
+                    async for req in request_iterator:
+                        in_count += 1
+
+                        if self.publish_stream_messages:
+                            await self._publish_event(
+                                event_type="stream_message",
+                                method=method_name,
+                                service=service_name,
+                                method_name=method_short,
+                                peer=peer,
+                                message_count=in_count,
+                                direction="client_to_server",
+                            )
+
+                        yield req
+
+                # Stream responses
+                async for response in behavior(counting_iterator(), context):
+                    out_count += 1
+
+                    if self.publish_stream_messages:
+                        await self._publish_event(
+                            event_type="stream_message",
+                            method=method_name,
+                            service=service_name,
+                            method_name=method_short,
+                            peer=peer,
+                            message_count=out_count,
+                            direction="server_to_client",
+                        )
+
+                    yield response
+
+                duration = (time.time() - start_time) * 1000
+
+                if self.publish_end:
+                    await self._publish_event(
+                        event_type="rpc_end",
+                        method=method_name,
+                        service=service_name,
+                        method_name=method_short,
+                        peer=peer,
+                        duration_ms=duration,
+                        status="OK",
+                        in_message_count=in_count,
+                        out_message_count=out_count,
+                    )
+
+            except Exception as e:
+                duration = (time.time() - start_time) * 1000
+
+                if self.publish_errors:
+                    await self._publish_error(
+                        method=method_name,
+                        service=service_name,
+                        method_name=method_short,
+                        peer=peer,
+                        duration_ms=duration,
+                        error=e,
+                        in_message_count=in_count,
+                        out_message_count=out_count,
+                    )
+                raise
+
+        return wrapper
+
+    async def _publish_event(self, **data):
+        """Publish event to Centrifugo via Publisher."""
+        try:
+            # Build channel name
+            channel = self.channel_template.format(
+                service=data.get('service', 'unknown'),
+                method=data.get('method_name', 'unknown'),
+            )
+
+            # Use Publisher's publish_grpc_event for type-safe gRPC events
+            await self._centrifugo_publisher.publish_grpc_event(
+                channel=channel,
+                method=data.get('method', ''),
+                status=data.get('status', 'UNKNOWN'),
+                duration_ms=data.get('duration_ms', 0.0),
+                peer=data.get('peer'),
+                metadata={
+                    'event_type': data.get('event_type'),
+                    **self.metadata,
+                },
+                **{k: v for k, v in data.items() if k not in ['method', 'status', 'duration_ms', 'peer', 'event_type', 'service', 'method_name']},
+            )
+
+            logger.debug(f"Published gRPC event to {channel}: {data.get('event_type')}")
+
+        except Exception as e:
+            logger.warning(f"Failed to publish gRPC event to Centrifugo: {e}")
+
+    async def _publish_error(self, error: Exception, **data):
+        """Publish error to Centrifugo via Publisher."""
+        try:
+            # Build error channel name
+            channel = self.error_channel_template.format(
+                service=data.get('service', 'unknown'),
+                method=data.get('method_name', 'unknown'),
+            )
+
+            # Use Publisher's publish_grpc_event with error status
+            await self._centrifugo_publisher.publish_grpc_event(
+                channel=channel,
+                method=data.get('method', ''),
+                status='ERROR',
+                duration_ms=data.get('duration_ms', 0.0),
+                peer=data.get('peer'),
+                metadata={
+                    'event_type': 'rpc_error',
+                    'error': {
+                        'type': type(error).__name__,
+                        'message': str(error),
+                    },
+                    **self.metadata,
+                },
+                **{k: v for k, v in data.items() if k not in ['method', 'duration_ms', 'peer', 'error', 'service', 'method_name']},
+            )
+
+            logger.debug(f"Published gRPC error to {channel}")
+
+        except Exception as e:
+            logger.warning(f"Failed to publish gRPC error to Centrifugo: {e}")
+
+    @staticmethod
+    def _extract_peer(invocation_metadata) -> str:
+        """Extract peer information from metadata."""
+        if invocation_metadata:
+            for key, value in invocation_metadata:
+                if key == "x-forwarded-for":
+                    return value
+        return "unknown"
+
+    @staticmethod
+    def _parse_method_name(full_method: str) -> tuple[str, str]:
+        """
+        Parse full gRPC method name.
+
+        Args:
+            full_method: e.g., "/trading_bots.BotStreamingService/ConnectBot"
+
+        Returns:
+            (service_name, method_name): ("trading_bots.BotStreamingService", "ConnectBot")
+        """
+        parts = full_method.strip("/").split("/")
+        if len(parts) == 2:
+            return parts[0], parts[1]
+        return "unknown", full_method
+
+
+__all__ = ["CentrifugoInterceptor"]
django_cfg/apps/integrations/grpc/management/commands/compile_proto.py (new file)
@@ -0,0 +1,105 @@
+"""
+Django management command to compile .proto files to Python.
+
+Usage:
+    # Compile single proto file
+    python manage.py compile_proto path/to/file.proto
+
+    # Compile with custom output directory
+    python manage.py compile_proto path/to/file.proto --output-dir generated/
+
+    # Auto-fix imports (change 'import X' to 'from . import X')
+    python manage.py compile_proto path/to/file.proto --fix-imports
+
+    # Compile all proto files in a directory
+    python manage.py compile_proto path/to/protos/ --recursive
+"""
+
+import logging
+from pathlib import Path
+
+from django.core.management.base import BaseCommand, CommandError
+
+from django_cfg.apps.integrations.grpc.management.proto.compiler import ProtoCompiler
+
+logger = logging.getLogger(__name__)
+
+
+class Command(BaseCommand):
+    help = "Compile .proto files to Python using grpc_tools.protoc"
+
+    def add_arguments(self, parser):
+        parser.add_argument(
+            "proto_path",
+            type=str,
+            help="Path to .proto file or directory containing .proto files",
+        )
+        parser.add_argument(
+            "--output-dir",
+            type=str,
+            default=None,
+            help="Output directory for generated files (default: same as proto file)",
+        )
+        parser.add_argument(
+            "--proto-path",
+            type=str,
+            default=None,
+            help="Additional proto import path (passed to protoc -I flag)",
+        )
+        parser.add_argument(
+            "--fix-imports",
+            action="store_true",
+            default=True,
+            help="Fix imports in generated _grpc.py files (default: True)",
+        )
+        parser.add_argument(
+            "--no-fix-imports",
+            action="store_false",
+            dest="fix_imports",
+            help="Disable import fixing",
+        )
+        parser.add_argument(
+            "--recursive",
+            action="store_true",
+            help="Recursively compile all .proto files in directory",
+        )
+
+    def handle(self, *args, **options):
+        proto_path = Path(options["proto_path"])
+        output_dir = Path(options["output_dir"]) if options["output_dir"] else None
+        proto_import_path = Path(options["proto_path"]) if options.get("proto_path") else None
+        fix_imports = options["fix_imports"]
+        recursive = options["recursive"]
+
+        if not proto_path.exists():
+            raise CommandError(f"Path does not exist: {proto_path}")
+
+        # Create compiler
+        compiler = ProtoCompiler(
+            output_dir=output_dir,
+            proto_import_path=proto_import_path,
+            fix_imports=fix_imports,
+            verbose=True,
+        )
+
+        self.stdout.write("")
+
+        # Compile proto file(s)
+        if proto_path.is_file():
+            success = compiler.compile_file(proto_path)
+            if not success:
+                raise CommandError(f"Failed to compile {proto_path}")
+        else:
+            success_count, failure_count = compiler.compile_directory(
+                proto_path,
+                recursive=recursive,
+            )
+
+            if failure_count > 0:
+                raise CommandError(
+                    f"Failed to compile {failure_count} proto file(s) "
+                    f"({success_count} succeeded)"
+                )
+
+        self.stdout.write("")
+        self.stdout.write(self.style.SUCCESS("🎉 Done! All proto files compiled successfully."))
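
The command's CLI usage is documented in its docstring above; for completeness, it can also be invoked programmatically. The sketch below uses Django's standard `call_command` and only the options defined in `add_arguments()`; the proto paths are hypothetical and assume a project where django-cfg's gRPC integration (and therefore this command) is installed.

```python
# Hedged sketch: driving the new compile_proto command from Python code.
from django.core.management import call_command

# Compile a single .proto next to its source, with import fixing (the default):
call_command("compile_proto", "protos/bots.proto")

# Compile a whole directory tree into a separate output directory:
call_command(
    "compile_proto",
    "protos/",
    recursive=True,
    output_dir="generated/",
)
```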