konecty-sdk-python 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,13 @@
1
+ # Python-generated files
2
+ __pycache__/
3
+ *.py[oc]
4
+ build/
5
+ dist/
6
+ wheels/
7
+ *.egg-info
8
+
9
+ # Virtual environments
10
+ .venv
11
+
12
+ # PyPI
13
+ .pypirc
@@ -0,0 +1 @@
1
+ 3.11
@@ -0,0 +1,30 @@
1
+ Metadata-Version: 2.4
2
+ Name: konecty-sdk-python
3
+ Version: 0.1.0
4
+ Summary: Konecty SDK Python
5
+ Author-email: Leonardo Leal <leonardo.leal@konecty.com>, Derotino Silveira <derotino.silveira@konecty.com>
6
+ License: MIT
7
+ Keywords: api,konecty,sdk
8
+ Classifier: Development Status :: 4 - Beta
9
+ Classifier: Intended Audience :: Developers
10
+ Classifier: License :: OSI Approved :: MIT License
11
+ Classifier: Programming Language :: Python :: 3
12
+ Classifier: Programming Language :: Python :: 3.11
13
+ Classifier: Programming Language :: Python :: 3.12
14
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
15
+ Requires-Python: >=3.11
16
+ Requires-Dist: aiohttp>=3.11.11
17
+ Requires-Dist: asyncio>=3.4.3
18
+ Requires-Dist: black>=24.10.0
19
+ Requires-Dist: email-validator>=2.2.0
20
+ Requires-Dist: inquirer>=3.4.0
21
+ Requires-Dist: pydantic>=2.11.4
22
+ Requires-Dist: pymongo>=4.10.1
23
+ Requires-Dist: requests>=2.32.3
24
+ Requires-Dist: rich>=13.9.4
25
+ Requires-Dist: typing-extensions>=4.12.2
26
+ Description-Content-Type: text/markdown
27
+
28
+ ## Konecty Python SDK
29
+
30
+ > 🛠️ Work in progress
@@ -0,0 +1,3 @@
1
+ ## Konecty Python SDK
2
+
3
+ > 🛠️ Work in progress
@@ -0,0 +1,5 @@
1
+ """Konecty metadata management package."""
2
+
3
+ from .cli import apply_command, backup_command, pull_command
4
+
5
+ __all__ = ["apply_command", "backup_command", "pull_command"]
@@ -0,0 +1,7 @@
1
+ """CLI tools for Konecty metadata management."""
2
+
3
+ from .apply import apply_command
4
+ from .backup import backup_command
5
+ from .pull import pull_command
6
+
7
+ __all__ = ["apply_command", "backup_command", "pull_command"]
@@ -0,0 +1,455 @@
1
+ # /// script
2
+ # requires-python = ">=3.11"
3
+ # dependencies = [
4
+ # "click",
5
+ # "inquirer",
6
+ # "pymongo",
7
+ # "rich",
8
+ # ]
9
+ # ///
10
+ """Script para aplicar alterações locais ao MongoDB."""
11
+
12
+ import json
13
+ from pathlib import Path
14
+ from typing import Any, Dict, List, Literal, Optional, Set, Tuple, TypedDict, cast
15
+
16
+ import inquirer
17
+ from pymongo import MongoClient
18
+ from rich.console import Console
19
+ from rich.progress import Progress
20
+ from rich.table import Table
21
+
22
+
23
class DocFiles(TypedDict):
    """Paths of every local metadata file found for one document, keyed by kind."""

    document: List[Path]  # the main <doc>/document.json (at most one entry)
    view: List[Path]  # <doc>/view/*.json
    list: List[Path]  # <doc>/list/*.json
    pivot: List[Path]  # <doc>/pivot/*.json
    access: List[Path]  # <doc>/access/*.json
    hook: List[Path]  # hook source files (any extension) under <doc>/hook/
30
+
31
+
32
# Every metadata kind a document directory may contain.
DocType = Literal["document", "view", "list", "pivot", "access", "hook"]
# Subset of kinds stored as per-document JSON subdirectories.
MetaType = Literal["view", "list", "pivot", "access"]


# Shared Rich console used for all CLI output in this module.
console = Console()
37
+
38
+
39
def load_json_file(file_path: Path) -> dict:
    """Parse *file_path* as JSON; on any failure, report it and return {}.

    Best-effort by design: a missing or malformed file is printed to the
    console but never aborts the run.
    """
    try:
        parsed = json.loads(file_path.read_text())
    except Exception as error:
        console.print(f"[red]Erro ao ler arquivo {file_path}[/red]: {str(error)}")
        return {}
    return parsed
46
+
47
+
48
def find_metadata_files(metadata_dir: Path) -> Dict[str, DocFiles]:
    """Scan *metadata_dir* and group every metadata file by document name.

    Expected layout per document directory:
        <doc>/document.json
        <doc>/{view,list,pivot,access}/*.json
        <doc>/hook/*.*

    Directories that contain no matching files are omitted from the result.
    """
    found: Dict[str, DocFiles] = {}

    for doc_dir in metadata_dir.iterdir():
        if not doc_dir.is_dir():
            continue

        files: DocFiles = {
            "document": [],
            "view": [],
            "list": [],
            "pivot": [],
            "access": [],
            "hook": [],
        }

        # The single main document file, when present.
        main_doc = doc_dir / "document.json"
        if main_doc.exists():
            files["document"].append(main_doc)

        # One JSON file per view/list/pivot/access record, sorted for stable order.
        for meta_type in ("view", "list", "pivot", "access"):
            sub_dir = doc_dir / meta_type
            if sub_dir.exists():
                files[meta_type].extend(sorted(sub_dir.glob("*.json")))

        # Hook sources may use any extension.
        hooks_dir = doc_dir / "hook"
        if hooks_dir.exists():
            files["hook"].extend(sorted(hooks_dir.glob("*.*")))

        if any(file_list for file_list in files.values()):
            found[doc_dir.name] = files

    return found
86
+
87
+
88
def is_equal_documents(doc1: dict, doc2: dict) -> bool:
    """Compare two documents while ignoring MongoDB bookkeeping fields.

    The _id and _created*/_updated* audit fields differ between a local file
    and its remote counterpart even when the content matches, so they are
    excluded from the comparison. Neither input is mutated.
    """
    ignored = {"_id", "_createdAt", "_updatedAt", "_createdBy", "_updatedBy"}

    def _strip(doc: dict) -> dict:
        return {key: value for key, value in doc.items() if key not in ignored}

    return _strip(doc1) == _strip(doc2)
103
+
104
+
105
async def apply_document(
    collection,
    doc_name: str,
    doc_files: DocFiles,
    dry_run: bool = False,
) -> Tuple[List[str], List[str], List[str]]:
    """Apply one document's local metadata files to MongoDB.

    Args:
        collection: MetaObjects collection to write to.
        doc_name: Name of the document being applied.
        doc_files: Local files for this document, grouped by metadata kind.
        dry_run: When True, only report what would be written.

    Returns:
        An (applied, errors, skipped) tuple of human-readable status lines.
    """
    applied = []
    errors = []
    skipped = []

    # Main document (document.json): upsert by name/type unless the remote
    # copy is already identical.
    if doc_files["document"]:
        doc_data = load_json_file(doc_files["document"][0])
        if doc_data:
            existing_doc = collection.find_one({"name": doc_name, "type": {"$in": ["composite", "document"]}})

            if existing_doc and is_equal_documents(doc_data, existing_doc):
                skipped.append(f"⚡ {doc_name} (document) [identical]")
            else:
                if not dry_run:
                    try:
                        collection.replace_one(
                            {"name": doc_name, "type": {"$in": ["composite", "document"]}}, doc_data, upsert=True
                        )
                        applied.append(f"✓ {doc_name} (document)")
                    except Exception as error:
                        errors.append(f"✗ {doc_name} (document): {str(error)}")
                else:
                    applied.append(f"✓ {doc_name} (document) [dry-run]")

    # Views, lists, pivots and access records: upsert each JSON file, keyed by
    # (name, type, document), unless the remote copy is identical.
    existing_data: dict[str, Any] | None = None
    data: Any

    for type_name in ("view", "list", "pivot", "access"):
        # cast() only narrows the TypedDict key for type checkers; no runtime effect.
        type_name_key = cast(MetaType, type_name)
        for file_path in doc_files[type_name_key]:
            data = load_json_file(file_path)
            if data:
                existing_data = collection.find_one({"name": data["name"], "type": type_name, "document": doc_name})

                if existing_data and is_equal_documents(data, existing_data):
                    skipped.append(f"⚡ {doc_name}/{type_name}/{data['name']} [identical]")
                else:
                    if not dry_run:
                        try:
                            collection.replace_one(
                                {"name": data["name"], "type": type_name, "document": doc_name}, data, upsert=True
                            )
                            applied.append(f"✓ {doc_name}/{type_name}/{data['name']}")
                        except Exception as error:
                            errors.append(f"✗ {doc_name}/{type_name}/{data['name']}: {str(error)}")
                    else:
                        applied.append(f"✓ {doc_name}/{type_name}/{data['name']} [dry-run]")

    # Hooks: each file's text is stored as a field on the main document,
    # keyed by the file's stem (name without extension).
    for file_path in doc_files["hook"]:
        data = file_path.read_text()
        if data:
            existing_data = collection.find_one({"name": doc_name, "type": {"$in": ["composite", "document"]}})
            hook_name = file_path.stem
            # NOTE(review): hooks are only written when the main document already
            # exists remotely; when it is missing, its hooks are silently skipped.
            if existing_data:
                if existing_data.get(hook_name, None) == data:
                    skipped.append(f"⚡ {doc_name}/{hook_name} [identical]")
                else:
                    if not dry_run:
                        try:
                            collection.update_one(
                                {"name": doc_name, "type": {"$in": ["composite", "document"]}},
                                {"$set": {hook_name: data}},
                            )
                            applied.append(f"✓ {doc_name}/{hook_name}")
                        except Exception as error:
                            errors.append(f"✗ {doc_name}/{hook_name}: {str(error)}")
                    else:
                        applied.append(f"✓ {doc_name}/{hook_name} [dry-run]")

    return applied, errors, skipped
184
+
185
+
186
async def prune_documents(
    collection,
    local_docs: Set[str],
    dry_run: bool = False,
) -> Tuple[List[str], List[str]]:
    """Delete remote documents that have no local counterpart.

    For each removed document the related records (those whose ``document``
    field references it) are deleted as well. Returns a (pruned, errors)
    tuple of human-readable status lines.
    """
    removed: List[str] = []
    failures: List[str] = []

    # Names of the top-level documents currently in the database.
    remote_names = {
        record["name"]
        for record in collection.find(
            {"type": {"$in": ["composite", "document"]}, "name": {"$ne": "_id"}}, {"name": 1}
        )
    }

    for name in remote_names - local_docs:
        if dry_run:
            removed.append(f"✓ {name} [dry-run]")
            continue
        try:
            # Delete the main document first, then everything attached to it.
            collection.delete_one({"name": name, "type": {"$in": ["composite", "document"]}})
            collection.delete_many({"document": name})
            removed.append(f"✓ {name}")
        except Exception as error:
            failures.append(f"✗ {name}: {str(error)}")

    return removed, failures
217
+
218
+
219
async def apply_namespace(
    collection,
    metadata_dir: Path,
    dry_run: bool = False,
) -> Tuple[List[str], List[str], List[str]]:
    """Apply the Namespace record from ``metadata_dir/Namespace.json``.

    Falls back to a minimal default namespace when the file is missing.
    Returns an (applied, errors, skipped) tuple of status lines.
    """
    applied = []
    errors = []
    skipped = []

    # Defaults used when Namespace.json is absent; also merged underneath the
    # file's data when the record is inserted for the first time.
    base_namespace = {
        "_id": "Namespace",
        "name": "konecty",
        "type": "namespace",
        "ns": "konecty",
        "active": True,
    }

    namespace_file = metadata_dir / "Namespace.json"
    if not namespace_file.exists():
        namespace_data = base_namespace
    else:
        namespace_data = load_json_file(namespace_file)
        if not namespace_data:
            return [], ["✗ Namespace.json: Arquivo vazio ou inválido"], []

    existing_namespace = collection.find_one({"_id": "Namespace"})

    def compare_namespace(ns1: dict, ns2: dict) -> bool:
        """Compare two namespaces ignoring the _id, name and ns fields."""
        ns1_copy = ns1.copy()
        ns2_copy = ns2.copy()

        ignore_fields = ["_id", "name", "ns"]
        for field in ignore_fields:
            ns1_copy.pop(field, None)
            ns2_copy.pop(field, None)

        return ns1_copy == ns2_copy

    if existing_namespace and compare_namespace(namespace_data, existing_namespace):
        skipped.append("⚡ Namespace.json [identical]")
    else:
        if not dry_run:
            try:
                # Update in place when the record exists; otherwise insert the
                # file's data layered over the defaults.
                if existing_namespace:
                    collection.update_one({"_id": "Namespace"}, {"$set": namespace_data})
                else:
                    collection.insert_one({**base_namespace, **namespace_data})
                applied.append("✓ Namespace.json")
            except Exception as error:
                errors.append(f"✗ Namespace.json: {str(error)}")
        else:
            applied.append("✓ Namespace.json [dry-run]")

    return applied, errors, skipped
275
+
276
+
277
+ async def apply_command(
278
+ metadata_dir: str = "metadata",
279
+ host: str = "localhost",
280
+ port: int = 27017,
281
+ database: str = "default",
282
+ username: Optional[str] = None,
283
+ password: Optional[str] = None,
284
+ replicaset: Optional[str] = None,
285
+ document: Optional[str] = None,
286
+ prune: bool = False,
287
+ dry_run: bool = False,
288
+ direct_connection: bool = False,
289
+ retry_writes: bool = True,
290
+ w: str = "majority",
291
+ ) -> None:
292
+ """Aplica alterações locais ao MongoDB."""
293
+ metadata_path = Path(metadata_dir).resolve()
294
+ if not metadata_path.exists():
295
+ console.print(f"[red]Diretório {metadata_dir} não encontrado[/red]")
296
+ return
297
+
298
+ uri_params = []
299
+ if replicaset:
300
+ uri_params.extend(
301
+ [
302
+ f"replicaSet={replicaset}",
303
+ f"directConnection={'true' if direct_connection else 'false'}",
304
+ f"retryWrites={'true' if retry_writes else 'false'}",
305
+ f"w={w}",
306
+ ]
307
+ )
308
+
309
+ uri_suffix = f"?{'&'.join(uri_params)}" if uri_params else ""
310
+
311
+ if username and password:
312
+ uri = f"mongodb://{username}:{password}@{host}:{port}/admin{uri_suffix}"
313
+ else:
314
+ uri = f"mongodb://{host}:{port}{uri_suffix}"
315
+
316
+ client = MongoClient(
317
+ uri, serverSelectionTimeoutMS=30000, connectTimeoutMS=20000, socketTimeoutMS=20000, maxPoolSize=1
318
+ )
319
+ db = client[database]
320
+ collection = db["MetaObjects"]
321
+
322
+ # Se o documento for Namespace, aplica apenas o Namespace
323
+ if document == "Namespace":
324
+ namespace_applied, namespace_errors, namespace_skipped = await apply_namespace(
325
+ collection, metadata_path, dry_run
326
+ )
327
+ table = Table(title="Resultados da Aplicação")
328
+ table.add_column("Documento")
329
+ table.add_column("Status")
330
+ table.add_column("Pulados")
331
+ table.add_column("Erros")
332
+ if namespace_applied or namespace_errors or namespace_skipped:
333
+ table.add_row(
334
+ "@Namespace", "\n".join(namespace_applied), "\n".join(namespace_skipped), "\n".join(namespace_errors)
335
+ )
336
+ client.close()
337
+ console.print(table)
338
+ return
339
+
340
+ # Encontra todos os arquivos de metadados
341
+ all_files = find_metadata_files(metadata_path)
342
+
343
+ if not all_files:
344
+ console.print("[yellow]Nenhum arquivo de metadados encontrado[/yellow]")
345
+ return
346
+
347
+ # Se nenhum documento específico foi fornecido, pergunta ao usuário
348
+ if document is None:
349
+ choices = [
350
+ {"name": "Todos", "value": "all"},
351
+ *[{"name": name, "value": name} for name in sorted(all_files.keys())],
352
+ ]
353
+
354
+ questions = [
355
+ inquirer.List(
356
+ "document",
357
+ message="Qual documento você deseja aplicar?",
358
+ choices=[choice["name"] for choice in choices],
359
+ )
360
+ ]
361
+
362
+ answers = inquirer.prompt(questions)
363
+ if answers is None:
364
+ console.print("[red]Operação cancelada pelo usuário[/red]")
365
+ return
366
+
367
+ document = "all" if answers["document"] == "Todos" else answers["document"]
368
+
369
+ # Determina quais documentos processar
370
+ docs_to_process = list(all_files.keys()) if document == "all" else [document]
371
+
372
+ if document != "all" and document not in all_files:
373
+ console.print(f"[red]Documento {document} não encontrado[/red]")
374
+ return
375
+
376
+ # Aplica o Namespace primeiro
377
+ namespace_applied, namespace_errors, namespace_skipped = await apply_namespace(collection, metadata_path, dry_run)
378
+
379
+ table = Table(title="Resultados da Aplicação")
380
+ table.add_column("Documento")
381
+ table.add_column("Status")
382
+ table.add_column("Pulados")
383
+ table.add_column("Erros")
384
+
385
+ if namespace_applied or namespace_errors or namespace_skipped:
386
+ table.add_row(
387
+ "@Namespace", "\n".join(namespace_applied), "\n".join(namespace_skipped), "\n".join(namespace_errors)
388
+ )
389
+
390
+ with Progress() as progress:
391
+ task = progress.add_task("[cyan]Aplicando alterações...", total=len(docs_to_process))
392
+
393
+ for doc_name in docs_to_process:
394
+ applied, errors, skipped = await apply_document(collection, doc_name, all_files[doc_name], dry_run)
395
+
396
+ table.add_row(doc_name, "\n".join(applied), "\n".join(skipped), "\n".join(errors) if errors else "")
397
+
398
+ progress.update(task, advance=1)
399
+
400
+ if prune:
401
+ pruned, prune_errors = await prune_documents(collection, set(all_files.keys()), dry_run)
402
+ if pruned or prune_errors:
403
+ table.add_row("Prune", "\n".join(pruned), "", "\n".join(prune_errors) if prune_errors else "")
404
+
405
+ client.close()
406
+ console.print(table)
407
+
408
+
409
def main():
    """Command-line entry point: parse arguments and run `apply_command`."""
    import argparse
    import asyncio
    import sys

    arg_parser = argparse.ArgumentParser(description="Aplica alterações locais ao MongoDB")
    arg_parser.add_argument("--host", default="localhost", help="Host do MongoDB")
    arg_parser.add_argument("--port", type=int, default=27017, help="Porta do MongoDB")
    arg_parser.add_argument("--database", required=True, help="Nome do banco de dados")
    arg_parser.add_argument("--metadata-dir", default="metadata", help="Diretório dos metadados")
    arg_parser.add_argument("--username", help="Usuário do MongoDB")
    arg_parser.add_argument("--password", help="Senha do MongoDB")
    arg_parser.add_argument("--replicaset", help="Nome do ReplicaSet do MongoDB (ex: rs0)")
    arg_parser.add_argument("--document", help="Nome do documento específico para aplicar")
    arg_parser.add_argument("--prune", action="store_true", help="Remove documentos que não existem localmente")
    arg_parser.add_argument("--dry-run", action="store_true", help="Executa sem fazer alterações")
    arg_parser.add_argument("--direct-connection", action="store_true", help="Usa conexão direta com o MongoDB")
    arg_parser.add_argument("--no-retry-writes", action="store_true", help="Desativa retry writes")
    arg_parser.add_argument("--write-concern", default="majority", help="Write concern (ex: majority, 1)")
    options = arg_parser.parse_args()

    # Force the selector event loop policy on Windows.
    if sys.platform == "win32":
        asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())

    asyncio.run(
        apply_command(
            metadata_dir=options.metadata_dir,
            host=options.host,
            port=options.port,
            database=options.database,
            username=options.username,
            password=options.password,
            replicaset=options.replicaset,
            document=options.document,
            prune=options.prune,
            dry_run=options.dry_run,
            direct_connection=options.direct_connection,
            retry_writes=not options.no_retry_writes,
            w=options.write_concern,
        )
    )


if __name__ == "__main__":
    main()
@@ -0,0 +1,129 @@
1
+ """Script para fazer backup dos documentos do MongoDB."""
2
+
3
+ import json
4
+ import tarfile
5
+ import tempfile
6
+ from datetime import datetime
7
+ from pathlib import Path
8
+ from typing import Optional
9
+
10
+ from pymongo import MongoClient
11
+ from rich.console import Console
12
+ from rich.progress import Progress
13
+
14
+ console = Console()
15
+
16
+
17
async def backup_command(
    host: str = "localhost",
    port: int = 27017,
    database: str = "default",
    output: str = "backups",
    username: Optional[str] = None,
    password: Optional[str] = None,
    replicaset: Optional[str] = None,
    version: Optional[str] = None,
) -> None:
    """Dump every MetaObjects document into a timestamped .tar.gz archive.

    The archive contains a `metadata/` root with one directory per document
    holding `document.json` plus one subdirectory per related record type.

    Args:
        host/port/database/username/password/replicaset: Connection settings.
        output: Directory where the archive is written (created if needed).
        version: Optional label appended to the archive file name.
    """
    output_dir = Path(output).resolve()
    output_dir.mkdir(parents=True, exist_ok=True)

    # Archive name: backup_<timestamp>[_<version>].tar.gz
    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    version_label = f"_{version}" if version else ""
    backup_file = output_dir / f"backup_{timestamp}{version_label}.tar.gz"

    uri_params = []
    if replicaset:
        uri_params.extend([f"replicaSet={replicaset}", "directConnection=false", "retryWrites=true", "w=majority"])
    uri_suffix = f"?{'&'.join(uri_params)}" if uri_params else ""

    if username and password:
        uri = f"mongodb://{username}:{password}@{host}:{port}/admin{uri_suffix}"
    else:
        uri = f"mongodb://{host}:{port}{uri_suffix}"

    client = MongoClient(
        uri, serverSelectionTimeoutMS=30000, connectTimeoutMS=20000, socketTimeoutMS=20000, maxPoolSize=1
    )
    # try/finally so the client is also closed when the dump raises.
    try:
        collection = client[database]["MetaObjects"]

        # Stage the tree in a temporary directory, then pack it at the end.
        with tempfile.TemporaryDirectory() as temp_dir:
            temp_path = Path(temp_dir)

            documents = list(collection.find({"type": {"$in": ["composite", "document"]}}))

            with Progress() as progress:
                task = progress.add_task("[cyan]Gerando backup...", total=len(documents))

                for doc in documents:
                    doc_path = temp_path / doc["name"]
                    doc_path.mkdir(parents=True, exist_ok=True)

                    # default=str: Mongo documents can carry values json.dumps
                    # cannot serialize natively (e.g. ObjectId, datetime); the
                    # original call raised TypeError on such documents.
                    doc_file = doc_path / "document.json"
                    doc_file.write_text(json.dumps(doc, indent=2, default=str))

                    # Related view/list/pivot/access records, nested by type.
                    for rel in collection.find({"document": doc["name"]}):
                        type_path = doc_path / rel["type"]
                        type_path.mkdir(exist_ok=True)

                        rel_file = type_path / f"{rel['name']}.json"
                        rel_file.write_text(json.dumps(rel, indent=2, default=str))

                    progress.update(task, advance=1)

            with tarfile.open(backup_file, "w:gz") as tar:
                tar.add(temp_dir, arcname="metadata")
    finally:
        client.close()

    console.print(f"[green]Backup concluído com sucesso:[/green] [cyan]{backup_file}[/cyan]")
91
+
92
+
93
def main():
    """Command-line entry point: parse arguments and run `backup_command`."""
    import argparse
    import asyncio
    import sys

    arg_parser = argparse.ArgumentParser(description="Gera backup dos documentos do MongoDB")
    arg_parser.add_argument("--host", default="localhost", help="Host do MongoDB")
    arg_parser.add_argument("--port", type=int, default=27017, help="Porta do MongoDB")
    arg_parser.add_argument("--database", required=True, help="Nome do banco de dados")
    arg_parser.add_argument("--output", default="backups", help="Diretório para salvar o backup")
    arg_parser.add_argument("--username", help="Usuário do MongoDB")
    arg_parser.add_argument("--password", help="Senha do MongoDB")
    arg_parser.add_argument("--replicaset", help="Nome do ReplicaSet do MongoDB (ex: rs0)")
    arg_parser.add_argument("--version", help="Rótulo de versão para o arquivo de backup")
    options = arg_parser.parse_args()

    # Force the selector event loop policy on Windows.
    if sys.platform == "win32":
        asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())

    asyncio.run(
        backup_command(
            host=options.host,
            port=options.port,
            database=options.database,
            output=options.output,
            username=options.username,
            password=options.password,
            replicaset=options.replicaset,
            version=options.version,
        )
    )


if __name__ == "__main__":
    main()