konecty-sdk-python 0.1.0__tar.gz → 0.3.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {konecty_sdk_python-0.1.0 → konecty_sdk_python-0.3.0}/PKG-INFO +12 -1
- konecty_sdk_python-0.3.0/README.md +14 -0
- konecty_sdk_python-0.3.0/cli/__init__.py +38 -0
- {konecty_sdk_python-0.1.0 → konecty_sdk_python-0.3.0}/cli/apply.py +120 -70
- {konecty_sdk_python-0.1.0 → konecty_sdk_python-0.3.0}/cli/backup.py +36 -37
- konecty_sdk_python-0.3.0/cli/pull.py +265 -0
- {konecty_sdk_python-0.1.0 → konecty_sdk_python-0.3.0}/pyproject.toml +4 -1
- konecty_sdk_python-0.1.0/README.md +0 -3
- konecty_sdk_python-0.1.0/cli/__init__.py +0 -7
- konecty_sdk_python-0.1.0/cli/pull.py +0 -262
- {konecty_sdk_python-0.1.0 → konecty_sdk_python-0.3.0}/.gitignore +0 -0
- {konecty_sdk_python-0.1.0 → konecty_sdk_python-0.3.0}/.python-version +0 -0
- {konecty_sdk_python-0.1.0 → konecty_sdk_python-0.3.0}/__init__.py +0 -0
- {konecty_sdk_python-0.1.0 → konecty_sdk_python-0.3.0}/lib/client.py +0 -0
- {konecty_sdk_python-0.1.0 → konecty_sdk_python-0.3.0}/lib/file_manager.py +0 -0
- {konecty_sdk_python-0.1.0 → konecty_sdk_python-0.3.0}/lib/filters.py +0 -0
- {konecty_sdk_python-0.1.0 → konecty_sdk_python-0.3.0}/lib/model.py +0 -0
- {konecty_sdk_python-0.1.0 → konecty_sdk_python-0.3.0}/lib/settings.py +0 -0
- {konecty_sdk_python-0.1.0 → konecty_sdk_python-0.3.0}/lib/types.py +0 -0
- {konecty_sdk_python-0.1.0 → konecty_sdk_python-0.3.0}/uv.lock +0 -0
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: konecty-sdk-python
|
|
3
|
-
Version: 0.
|
|
3
|
+
Version: 0.3.0
|
|
4
4
|
Summary: Konecty SDK Python
|
|
5
5
|
Author-email: Leonardo Leal <leonardo.leal@konecty.com>, Derotino Silveira <derotino.silveira@konecty.com>
|
|
6
6
|
License: MIT
|
|
@@ -28,3 +28,14 @@ Description-Content-Type: text/markdown
|
|
|
28
28
|
## Konecty Python SDK
|
|
29
29
|
|
|
30
30
|
> 🛠️ Work in progress
|
|
31
|
+
|
|
32
|
+
#### Build & Publish
|
|
33
|
+
|
|
34
|
+
It is needed to increase the version number on the [pyproject](./pyproject.toml) file.
|
|
35
|
+
|
|
36
|
+
```sh
|
|
37
|
+
|
|
38
|
+
uv build
|
|
39
|
+
uvx twine upload --config-file .pypirc --skip-existing dist/*
|
|
40
|
+
|
|
41
|
+
```
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
## Konecty Python SDK
|
|
2
|
+
|
|
3
|
+
> 🛠️ Work in progress
|
|
4
|
+
|
|
5
|
+
#### Build & Publish
|
|
6
|
+
|
|
7
|
+
It is needed to increase the version number on the [pyproject](./pyproject.toml) file.
|
|
8
|
+
|
|
9
|
+
```sh
|
|
10
|
+
|
|
11
|
+
uv build
|
|
12
|
+
uvx twine upload --config-file .pypirc --skip-existing dist/*
|
|
13
|
+
|
|
14
|
+
```
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
"""CLI tools for Konecty metadata management."""
|
|
2
|
+
|
|
3
|
+
import click
|
|
4
|
+
from rich.console import Console
|
|
5
|
+
|
|
6
|
+
console = Console()
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
@click.group()
|
|
10
|
+
def cli():
|
|
11
|
+
"""Konecty CLI tools for metadata management."""
|
|
12
|
+
pass
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
# Import commands after cli group is defined to avoid circular imports
|
|
16
|
+
from .apply import apply_command
|
|
17
|
+
from .backup import backup_command
|
|
18
|
+
from .pull import pull_command
|
|
19
|
+
|
|
20
|
+
# Add commands to the group
|
|
21
|
+
cli.add_command(apply_command)
|
|
22
|
+
cli.add_command(backup_command)
|
|
23
|
+
cli.add_command(pull_command)
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def main():
|
|
27
|
+
"""Entry point for the CLI."""
|
|
28
|
+
import asyncio
|
|
29
|
+
import sys
|
|
30
|
+
|
|
31
|
+
if sys.platform == "win32":
|
|
32
|
+
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
|
|
33
|
+
|
|
34
|
+
asyncio.run(cli())
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
if __name__ == "__main__":
|
|
38
|
+
main()
|
|
@@ -13,6 +13,7 @@ import json
|
|
|
13
13
|
from pathlib import Path
|
|
14
14
|
from typing import Any, Dict, List, Literal, Optional, Set, Tuple, TypedDict, cast
|
|
15
15
|
|
|
16
|
+
import click
|
|
16
17
|
import inquirer
|
|
17
18
|
from pymongo import MongoClient
|
|
18
19
|
from rich.console import Console
|
|
@@ -117,7 +118,9 @@ async def apply_document(
|
|
|
117
118
|
if doc_files["document"]:
|
|
118
119
|
doc_data = load_json_file(doc_files["document"][0])
|
|
119
120
|
if doc_data:
|
|
120
|
-
existing_doc = collection.find_one(
|
|
121
|
+
existing_doc = collection.find_one(
|
|
122
|
+
{"name": doc_name, "type": {"$in": ["composite", "document"]}}
|
|
123
|
+
)
|
|
121
124
|
|
|
122
125
|
if existing_doc and is_equal_documents(doc_data, existing_doc):
|
|
123
126
|
skipped.append(f"⚡ {doc_name} (document) [identical]")
|
|
@@ -125,7 +128,12 @@ async def apply_document(
|
|
|
125
128
|
if not dry_run:
|
|
126
129
|
try:
|
|
127
130
|
collection.replace_one(
|
|
128
|
-
{
|
|
131
|
+
{
|
|
132
|
+
"name": doc_name,
|
|
133
|
+
"type": {"$in": ["composite", "document"]},
|
|
134
|
+
},
|
|
135
|
+
doc_data,
|
|
136
|
+
upsert=True,
|
|
129
137
|
)
|
|
130
138
|
applied.append(f"✓ {doc_name} (document)")
|
|
131
139
|
except Exception as error:
|
|
@@ -142,27 +150,43 @@ async def apply_document(
|
|
|
142
150
|
for file_path in doc_files[type_name_key]:
|
|
143
151
|
data = load_json_file(file_path)
|
|
144
152
|
if data:
|
|
145
|
-
existing_data = collection.find_one(
|
|
153
|
+
existing_data = collection.find_one(
|
|
154
|
+
{"name": data["name"], "type": type_name, "document": doc_name}
|
|
155
|
+
)
|
|
146
156
|
|
|
147
157
|
if existing_data and is_equal_documents(data, existing_data):
|
|
148
|
-
skipped.append(
|
|
158
|
+
skipped.append(
|
|
159
|
+
f"⚡ {doc_name}/{type_name}/{data['name']} [identical]"
|
|
160
|
+
)
|
|
149
161
|
else:
|
|
150
162
|
if not dry_run:
|
|
151
163
|
try:
|
|
152
164
|
collection.replace_one(
|
|
153
|
-
{
|
|
165
|
+
{
|
|
166
|
+
"name": data["name"],
|
|
167
|
+
"type": type_name,
|
|
168
|
+
"document": doc_name,
|
|
169
|
+
},
|
|
170
|
+
data,
|
|
171
|
+
upsert=True,
|
|
154
172
|
)
|
|
155
173
|
applied.append(f"✓ {doc_name}/{type_name}/{data['name']}")
|
|
156
174
|
except Exception as error:
|
|
157
|
-
errors.append(
|
|
175
|
+
errors.append(
|
|
176
|
+
f"✗ {doc_name}/{type_name}/{data['name']}: {str(error)}"
|
|
177
|
+
)
|
|
158
178
|
else:
|
|
159
|
-
applied.append(
|
|
179
|
+
applied.append(
|
|
180
|
+
f"✓ {doc_name}/{type_name}/{data['name']} [dry-run]"
|
|
181
|
+
)
|
|
160
182
|
|
|
161
183
|
# Hooks
|
|
162
184
|
for file_path in doc_files["hook"]:
|
|
163
185
|
data = file_path.read_text()
|
|
164
186
|
if data:
|
|
165
|
-
existing_data = collection.find_one(
|
|
187
|
+
existing_data = collection.find_one(
|
|
188
|
+
{"name": doc_name, "type": {"$in": ["composite", "document"]}}
|
|
189
|
+
)
|
|
166
190
|
hook_name = file_path.stem
|
|
167
191
|
if existing_data:
|
|
168
192
|
if existing_data.get(hook_name, None) == data:
|
|
@@ -171,7 +195,10 @@ async def apply_document(
|
|
|
171
195
|
if not dry_run:
|
|
172
196
|
try:
|
|
173
197
|
collection.update_one(
|
|
174
|
-
{
|
|
198
|
+
{
|
|
199
|
+
"name": doc_name,
|
|
200
|
+
"type": {"$in": ["composite", "document"]},
|
|
201
|
+
},
|
|
175
202
|
{"$set": {hook_name: data}},
|
|
176
203
|
)
|
|
177
204
|
applied.append(f"✓ {doc_name}/{hook_name}")
|
|
@@ -195,7 +222,10 @@ async def prune_documents(
|
|
|
195
222
|
# Encontra documentos no banco que não existem localmente
|
|
196
223
|
remote_docs = set(
|
|
197
224
|
doc["name"]
|
|
198
|
-
for doc in collection.find(
|
|
225
|
+
for doc in collection.find(
|
|
226
|
+
{"type": {"$in": ["composite", "document"]}, "name": {"$ne": "_id"}},
|
|
227
|
+
{"name": 1},
|
|
228
|
+
)
|
|
199
229
|
)
|
|
200
230
|
|
|
201
231
|
to_remove = remote_docs - local_docs
|
|
@@ -204,7 +234,9 @@ async def prune_documents(
|
|
|
204
234
|
if not dry_run:
|
|
205
235
|
try:
|
|
206
236
|
# Remove documento principal
|
|
207
|
-
collection.delete_one(
|
|
237
|
+
collection.delete_one(
|
|
238
|
+
{"name": doc_name, "type": {"$in": ["composite", "document"]}}
|
|
239
|
+
)
|
|
208
240
|
# Remove documentos relacionados
|
|
209
241
|
collection.delete_many({"document": doc_name})
|
|
210
242
|
pruned.append(f"✓ {doc_name}")
|
|
@@ -262,7 +294,9 @@ async def apply_namespace(
|
|
|
262
294
|
if not dry_run:
|
|
263
295
|
try:
|
|
264
296
|
if existing_namespace:
|
|
265
|
-
collection.update_one(
|
|
297
|
+
collection.update_one(
|
|
298
|
+
{"_id": "Namespace"}, {"$set": namespace_data}
|
|
299
|
+
)
|
|
266
300
|
else:
|
|
267
301
|
collection.insert_one({**base_namespace, **namespace_data})
|
|
268
302
|
applied.append("✓ Namespace.json")
|
|
@@ -274,20 +308,41 @@ async def apply_namespace(
|
|
|
274
308
|
return applied, errors, skipped
|
|
275
309
|
|
|
276
310
|
|
|
311
|
+
@click.command(name="apply")
|
|
312
|
+
@click.option("--metadata-dir", default="metadata", help="Diretório com os metadados")
|
|
313
|
+
@click.option("--host", default="localhost", help="Host do MongoDB")
|
|
314
|
+
@click.option("--port", type=int, default=27017, help="Porta do MongoDB")
|
|
315
|
+
@click.option("--database", default="default", help="Nome do banco de dados")
|
|
316
|
+
@click.option("--username", help="Usuário do MongoDB")
|
|
317
|
+
@click.option("--password", help="Senha do MongoDB")
|
|
318
|
+
@click.option("--replicaset", help="Nome do ReplicaSet do MongoDB (ex: rs0)")
|
|
319
|
+
@click.option("--document", help="Nome do documento para aplicar")
|
|
320
|
+
@click.option(
|
|
321
|
+
"--prune", is_flag=True, help="Remove documentos que não existem localmente"
|
|
322
|
+
)
|
|
323
|
+
@click.option("--dry-run", is_flag=True, help="Simula as alterações sem aplicá-las")
|
|
324
|
+
@click.option("--direct-connection", is_flag=True, help="Usa conexão direta ao MongoDB")
|
|
325
|
+
@click.option(
|
|
326
|
+
"--retry-writes",
|
|
327
|
+
is_flag=True,
|
|
328
|
+
default=True,
|
|
329
|
+
help="Tenta reescrever em caso de falha",
|
|
330
|
+
)
|
|
331
|
+
@click.option("--w", default="majority", help="Nível de escrita do MongoDB")
|
|
277
332
|
async def apply_command(
|
|
278
|
-
metadata_dir: str
|
|
279
|
-
host: str
|
|
280
|
-
port: int
|
|
281
|
-
database: str
|
|
282
|
-
username: Optional[str]
|
|
283
|
-
password: Optional[str]
|
|
284
|
-
replicaset: Optional[str]
|
|
285
|
-
document: Optional[str]
|
|
286
|
-
prune: bool
|
|
287
|
-
dry_run: bool
|
|
288
|
-
direct_connection: bool
|
|
289
|
-
retry_writes: bool
|
|
290
|
-
w: str
|
|
333
|
+
metadata_dir: str,
|
|
334
|
+
host: str,
|
|
335
|
+
port: int,
|
|
336
|
+
database: str,
|
|
337
|
+
username: Optional[str],
|
|
338
|
+
password: Optional[str],
|
|
339
|
+
replicaset: Optional[str],
|
|
340
|
+
document: Optional[str],
|
|
341
|
+
prune: bool,
|
|
342
|
+
dry_run: bool,
|
|
343
|
+
direct_connection: bool,
|
|
344
|
+
retry_writes: bool,
|
|
345
|
+
w: str,
|
|
291
346
|
) -> None:
|
|
292
347
|
"""Aplica alterações locais ao MongoDB."""
|
|
293
348
|
metadata_path = Path(metadata_dir).resolve()
|
|
@@ -314,7 +369,11 @@ async def apply_command(
|
|
|
314
369
|
uri = f"mongodb://{host}:{port}{uri_suffix}"
|
|
315
370
|
|
|
316
371
|
client = MongoClient(
|
|
317
|
-
uri,
|
|
372
|
+
uri,
|
|
373
|
+
serverSelectionTimeoutMS=30000,
|
|
374
|
+
connectTimeoutMS=20000,
|
|
375
|
+
socketTimeoutMS=20000,
|
|
376
|
+
maxPoolSize=1,
|
|
318
377
|
)
|
|
319
378
|
db = client[database]
|
|
320
379
|
collection = db["MetaObjects"]
|
|
@@ -331,7 +390,10 @@ async def apply_command(
|
|
|
331
390
|
table.add_column("Erros")
|
|
332
391
|
if namespace_applied or namespace_errors or namespace_skipped:
|
|
333
392
|
table.add_row(
|
|
334
|
-
"@Namespace",
|
|
393
|
+
"@Namespace",
|
|
394
|
+
"\n".join(namespace_applied),
|
|
395
|
+
"\n".join(namespace_skipped),
|
|
396
|
+
"\n".join(namespace_errors),
|
|
335
397
|
)
|
|
336
398
|
client.close()
|
|
337
399
|
console.print(table)
|
|
@@ -374,7 +436,9 @@ async def apply_command(
|
|
|
374
436
|
return
|
|
375
437
|
|
|
376
438
|
# Aplica o Namespace primeiro
|
|
377
|
-
namespace_applied, namespace_errors, namespace_skipped = await apply_namespace(
|
|
439
|
+
namespace_applied, namespace_errors, namespace_skipped = await apply_namespace(
|
|
440
|
+
collection, metadata_path, dry_run
|
|
441
|
+
)
|
|
378
442
|
|
|
379
443
|
table = Table(title="Resultados da Aplicação")
|
|
380
444
|
table.add_column("Documento")
|
|
@@ -384,72 +448,58 @@ async def apply_command(
|
|
|
384
448
|
|
|
385
449
|
if namespace_applied or namespace_errors or namespace_skipped:
|
|
386
450
|
table.add_row(
|
|
387
|
-
"@Namespace",
|
|
451
|
+
"@Namespace",
|
|
452
|
+
"\n".join(namespace_applied),
|
|
453
|
+
"\n".join(namespace_skipped),
|
|
454
|
+
"\n".join(namespace_errors),
|
|
388
455
|
)
|
|
389
456
|
|
|
390
457
|
with Progress() as progress:
|
|
391
|
-
task = progress.add_task(
|
|
458
|
+
task = progress.add_task(
|
|
459
|
+
"[cyan]Aplicando alterações...", total=len(docs_to_process)
|
|
460
|
+
)
|
|
392
461
|
|
|
393
462
|
for doc_name in docs_to_process:
|
|
394
|
-
applied, errors, skipped = await apply_document(
|
|
463
|
+
applied, errors, skipped = await apply_document(
|
|
464
|
+
collection, doc_name, all_files[doc_name], dry_run
|
|
465
|
+
)
|
|
395
466
|
|
|
396
|
-
table.add_row(
|
|
467
|
+
table.add_row(
|
|
468
|
+
doc_name,
|
|
469
|
+
"\n".join(applied),
|
|
470
|
+
"\n".join(skipped),
|
|
471
|
+
"\n".join(errors) if errors else "",
|
|
472
|
+
)
|
|
397
473
|
|
|
398
474
|
progress.update(task, advance=1)
|
|
399
475
|
|
|
400
476
|
if prune:
|
|
401
|
-
pruned, prune_errors = await prune_documents(
|
|
477
|
+
pruned, prune_errors = await prune_documents(
|
|
478
|
+
collection, set(all_files.keys()), dry_run
|
|
479
|
+
)
|
|
402
480
|
if pruned or prune_errors:
|
|
403
|
-
table.add_row(
|
|
481
|
+
table.add_row(
|
|
482
|
+
"Prune",
|
|
483
|
+
"\n".join(pruned),
|
|
484
|
+
"",
|
|
485
|
+
"\n".join(prune_errors) if prune_errors else "",
|
|
486
|
+
)
|
|
404
487
|
|
|
405
488
|
client.close()
|
|
406
489
|
console.print(table)
|
|
407
490
|
|
|
408
491
|
|
|
409
492
|
def main():
|
|
410
|
-
"""
|
|
411
|
-
import argparse
|
|
493
|
+
"""Entry point for the CLI."""
|
|
412
494
|
import asyncio
|
|
413
495
|
import sys
|
|
414
496
|
|
|
415
|
-
parser = argparse.ArgumentParser(description="Aplica alterações locais ao MongoDB")
|
|
416
|
-
parser.add_argument("--host", default="localhost", help="Host do MongoDB")
|
|
417
|
-
parser.add_argument("--port", type=int, default=27017, help="Porta do MongoDB")
|
|
418
|
-
parser.add_argument("--database", required=True, help="Nome do banco de dados")
|
|
419
|
-
parser.add_argument("--metadata-dir", default="metadata", help="Diretório dos metadados")
|
|
420
|
-
parser.add_argument("--username", help="Usuário do MongoDB")
|
|
421
|
-
parser.add_argument("--password", help="Senha do MongoDB")
|
|
422
|
-
parser.add_argument("--replicaset", help="Nome do ReplicaSet do MongoDB (ex: rs0)")
|
|
423
|
-
parser.add_argument("--document", help="Nome do documento específico para aplicar")
|
|
424
|
-
parser.add_argument("--prune", action="store_true", help="Remove documentos que não existem localmente")
|
|
425
|
-
parser.add_argument("--dry-run", action="store_true", help="Executa sem fazer alterações")
|
|
426
|
-
parser.add_argument("--direct-connection", action="store_true", help="Usa conexão direta com o MongoDB")
|
|
427
|
-
parser.add_argument("--no-retry-writes", action="store_true", help="Desativa retry writes")
|
|
428
|
-
parser.add_argument("--write-concern", default="majority", help="Write concern (ex: majority, 1)")
|
|
429
|
-
|
|
430
|
-
args = parser.parse_args()
|
|
431
|
-
|
|
432
497
|
if sys.platform == "win32":
|
|
433
498
|
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
|
|
434
499
|
|
|
435
|
-
asyncio.run(
|
|
436
|
-
apply_command(
|
|
437
|
-
metadata_dir=args.metadata_dir,
|
|
438
|
-
host=args.host,
|
|
439
|
-
port=args.port,
|
|
440
|
-
database=args.database,
|
|
441
|
-
username=args.username,
|
|
442
|
-
password=args.password,
|
|
443
|
-
replicaset=args.replicaset,
|
|
444
|
-
document=args.document,
|
|
445
|
-
prune=args.prune,
|
|
446
|
-
dry_run=args.dry_run,
|
|
447
|
-
direct_connection=args.direct_connection,
|
|
448
|
-
retry_writes=not args.no_retry_writes,
|
|
449
|
-
w=args.write_concern,
|
|
450
|
-
)
|
|
451
|
-
)
|
|
500
|
+
asyncio.run(apply_command())
|
|
452
501
|
|
|
453
502
|
|
|
454
503
|
if __name__ == "__main__":
|
|
455
504
|
main()
|
|
505
|
+
main()
|
|
@@ -7,6 +7,7 @@ from datetime import datetime
|
|
|
7
7
|
from pathlib import Path
|
|
8
8
|
from typing import Optional
|
|
9
9
|
|
|
10
|
+
import click
|
|
10
11
|
from pymongo import MongoClient
|
|
11
12
|
from rich.console import Console
|
|
12
13
|
from rich.progress import Progress
|
|
@@ -14,15 +15,24 @@ from rich.progress import Progress
|
|
|
14
15
|
console = Console()
|
|
15
16
|
|
|
16
17
|
|
|
18
|
+
@click.command(name="backup")
|
|
19
|
+
@click.option("--host", default="localhost", help="Host do MongoDB")
|
|
20
|
+
@click.option("--port", type=int, default=27017, help="Porta do MongoDB")
|
|
21
|
+
@click.option("--database", required=True, help="Nome do banco de dados")
|
|
22
|
+
@click.option("--output", default="backups", help="Diretório para salvar o backup")
|
|
23
|
+
@click.option("--username", help="Usuário do MongoDB")
|
|
24
|
+
@click.option("--password", help="Senha do MongoDB")
|
|
25
|
+
@click.option("--replicaset", help="Nome do ReplicaSet do MongoDB (ex: rs0)")
|
|
26
|
+
@click.option("--version", help="Rótulo de versão para o arquivo de backup")
|
|
17
27
|
async def backup_command(
|
|
18
|
-
host: str
|
|
19
|
-
port: int
|
|
20
|
-
database: str
|
|
21
|
-
output: str
|
|
22
|
-
username: Optional[str]
|
|
23
|
-
password: Optional[str]
|
|
24
|
-
replicaset: Optional[str]
|
|
25
|
-
version: Optional[str]
|
|
28
|
+
host: str,
|
|
29
|
+
port: int,
|
|
30
|
+
database: str,
|
|
31
|
+
output: str,
|
|
32
|
+
username: Optional[str],
|
|
33
|
+
password: Optional[str],
|
|
34
|
+
replicaset: Optional[str],
|
|
35
|
+
version: Optional[str],
|
|
26
36
|
) -> None:
|
|
27
37
|
"""Gera backup dos documentos do MongoDB."""
|
|
28
38
|
output_dir = Path(output).resolve()
|
|
@@ -35,7 +45,14 @@ async def backup_command(
|
|
|
35
45
|
|
|
36
46
|
uri_params = []
|
|
37
47
|
if replicaset:
|
|
38
|
-
uri_params.extend(
|
|
48
|
+
uri_params.extend(
|
|
49
|
+
[
|
|
50
|
+
f"replicaSet={replicaset}",
|
|
51
|
+
"directConnection=false",
|
|
52
|
+
"retryWrites=true",
|
|
53
|
+
"w=majority",
|
|
54
|
+
]
|
|
55
|
+
)
|
|
39
56
|
|
|
40
57
|
uri_suffix = f"?{'&'.join(uri_params)}" if uri_params else ""
|
|
41
58
|
|
|
@@ -45,7 +62,11 @@ async def backup_command(
|
|
|
45
62
|
uri = f"mongodb://{host}:{port}{uri_suffix}"
|
|
46
63
|
|
|
47
64
|
client = MongoClient(
|
|
48
|
-
uri,
|
|
65
|
+
uri,
|
|
66
|
+
serverSelectionTimeoutMS=30000,
|
|
67
|
+
connectTimeoutMS=20000,
|
|
68
|
+
socketTimeoutMS=20000,
|
|
69
|
+
maxPoolSize=1,
|
|
49
70
|
)
|
|
50
71
|
db = client[database]
|
|
51
72
|
collection = db["MetaObjects"]
|
|
@@ -87,42 +108,20 @@ async def backup_command(
|
|
|
87
108
|
tar.add(temp_dir, arcname="metadata")
|
|
88
109
|
|
|
89
110
|
client.close()
|
|
90
|
-
console.print(
|
|
111
|
+
console.print(
|
|
112
|
+
f"[green]Backup concluído com sucesso:[/green] [cyan]{backup_file}[/cyan]"
|
|
113
|
+
)
|
|
91
114
|
|
|
92
115
|
|
|
93
116
|
def main():
|
|
94
|
-
"""
|
|
95
|
-
import argparse
|
|
117
|
+
"""Entry point for the CLI."""
|
|
96
118
|
import asyncio
|
|
97
119
|
import sys
|
|
98
120
|
|
|
99
|
-
parser = argparse.ArgumentParser(description="Gera backup dos documentos do MongoDB")
|
|
100
|
-
parser.add_argument("--host", default="localhost", help="Host do MongoDB")
|
|
101
|
-
parser.add_argument("--port", type=int, default=27017, help="Porta do MongoDB")
|
|
102
|
-
parser.add_argument("--database", required=True, help="Nome do banco de dados")
|
|
103
|
-
parser.add_argument("--output", default="backups", help="Diretório para salvar o backup")
|
|
104
|
-
parser.add_argument("--username", help="Usuário do MongoDB")
|
|
105
|
-
parser.add_argument("--password", help="Senha do MongoDB")
|
|
106
|
-
parser.add_argument("--replicaset", help="Nome do ReplicaSet do MongoDB (ex: rs0)")
|
|
107
|
-
parser.add_argument("--version", help="Rótulo de versão para o arquivo de backup")
|
|
108
|
-
|
|
109
|
-
args = parser.parse_args()
|
|
110
|
-
|
|
111
121
|
if sys.platform == "win32":
|
|
112
122
|
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
|
|
113
123
|
|
|
114
|
-
asyncio.run(
|
|
115
|
-
backup_command(
|
|
116
|
-
host=args.host,
|
|
117
|
-
port=args.port,
|
|
118
|
-
database=args.database,
|
|
119
|
-
output=args.output,
|
|
120
|
-
username=args.username,
|
|
121
|
-
password=args.password,
|
|
122
|
-
replicaset=args.replicaset,
|
|
123
|
-
version=args.version,
|
|
124
|
-
)
|
|
125
|
-
)
|
|
124
|
+
asyncio.run(backup_command())
|
|
126
125
|
|
|
127
126
|
|
|
128
127
|
if __name__ == "__main__":
|
|
@@ -0,0 +1,265 @@
|
|
|
1
|
+
"""Script para extrair dados do MongoDB e gerar arquivos JSON."""
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
from typing import Any, Dict, List, Literal, Optional, TypedDict, Union, cast
|
|
6
|
+
|
|
7
|
+
import black
|
|
8
|
+
import click
|
|
9
|
+
import inquirer
|
|
10
|
+
from pymongo import MongoClient
|
|
11
|
+
from rich.console import Console
|
|
12
|
+
from rich.progress import Progress
|
|
13
|
+
from rich.table import Table
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class DocFiles(TypedDict):
|
|
17
|
+
document: List[Path]
|
|
18
|
+
view: List[Path]
|
|
19
|
+
list: List[Path]
|
|
20
|
+
pivot: List[Path]
|
|
21
|
+
access: List[Path]
|
|
22
|
+
hook: List[Path]
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class RelatedResults(TypedDict):
|
|
26
|
+
view: List[str]
|
|
27
|
+
list: List[str]
|
|
28
|
+
pivot: List[str]
|
|
29
|
+
access: List[str]
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
class MongoFilter(TypedDict, total=False):
|
|
33
|
+
document: str
|
|
34
|
+
type: str
|
|
35
|
+
name: str
|
|
36
|
+
or_conditions: List[Dict[str, str]]
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
class MongoCondition(TypedDict, total=False):
|
|
40
|
+
type: str
|
|
41
|
+
name: str
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
MongoQuery = Dict[str, Union[str, List[MongoCondition]]]
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
DocType = Literal["document", "view", "list", "pivot", "access", "hook"]
|
|
48
|
+
MetaType = Literal["view", "list", "pivot", "access"]
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
console = Console()
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
def format_code(name: str, code: str) -> str:
|
|
55
|
+
"""Formata o código JavaScript usando black."""
|
|
56
|
+
try:
|
|
57
|
+
return black.format_str(code, mode=black.Mode())
|
|
58
|
+
except Exception as error:
|
|
59
|
+
console.print(f"[red]Erro ao formatar código {name}[/red]: {str(error)}")
|
|
60
|
+
return code
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
async def write_file(file_path: str, content: str) -> None:
|
|
64
|
+
"""Escreve conteúdo em um arquivo, criando diretórios se necessário."""
|
|
65
|
+
try:
|
|
66
|
+
path = Path(file_path)
|
|
67
|
+
path.parent.mkdir(parents=True, exist_ok=True)
|
|
68
|
+
path.write_text(content)
|
|
69
|
+
except Exception as error:
|
|
70
|
+
console.print(f"[red]Erro ao escrever arquivo {file_path}[/red]: {str(error)}")
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
@click.command(name="pull")
|
|
74
|
+
@click.option("--host", default="localhost", help="Host do MongoDB")
|
|
75
|
+
@click.option("--port", type=int, default=27017, help="Porta do MongoDB")
|
|
76
|
+
@click.option("--database", default="default", help="Nome do banco de dados")
|
|
77
|
+
@click.option("--output", default="metadata", help="Diretório para salvar os metadados")
|
|
78
|
+
@click.option("--username", help="Usuário do MongoDB")
|
|
79
|
+
@click.option("--password", help="Senha do MongoDB")
|
|
80
|
+
@click.option("--replicaset", help="Nome do ReplicaSet do MongoDB (ex: rs0)")
|
|
81
|
+
@click.option("--document", help="Nome do documento para baixar")
|
|
82
|
+
@click.option("--view", help="Nome da view específica para extrair")
|
|
83
|
+
@click.option("--list", "list_param", help="Nome da lista específica para extrair")
|
|
84
|
+
@click.option("--pivot", help="Nome do pivot específico para extrair")
|
|
85
|
+
@click.option("--access", help="Nome do access específica para extrair")
|
|
86
|
+
@click.option("--hook", help="Nome do hook específico para extrair")
|
|
87
|
+
@click.option(
|
|
88
|
+
"--all",
|
|
89
|
+
"extract_all",
|
|
90
|
+
is_flag=True,
|
|
91
|
+
help="Extrair todas as collections sem perguntar",
|
|
92
|
+
)
|
|
93
|
+
async def pull_command(
|
|
94
|
+
host: str,
|
|
95
|
+
port: int,
|
|
96
|
+
database: str,
|
|
97
|
+
output: str,
|
|
98
|
+
username: Optional[str],
|
|
99
|
+
password: Optional[str],
|
|
100
|
+
replicaset: Optional[str],
|
|
101
|
+
document: Optional[str],
|
|
102
|
+
view: Optional[str],
|
|
103
|
+
list_param: Optional[str],
|
|
104
|
+
pivot: Optional[str],
|
|
105
|
+
access: Optional[str],
|
|
106
|
+
hook: Optional[str],
|
|
107
|
+
extract_all: bool,
|
|
108
|
+
) -> None:
|
|
109
|
+
"""Baixa metadados do MongoDB."""
|
|
110
|
+
output_dir = Path(output).resolve()
|
|
111
|
+
output_dir.mkdir(parents=True, exist_ok=True)
|
|
112
|
+
|
|
113
|
+
uri_params = []
|
|
114
|
+
if replicaset:
|
|
115
|
+
uri_params.extend(
|
|
116
|
+
[
|
|
117
|
+
f"replicaSet={replicaset}",
|
|
118
|
+
"directConnection=false",
|
|
119
|
+
"retryWrites=true",
|
|
120
|
+
"w=majority",
|
|
121
|
+
]
|
|
122
|
+
)
|
|
123
|
+
|
|
124
|
+
uri_suffix = f"?{'&'.join(uri_params)}" if uri_params else ""
|
|
125
|
+
|
|
126
|
+
if username and password:
|
|
127
|
+
uri = f"mongodb://{username}:{password}@{host}:{port}/admin{uri_suffix}"
|
|
128
|
+
else:
|
|
129
|
+
uri = f"mongodb://{host}:{port}{uri_suffix}"
|
|
130
|
+
|
|
131
|
+
client = MongoClient(
|
|
132
|
+
uri,
|
|
133
|
+
serverSelectionTimeoutMS=30000,
|
|
134
|
+
connectTimeoutMS=20000,
|
|
135
|
+
socketTimeoutMS=20000,
|
|
136
|
+
maxPoolSize=1,
|
|
137
|
+
)
|
|
138
|
+
db = client[database]
|
|
139
|
+
collection = db["MetaObjects"]
|
|
140
|
+
|
|
141
|
+
table = Table(title="Resultados da Extração")
|
|
142
|
+
table.add_column("Documento")
|
|
143
|
+
table.add_column("Hook")
|
|
144
|
+
table.add_column("View")
|
|
145
|
+
table.add_column("List")
|
|
146
|
+
table.add_column("Pivot")
|
|
147
|
+
table.add_column("Access")
|
|
148
|
+
|
|
149
|
+
if document is None and not extract_all:
|
|
150
|
+
document_names = list(
|
|
151
|
+
collection.find(
|
|
152
|
+
{"type": {"$in": ["composite", "document"]}}, {"name": 1}
|
|
153
|
+
).sort("name", 1)
|
|
154
|
+
)
|
|
155
|
+
if not document_names:
|
|
156
|
+
console.print("[red]Nenhum documento encontrado.[/red]")
|
|
157
|
+
return
|
|
158
|
+
|
|
159
|
+
document_names = [doc["name"] for doc in document_names]
|
|
160
|
+
document_names.append("Todos")
|
|
161
|
+
|
|
162
|
+
questions = [
|
|
163
|
+
inquirer.List(
|
|
164
|
+
"document",
|
|
165
|
+
message="Selecione o documento para extrair",
|
|
166
|
+
choices=document_names,
|
|
167
|
+
)
|
|
168
|
+
]
|
|
169
|
+
answers = inquirer.prompt(questions)
|
|
170
|
+
if not answers:
|
|
171
|
+
return
|
|
172
|
+
document = "all" if answers["document"] == "Todos" else answers["document"]
|
|
173
|
+
elif extract_all:
|
|
174
|
+
document = "all"
|
|
175
|
+
|
|
176
|
+
filter_query: Dict[str, Any] = {"type": {"$in": ["composite", "document"]}}
|
|
177
|
+
if document != "all":
|
|
178
|
+
filter_query["name"] = document
|
|
179
|
+
|
|
180
|
+
documents = list(collection.find(filter_query).sort("name", 1))
|
|
181
|
+
|
|
182
|
+
with Progress() as progress:
|
|
183
|
+
task = progress.add_task("[cyan]Extraindo metadados...", total=len(documents))
|
|
184
|
+
|
|
185
|
+
for doc in documents:
|
|
186
|
+
doc_path = output_dir / doc["name"]
|
|
187
|
+
doc_path.mkdir(parents=True, exist_ok=True)
|
|
188
|
+
|
|
189
|
+
# Processando hooks
|
|
190
|
+
hook_results = []
|
|
191
|
+
if "validationData" in doc:
|
|
192
|
+
await write_file(
|
|
193
|
+
str(doc_path / "hook" / "validationData.json"),
|
|
194
|
+
json.dumps(doc["validationData"], indent=2),
|
|
195
|
+
)
|
|
196
|
+
hook_results.append("✓ validationData")
|
|
197
|
+
|
|
198
|
+
for script_type in [
|
|
199
|
+
"scriptBeforeValidation",
|
|
200
|
+
"validationScript",
|
|
201
|
+
"scriptAfterSave",
|
|
202
|
+
]:
|
|
203
|
+
if script_type in doc:
|
|
204
|
+
formatted = format_code(f"{script_type}.js", doc[script_type])
|
|
205
|
+
await write_file(
|
|
206
|
+
str(doc_path / "hook" / f"{script_type}.js"), formatted
|
|
207
|
+
)
|
|
208
|
+
hook_results.append(f"✓ {script_type}")
|
|
209
|
+
|
|
210
|
+
# Processando views, lists, pivots e access
|
|
211
|
+
related_results: RelatedResults = {
|
|
212
|
+
"view": [],
|
|
213
|
+
"list": [],
|
|
214
|
+
"pivot": [],
|
|
215
|
+
"access": [],
|
|
216
|
+
}
|
|
217
|
+
for type_name, param in [
|
|
218
|
+
("view", view),
|
|
219
|
+
("list", list_param),
|
|
220
|
+
("pivot", pivot),
|
|
221
|
+
("access", access),
|
|
222
|
+
]:
|
|
223
|
+
if param is not None:
|
|
224
|
+
condition = {"type": type_name}
|
|
225
|
+
if param != "all":
|
|
226
|
+
condition["name"] = param
|
|
227
|
+
related_metas = list(collection.find(condition).sort("_id", 1))
|
|
228
|
+
|
|
229
|
+
for meta in related_metas:
|
|
230
|
+
meta_type = cast(MetaType, meta["type"])
|
|
231
|
+
if meta_type in ("view", "list", "pivot", "access"):
|
|
232
|
+
await write_file(
|
|
233
|
+
str(doc_path / meta_type / f"{meta['name']}.json"),
|
|
234
|
+
json.dumps(meta, indent=2),
|
|
235
|
+
)
|
|
236
|
+
related_results[meta_type].append(f"✓ {meta['name']}")
|
|
237
|
+
|
|
238
|
+
table.add_row(
|
|
239
|
+
doc["name"],
|
|
240
|
+
"\n".join(hook_results),
|
|
241
|
+
"\n".join(related_results["view"]),
|
|
242
|
+
"\n".join(related_results["list"]),
|
|
243
|
+
"\n".join(related_results["pivot"]),
|
|
244
|
+
"\n".join(related_results["access"]),
|
|
245
|
+
)
|
|
246
|
+
|
|
247
|
+
progress.update(task, advance=1)
|
|
248
|
+
|
|
249
|
+
console.print(table)
|
|
250
|
+
client.close()
|
|
251
|
+
|
|
252
|
+
|
|
253
|
+
def main():
|
|
254
|
+
"""Entry point for the CLI."""
|
|
255
|
+
import asyncio
|
|
256
|
+
import sys
|
|
257
|
+
|
|
258
|
+
if sys.platform == "win32":
|
|
259
|
+
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
|
|
260
|
+
|
|
261
|
+
asyncio.run(pull_command())
|
|
262
|
+
|
|
263
|
+
|
|
264
|
+
if __name__ == "__main__":
|
|
265
|
+
main()
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
[project]
|
|
2
2
|
name = "konecty-sdk-python"
|
|
3
|
-
version = "0.
|
|
3
|
+
version = "0.3.0"
|
|
4
4
|
description = "Konecty SDK Python"
|
|
5
5
|
readme = "README.md"
|
|
6
6
|
requires-python = ">=3.11"
|
|
@@ -32,6 +32,9 @@ dependencies = [
|
|
|
32
32
|
"pydantic>=2.11.4",
|
|
33
33
|
]
|
|
34
34
|
|
|
35
|
+
[project.scripts]
|
|
36
|
+
konecty-cli = "cli:main"
|
|
37
|
+
|
|
35
38
|
[build-system]
|
|
36
39
|
requires = ["hatchling"]
|
|
37
40
|
build-backend = "hatchling.build"
|
|
@@ -1,262 +0,0 @@
|
|
|
1
|
-
"""Script para extrair dados do MongoDB e gerar arquivos JSON."""
|
|
2
|
-
|
|
3
|
-
import json
|
|
4
|
-
from pathlib import Path
|
|
5
|
-
from typing import Any, Dict, List, Literal, Optional, TypedDict, Union, cast
|
|
6
|
-
|
|
7
|
-
import black
|
|
8
|
-
import inquirer
|
|
9
|
-
from pymongo import MongoClient
|
|
10
|
-
from rich.console import Console
|
|
11
|
-
from rich.progress import Progress
|
|
12
|
-
from rich.table import Table
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
class DocFiles(TypedDict):
    """Generated file paths grouped by metadata kind (one list per kind)."""

    # One list of output paths for each extractable metadata kind.
    document: List[Path]
    view: List[Path]
    list: List[Path]
    pivot: List[Path]
    access: List[Path]
    hook: List[Path]
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
class RelatedResults(TypedDict):
    """Per-document extraction status strings ("✓ <name>") for each related metadata kind."""

    view: List[str]
    list: List[str]
    pivot: List[str]
    access: List[str]
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
class MongoFilter(TypedDict, total=False):
    """Optional fields of a MetaObjects filter (total=False: every key may be absent)."""

    document: str
    type: str
    name: str
    # Alternative (type, name) conditions; presumably combined as "$or" — see MongoQuery.
    or_conditions: List[Dict[str, str]]
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
class MongoCondition(TypedDict, total=False):
    """One alternative match condition: by metadata type and/or name (both optional)."""

    type: str
    name: str
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
# Shape of a MetaObjects query: plain string-equality fields plus optional
# lists of alternative conditions (e.g. the value under an "$or" key).
MongoQuery = Dict[str, Union[str, List[MongoCondition]]]


# Every metadata kind the pull command can extract.
DocType = Literal["document", "view", "list", "pivot", "access", "hook"]
# Metadata kinds stored as separate MetaObjects related to a parent document.
MetaType = Literal["view", "list", "pivot", "access"]


# Shared rich console for all user-facing output in this module.
console = Console()
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
def format_code(name: str, code: str) -> str:
    """Best-effort formatting of *code* with black; return it unchanged on failure.

    A formatting error is logged to the console and the raw source is
    returned, so extraction never fails because of a formatter problem.

    NOTE(review): black is a Python formatter, yet callers pass JavaScript
    hook sources — formatting will typically fail and fall through to the
    raw code; confirm whether a JS formatter was intended.
    """
    result = code
    try:
        result = black.format_str(code, mode=black.Mode())
    except Exception as error:
        console.print(f"[red]Erro ao formatar código {name}[/red]: {str(error)}")
    return result
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
async def write_file(file_path: str, content: str) -> None:
    """Write *content* to *file_path* as UTF-8, creating parent directories.

    Errors are reported on the console instead of raised, so one bad path
    does not abort a whole extraction run.

    Args:
        file_path: Destination path; missing parent directories are created.
        content: Text to write.
    """
    try:
        path = Path(file_path)
        path.parent.mkdir(parents=True, exist_ok=True)
        # Fix: pin the encoding — the metadata contains non-ASCII text
        # ("✓", accented names) and the platform default encoding
        # (e.g. cp1252 on Windows) would raise or corrupt it.
        path.write_text(content, encoding="utf-8")
    except Exception as error:
        console.print(f"[red]Erro ao escrever arquivo {file_path}[/red]: {str(error)}")
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
async def pull_command(
    doc_parameter: Optional[str] = None,
    host: str = "localhost",
    port: int = 27017,
    database: str = "default",
    output: str = "metadata",
    view: Optional[str] = None,
    list_param: Optional[str] = None,
    pivot: Optional[str] = None,
    access: Optional[str] = None,
    hook: Optional[str] = None,
    username: Optional[str] = None,
    password: Optional[str] = None,
    replicaset: Optional[str] = None,
    extract_all: bool = False,
) -> None:
    """Extract Konecty metadata from MongoDB into JSON/JS files under *output*.

    For each selected document the hook scripts are written as formatted
    ``.js`` files, ``validationData`` and the stripped document definition as
    JSON, and every related view/list/pivot/access document as
    ``<output>/<doc>/<type>/<name>.json``. A rich table summarising the run
    is printed at the end.

    Args:
        doc_parameter: Document name to extract, ``"all"`` for every document,
            or ``None`` to prompt interactively (unless ``extract_all``).
        host: MongoDB host.
        port: MongoDB port.
        database: Database holding the ``MetaObjects`` collection.
        output: Root directory for the generated files.
        view: Restrict extraction to one view (``"all"`` for every view).
        list_param: Restrict extraction to one list (``"all"`` for all).
        pivot: Restrict extraction to one pivot (``"all"`` for all).
        access: Restrict extraction to one access (``"all"`` for all).
        hook: Restrict extraction to a single hook/script field.
        username: Optional MongoDB user (authenticated against ``admin``).
        password: Optional MongoDB password.
        replicaset: Optional replica-set name added to the connection URI.
        extract_all: Skip the interactive prompt and extract every document.
    """
    output_dir = Path(output).resolve()

    # Connection URI: replica-set targets need extra connection options.
    uri_params = []
    if replicaset:
        uri_params.extend([f"replicaSet={replicaset}", "directConnection=false", "retryWrites=true", "w=majority"])

    uri_suffix = f"?{'&'.join(uri_params)}" if uri_params else ""

    if username and password:
        uri = f"mongodb://{username}:{password}@{host}:{port}/admin{uri_suffix}"
    else:
        uri = f"mongodb://{host}:{port}{uri_suffix}"

    client = MongoClient(
        uri, serverSelectionTimeoutMS=30000, connectTimeoutMS=20000, socketTimeoutMS=20000, maxPoolSize=1
    )
    # Fix: always close the client — the original leaked the connection on
    # the prompt-cancel early return and on any exception.
    try:
        collection = client[database]["MetaObjects"]

        document = doc_parameter

        table = Table(title="Resultados da Extração")
        table.add_column("Documento")
        table.add_column("Hook")
        table.add_column("View")
        table.add_column("List")
        table.add_column("Pivot")
        table.add_column("Access")

        if document is None and not extract_all:
            # No document given: ask interactively which one to extract.
            document_names = list(
                collection.find({"type": {"$in": ["composite", "document"]}}, {"name": 1}).sort("name", 1)
            )
            choices = [{"name": "Todos", "value": "all"}] + [
                {"name": doc["name"], "value": doc["name"]} for doc in document_names
            ]

            questions = [
                inquirer.List(
                    "document", message="Qual documento você precisa?", choices=[choice["name"] for choice in choices]
                )
            ]
            answers = inquirer.prompt(questions)
            if answers is None:
                console.print("[red]Operação cancelada pelo usuário[/red]")
                return
            document = "all" if answers["document"] == "Todos" else answers["document"]
        elif extract_all:
            document = "all"

        filter_query: Dict[str, Any] = {"type": {"$in": ["composite", "document"]}}
        if document != "all":
            filter_query["name"] = document

        metas = list(collection.find(filter_query).sort("_id", 1))

        with Progress() as progress:
            task = progress.add_task("[cyan]Processando...", total=len(metas))

            for doc in metas:
                doc_path = output_dir / doc["name"]

                # Hooks: the script fields live on the document itself.
                hook_results = []
                if hook or all(x is None for x in [view, list_param, pivot, access]):
                    # Strip script fields from the copy written as document.json.
                    doc_meta = doc.copy()
                    for field in ["scriptBeforeValidation", "validationData", "validationScript", "scriptAfterSave"]:
                        doc_meta.pop(field, None)

                    if hook in (None, "validationData") and "validationData" in doc:
                        await write_file(
                            str(doc_path / "hook" / "validationData.json"), json.dumps(doc["validationData"], indent=2)
                        )
                        hook_results.append("✓ validationData")

                    for script_type in ["scriptBeforeValidation", "validationScript", "scriptAfterSave"]:
                        if script_type in doc and (hook is None or hook == script_type):
                            formatted = format_code(f"{script_type}.js", doc[script_type])
                            await write_file(str(doc_path / "hook" / f"{script_type}.js"), formatted)
                            hook_results.append(f"✓ {script_type}")

                    if hook is None:
                        await write_file(str(doc_path / "document.json"), json.dumps(doc_meta, indent=2))

                # Related metadata (views, lists, pivots, access).
                related_results: RelatedResults = {"view": [], "list": [], "pivot": [], "access": []}
                if all(x is None for x in [hook, view, list_param, pivot, access]) or any(
                    x is not None for x in [view, list_param, pivot, access]
                ):
                    meta_filter: Dict[str, Any] = {"document": doc["name"]}

                    # Fix: attach "$or" only when there are conditions. MongoDB
                    # rejects an empty "$or" array, which the original sent
                    # whenever no type-specific filter was requested.
                    or_conditions: List[Dict[str, str]] = []
                    for type_name, param in [
                        ("view", view),
                        ("list", list_param),
                        ("pivot", pivot),
                        ("access", access),
                    ]:
                        if param is not None:
                            condition = {"type": type_name}
                            if param != "all":
                                condition["name"] = param
                            or_conditions.append(condition)
                    if or_conditions:
                        meta_filter["$or"] = or_conditions

                    related_metas = list(collection.find(meta_filter).sort("_id", 1))

                    for meta in related_metas:
                        meta_type = cast(MetaType, meta["type"])
                        if meta_type in ("view", "list", "pivot", "access"):
                            await write_file(
                                str(doc_path / meta_type / f"{meta['name']}.json"), json.dumps(meta, indent=2)
                            )
                            related_results[meta_type].append(f"✓ {meta['name']}")

                table.add_row(
                    doc["name"],
                    "\n".join(hook_results),
                    "\n".join(related_results["view"]),
                    "\n".join(related_results["list"]),
                    "\n".join(related_results["pivot"]),
                    "\n".join(related_results["access"]),
                )

                progress.update(task, advance=1)

        console.print(f"[green]Extração concluída com sucesso do banco[/green] [cyan]{database}[/cyan]")
        if document != "all":
            console.print(f"[cyan]Documento: {document}[/cyan]")

        console.print(table)
    finally:
        client.close()
|
|
220
|
-
|
|
221
|
-
|
|
222
|
-
def main():
    """CLI entry point: parse command-line arguments and run ``pull_command``."""
    import argparse
    import asyncio
    import sys

    parser = argparse.ArgumentParser(description="Extrai dados do MongoDB")
    parser.add_argument("--host", default="localhost", help="Host do MongoDB")
    parser.add_argument("--port", type=int, default=27017, help="Porta do MongoDB")
    parser.add_argument("--database", required=True, help="Nome do banco de dados")
    parser.add_argument("--output", default="metadata", help="Diretório de saída")
    parser.add_argument("--view", help="Nome da view específica para extrair")
    parser.add_argument("--list", dest="list_param", help="Nome da lista específica para extrair")
    parser.add_argument("--pivot", help="Nome do pivot específico para extrair")
    parser.add_argument("--access", help="Nome do access específica para extrair")
    parser.add_argument("--hook", help="Nome do hook específico para extrair")
    parser.add_argument("--username", help="Usuário do MongoDB")
    parser.add_argument("--password", help="Senha do MongoDB")
    parser.add_argument("--replicaset", help="Nome do ReplicaSet do MongoDB (ex: rs0)")
    parser.add_argument("collection", nargs="?", help="Nome da collection específica para extrair")
    parser.add_argument("--all", action="store_true", help="Extrair todas as collections sem perguntar")

    args_dict = vars(parser.parse_args())

    # Rename argparse keys to match pull_command's signature. argparse always
    # defines both keys, so the original `if key in args_dict` guards were dead.
    args_dict["doc_parameter"] = args_dict.pop("collection")
    args_dict["extract_all"] = args_dict.pop("all")

    if sys.platform == "win32":
        # Use the selector event loop policy on Windows instead of the
        # default proactor loop.
        asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())

    asyncio.run(pull_command(**args_dict))


if __name__ == "__main__":
    main()
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|