comfygit 0.3.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- comfygit-0.3.1.dist-info/METADATA +654 -0
- comfygit-0.3.1.dist-info/RECORD +30 -0
- comfygit-0.3.1.dist-info/WHEEL +4 -0
- comfygit-0.3.1.dist-info/entry_points.txt +3 -0
- comfygit-0.3.1.dist-info/licenses/LICENSE.txt +661 -0
- comfygit_cli/__init__.py +12 -0
- comfygit_cli/__main__.py +6 -0
- comfygit_cli/cli.py +704 -0
- comfygit_cli/cli_utils.py +32 -0
- comfygit_cli/completers.py +239 -0
- comfygit_cli/completion_commands.py +246 -0
- comfygit_cli/env_commands.py +2701 -0
- comfygit_cli/formatters/__init__.py +5 -0
- comfygit_cli/formatters/error_formatter.py +141 -0
- comfygit_cli/global_commands.py +1806 -0
- comfygit_cli/interactive/__init__.py +1 -0
- comfygit_cli/logging/compressed_handler.py +150 -0
- comfygit_cli/logging/environment_logger.py +554 -0
- comfygit_cli/logging/log_compressor.py +101 -0
- comfygit_cli/logging/logging_config.py +97 -0
- comfygit_cli/resolution_strategies.py +89 -0
- comfygit_cli/strategies/__init__.py +1 -0
- comfygit_cli/strategies/conflict_resolver.py +113 -0
- comfygit_cli/strategies/interactive.py +843 -0
- comfygit_cli/strategies/rollback.py +40 -0
- comfygit_cli/utils/__init__.py +12 -0
- comfygit_cli/utils/civitai_errors.py +9 -0
- comfygit_cli/utils/orchestrator.py +252 -0
- comfygit_cli/utils/pagination.py +82 -0
- comfygit_cli/utils/progress.py +128 -0
comfygit_cli/global_commands.py
@@ -0,0 +1,1806 @@
"""Global workspace-level commands for ComfyGit CLI."""
|
|
2
|
+
|
|
3
|
+
import argparse
|
|
4
|
+
import sys
|
|
5
|
+
from functools import cached_property
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
|
|
8
|
+
from comfygit_core.core.workspace import Workspace
|
|
9
|
+
from comfygit_core.factories.workspace_factory import WorkspaceFactory
|
|
10
|
+
from comfygit_core.models.protocols import ExportCallbacks, ImportCallbacks
|
|
11
|
+
|
|
12
|
+
from .cli_utils import get_workspace_or_exit
|
|
13
|
+
from .logging.environment_logger import WorkspaceLogger, with_workspace_logging
|
|
14
|
+
from .logging.logging_config import get_logger
|
|
15
|
+
from .utils import create_progress_callback, paginate, show_civitai_auth_help, show_download_stats
|
|
16
|
+
|
|
17
|
+
logger = get_logger(__name__)
|
|
18
|
+
|
|
19
|
+
# Default system nodes to install with new workspaces.
|
|
20
|
+
# These are infrastructure custom nodes that provide management capabilities.
|
|
21
|
+
# Use `cg init --bare` to skip installation.
|
|
22
|
+
DEFAULT_SYSTEM_NODES = {
|
|
23
|
+
"comfygit-manager": {
|
|
24
|
+
"url": "https://github.com/comfyhub-org/comfygit-manager.git",
|
|
25
|
+
"description": "ComfyGit management panel for ComfyUI",
|
|
26
|
+
},
|
|
27
|
+
}
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
class GlobalCommands:
|
|
31
|
+
"""Handler for global workspace commands."""
|
|
32
|
+
|
|
33
|
+
def __init__(self) -> None:
|
|
34
|
+
"""Initialize global commands handler."""
|
|
35
|
+
pass
|
|
36
|
+
|
|
37
|
+
@cached_property
|
|
38
|
+
def workspace(self) -> Workspace:
|
|
39
|
+
return get_workspace_or_exit()
|
|
40
|
+
|
|
41
|
+
def _get_or_create_workspace(self, args: argparse.Namespace) -> Workspace:
|
|
42
|
+
"""Get existing workspace or initialize a new one with user confirmation.
|
|
43
|
+
|
|
44
|
+
Args:
|
|
45
|
+
args: Command arguments, must have 'yes' attribute for non-interactive mode
|
|
46
|
+
|
|
47
|
+
Returns:
|
|
48
|
+
Workspace instance (existing or newly created)
|
|
49
|
+
"""
|
|
50
|
+
from comfygit_core.factories.workspace_factory import WorkspaceFactory
|
|
51
|
+
from comfygit_core.models.exceptions import CDWorkspaceNotFoundError
|
|
52
|
+
|
|
53
|
+
try:
|
|
54
|
+
workspace = WorkspaceFactory.find()
|
|
55
|
+
WorkspaceLogger.set_workspace_path(workspace.path)
|
|
56
|
+
return workspace
|
|
57
|
+
|
|
58
|
+
except CDWorkspaceNotFoundError:
|
|
59
|
+
# Determine if we should auto-init
|
|
60
|
+
use_yes = getattr(args, 'yes', False)
|
|
61
|
+
|
|
62
|
+
if not use_yes:
|
|
63
|
+
# Interactive: ask user
|
|
64
|
+
response = input("\n✗ Workspace not initialized. Initialize now? [Y/n]: ").strip().lower()
|
|
65
|
+
if response in ['n', 'no']:
|
|
66
|
+
print("Operation cancelled. Run 'cg init' to initialize workspace manually.")
|
|
67
|
+
sys.exit(1)
|
|
68
|
+
else:
|
|
69
|
+
# Non-interactive: inform user
|
|
70
|
+
print("\n📦 No workspace found. Initializing with defaults...")
|
|
71
|
+
|
|
72
|
+
# Run init flow
|
|
73
|
+
init_args = argparse.Namespace(
|
|
74
|
+
path=None, # Use default (or COMFYGIT_HOME)
|
|
75
|
+
models_dir=None,
|
|
76
|
+
yes=use_yes # Pass through --yes flag
|
|
77
|
+
)
|
|
78
|
+
|
|
79
|
+
self.init(init_args)
|
|
80
|
+
|
|
81
|
+
# Get the newly created workspace
|
|
82
|
+
workspace = WorkspaceFactory.find()
|
|
83
|
+
WorkspaceLogger.set_workspace_path(workspace.path)
|
|
84
|
+
|
|
85
|
+
print("\n✓ Workspace initialized! Continuing with command...\n")
|
|
86
|
+
return workspace
|
|
87
|
+
|
|
88
|
+
    def init(self, args: argparse.Namespace) -> None:
        """Initialize a new ComfyGit workspace.

        Creates:
        - ~/comfygit/ (or custom path)
        - .metadata/ for workspace state
        - uv_cache/ for package management
        - environments/ for ComfyUI environments
        """
        from pathlib import Path

        # Validate models directory if provided (before creating workspace)
        explicit_models_dir = getattr(args, 'models_dir', None)
        if explicit_models_dir:
            models_path = explicit_models_dir.resolve()
            if not models_path.exists() or not models_path.is_dir():
                print(f"✗ Models directory not found: {models_path}", file=sys.stderr)
                print(" Falling back to default models directory\n")
                # Clear the flag and enable --yes to avoid interactive prompt
                args.models_dir = None
                args.yes = True

        # Determine workspace path
        path = args.path if (hasattr(args, "path") and args.path) else None

        workspace_paths = WorkspaceFactory.get_paths(path)

        print(f"\n🎯 Initializing ComfyGit workspace at: {workspace_paths.root}")

        try:
            # Create workspace
            workspace = WorkspaceFactory.create(workspace_paths.root)

            # Set workspace path for logging after creation
            WorkspaceLogger.set_workspace_path(workspace.path)

            # Now log this command with the workspace logger
            with WorkspaceLogger.log_command("init", arg_path=path if path else "default"):
                logger.info(f"Workspace initialized at {workspace.path}")

                # Fetch registry data for the new workspace
                print("📦 Fetching latest registry data...")
                success = workspace.update_registry_data()
                if success:
                    print("✓ Registry data downloaded")
                    logger.info("Registry data downloaded successfully")
                else:
                    print("⚠️ Could not fetch registry data")
                    print(" Some features will be limited until registry data is available:")
                    print(" • Automatic node resolution from workflow files")
                    print(" • Node package search and discovery")
                    print("")
                    print(" Download later with: cg registry update")
                    logger.warning("Failed to fetch initial registry data")

                print(f"✓ Workspace initialized at {workspace.path}")

                # Install default system nodes (unless --bare)
                if not getattr(args, 'bare', False):
                    self._install_system_nodes(workspace)
                else:
                    print("📦 Skipping system node installation (--bare flag)")

                # Handle models directory setup
                self._setup_models_directory(workspace, args)

                # Show environment variable setup if custom path was used
                if path:
                    self._show_workspace_env_setup(workspace.path)

                print("\nNext steps:")
                print(" 1. Create an environment: cg create <name>")
                print(" 2. Add custom nodes: cg -e <name> node add <node>")
                print(" 3. Run ComfyUI: cg -e <name> run")
        except Exception as e:
            print(f"✗ Failed to initialize workspace: {e}", file=sys.stderr)
            sys.exit(1)

    def _install_system_nodes(self, workspace: Workspace) -> None:
        """Install default system nodes (comfygit-manager) into workspace.

        System nodes are infrastructure custom nodes that:
        - Live at workspace level (.metadata/system_nodes/)
        - Are symlinked into every environment
        - Are never tracked in pyproject.toml
        """
        from comfygit_core.utils.git import git_clone

        system_nodes_path = workspace.paths.system_nodes

        for node_name, node_config in DEFAULT_SYSTEM_NODES.items():
            target_path = system_nodes_path / node_name

            if target_path.exists():
                logger.debug(f"System node '{node_name}' already exists, skipping")
                continue

            print(f"📦 Installing system node: {node_name}")
            try:
                git_clone(
                    url=node_config["url"],
                    target_path=target_path,
                    depth=1 # Shallow clone for speed
                )
                print(f" ✓ Installed {node_name}")
                logger.info(f"Installed system node: {node_name}")
            except Exception as e:
                print(f" ⚠️ Failed to install {node_name}: {e}")
                print(f" You can install it manually later")
                logger.warning(f"Failed to install system node {node_name}: {e}")

    def _show_workspace_env_setup(self, workspace_path: Path) -> None:
        """Show instructions for setting COMFYGIT_HOME for custom workspace location."""
        import os

        print("\n" + "="*70)
        print("⚠️ CUSTOM WORKSPACE LOCATION")
        print("="*70)
        print(f"\nWorkspace created at: {workspace_path}")
        print("\nTo use this workspace in future sessions, set COMFYGIT_HOME:")

        # Detect shell and suggest appropriate config file
        shell = os.environ.get('SHELL', '')
        if 'bash' in shell:
            config_file = "~/.bashrc"
        elif 'zsh' in shell:
            config_file = "~/.zshrc"
        elif 'fish' in shell:
            config_file = "~/.config/fish/config.fish"
        else:
            config_file = "your shell profile"

        print(f"\nAdd to {config_file}:")
        print(f' export COMFYGIT_HOME="{workspace_path}"')
        print("\nOr set temporarily in current session:")
        print(f' export COMFYGIT_HOME="{workspace_path}"')
        print("\n" + "="*70)

    def _setup_models_directory(self, workspace: Workspace, args: argparse.Namespace) -> None:
        """Handle interactive or automatic models directory setup during init.

        Args:
            workspace: The newly created workspace
            args: CLI arguments containing models_dir and yes flags
        """
        from pathlib import Path
        from comfygit_cli.utils.progress import create_model_sync_progress
        from comfygit_core.utils.common import format_size

        # Check for explicit flags
        use_interactive = not getattr(args, 'yes', False)
        explicit_models_dir = getattr(args, 'models_dir', None)

        # If explicit models dir provided via flag (already validated in init)
        if explicit_models_dir:
            models_path = explicit_models_dir.resolve()
            print(f"\n📁 Setting models directory: {models_path}")
            self._scan_and_set_models_dir(workspace, models_path)
            return

        # If --yes flag, use default silently
        if not use_interactive:
            self._show_default_models_dir(workspace)
            return

        # Interactive mode
        print("\n📦 Model Directory Setup")
        print("\nComfyGit needs a directory to index your models.")
        print("\nOptions:")
        print(" 1. Point to an existing ComfyUI models directory (recommended)")
        print(" → Access all your existing models immediately")
        print(f" → Example: ~/ComfyUI/models")
        print("\n 2. Use the default empty directory")
        print(f" → ComfyGit created: {workspace.paths.models}")
        print(" → Download models as needed later")

        has_existing = input("\nDo you have an existing ComfyUI models directory? (y/N): ").strip().lower()

        if has_existing == 'y':
            while True:
                models_input = input("Enter path to models directory: ").strip()

                if not models_input:
                    print("Using default directory instead")
                    self._show_default_models_dir(workspace)
                    return

                models_path = Path(models_input).expanduser().resolve()

                # Validate directory exists
                if not models_path.exists():
                    print(f"✗ Directory not found: {models_path}")
                    retry = input("Try another path? (y/N): ").strip().lower()
                    if retry != 'y':
                        print("Using default directory instead")
                        self._show_default_models_dir(workspace)
                        return
                    continue

                if not models_path.is_dir():
                    print(f"✗ Not a directory: {models_path}")
                    retry = input("Try another path? (y/N): ").strip().lower()
                    if retry != 'y':
                        print("Using default directory instead")
                        self._show_default_models_dir(workspace)
                        return
                    continue

                # Auto-detect if they entered ComfyUI root instead of models subdir
                if (models_path / "models").exists() and models_path.name != "models":
                    print(f"\n⚠️ Detected ComfyUI installation at: {models_path}")
                    use_subdir = input(f"Use models/ subdirectory instead? (Y/n): ").strip().lower()
                    if use_subdir != 'n':
                        models_path = models_path / "models"
                        print(f"Using: {models_path}")

                # Scan and confirm
                print(f"\nScanning {models_path}...")
                self._scan_and_set_models_dir(workspace, models_path)
                return
        else:
            # User chose default
            self._show_default_models_dir(workspace)

    def _show_default_models_dir(self, workspace: Workspace) -> None:
        """Show the default models directory message."""
        models_dir = workspace.get_models_directory()
        print(f"\n✓ Using default models directory: {models_dir}")
        print(" (Change later with: cg model index dir <path>)")

    def _scan_and_set_models_dir(self, workspace: Workspace, models_path: Path) -> None:
        """Scan a models directory and set it as the workspace models directory.

        Args:
            workspace: The workspace instance
            models_path: Path to the models directory to scan
        """
        from comfygit_cli.utils.progress import create_model_sync_progress
        from comfygit_core.utils.common import format_size

        try:
            progress = create_model_sync_progress()
            workspace.set_models_directory(models_path, progress=progress)

            # Get stats to show summary
            stats = workspace.get_model_stats()
            total_models = stats.get('total_models', 0)

            if total_models > 0:
                # Calculate total size
                models = workspace.list_models()
                total_size = sum(m.file_size for m in models)

                print(f"\n✓ Models directory set: {models_path}")
                print(f" Found {total_models} models ({format_size(total_size)})")
            else:
                print(f"\n✓ Models directory set: {models_path}")
                print(" (No models found - directory is empty)")
        except Exception as e:
            logger.error(f"Failed to set models directory: {e}")
            print(f"✗ Failed to scan models directory: {e}", file=sys.stderr)
            print(" Using default models directory instead")
            self._show_default_models_dir(workspace)

@with_workspace_logging("list")
|
|
353
|
+
def list_envs(self, args: argparse.Namespace) -> None:
|
|
354
|
+
"""List all environments in the workspace."""
|
|
355
|
+
logger.info("Listing environments in workspace")
|
|
356
|
+
|
|
357
|
+
try:
|
|
358
|
+
environments = self.workspace.list_environments()
|
|
359
|
+
active_env = self.workspace.get_active_environment()
|
|
360
|
+
active_name = active_env.name if active_env else None
|
|
361
|
+
|
|
362
|
+
logger.info(f"Found {len(environments)} environments, active: {active_name or 'none'}")
|
|
363
|
+
|
|
364
|
+
if not environments:
|
|
365
|
+
print("No environments found.")
|
|
366
|
+
print("Create one with: cg create <name>")
|
|
367
|
+
return
|
|
368
|
+
|
|
369
|
+
print("Environments:")
|
|
370
|
+
for env in environments:
|
|
371
|
+
marker = "✓" if env.name == active_name else " "
|
|
372
|
+
status = "(active)" if env.name == active_name else ""
|
|
373
|
+
print(f" {marker} {env.name:15} {status}")
|
|
374
|
+
|
|
375
|
+
except Exception as e:
|
|
376
|
+
logger.error(f"Failed to list environments: {e}")
|
|
377
|
+
print(f"✗ Failed to list environments: {e}", file=sys.stderr)
|
|
378
|
+
sys.exit(1)
|
|
379
|
+
|
|
380
|
+
def debug(self, args: argparse.Namespace) -> None:
|
|
381
|
+
"""Show application debug logs with smart environment detection."""
|
|
382
|
+
import re
|
|
383
|
+
|
|
384
|
+
# Smart detection: workspace flag > -e flag > active env > workspace fallback
|
|
385
|
+
if args.workspace:
|
|
386
|
+
log_file = self.workspace.paths.logs / "workspace" / "full.log"
|
|
387
|
+
log_source = "workspace"
|
|
388
|
+
elif hasattr(args, 'target_env') and args.target_env:
|
|
389
|
+
log_file = self.workspace.paths.logs / args.target_env / "full.log"
|
|
390
|
+
log_source = args.target_env
|
|
391
|
+
else:
|
|
392
|
+
active_env = self.workspace.get_active_environment()
|
|
393
|
+
if active_env:
|
|
394
|
+
log_file = self.workspace.paths.logs / active_env.name / "full.log"
|
|
395
|
+
log_source = active_env.name
|
|
396
|
+
else:
|
|
397
|
+
log_file = self.workspace.paths.logs / "workspace" / "full.log"
|
|
398
|
+
log_source = "workspace"
|
|
399
|
+
|
|
400
|
+
if not log_file.exists():
|
|
401
|
+
print(f"✗ No logs found for {log_source}")
|
|
402
|
+
print(f" Expected at: {log_file}")
|
|
403
|
+
return
|
|
404
|
+
|
|
405
|
+
# Read log lines
|
|
406
|
+
try:
|
|
407
|
+
with open(log_file, 'r', encoding='utf-8') as f:
|
|
408
|
+
lines = f.readlines()
|
|
409
|
+
except Exception as e:
|
|
410
|
+
print(f"✗ Failed to read log file: {e}", file=sys.stderr)
|
|
411
|
+
sys.exit(1)
|
|
412
|
+
|
|
413
|
+
# Group lines into complete log records (header + continuation lines)
|
|
414
|
+
log_pattern = re.compile(r'^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2},\d{3} - ')
|
|
415
|
+
records = []
|
|
416
|
+
current_record = []
|
|
417
|
+
|
|
418
|
+
for line in lines:
|
|
419
|
+
if log_pattern.match(line):
|
|
420
|
+
if current_record:
|
|
421
|
+
records.append(current_record)
|
|
422
|
+
current_record = [line]
|
|
423
|
+
else:
|
|
424
|
+
if current_record:
|
|
425
|
+
current_record.append(line)
|
|
426
|
+
else:
|
|
427
|
+
current_record = [line]
|
|
428
|
+
|
|
429
|
+
if current_record:
|
|
430
|
+
records.append(current_record)
|
|
431
|
+
|
|
432
|
+
# Filter by level if specified
|
|
433
|
+
if args.level:
|
|
434
|
+
records = [r for r in records if r and f" - {args.level} - " in r[0]]
|
|
435
|
+
|
|
436
|
+
# Apply line limit to records
|
|
437
|
+
if not args.full:
|
|
438
|
+
records = records[-args.lines:]
|
|
439
|
+
|
|
440
|
+
if not records:
|
|
441
|
+
print("No logs found matching criteria")
|
|
442
|
+
return
|
|
443
|
+
|
|
444
|
+
# Display
|
|
445
|
+
total_lines = sum(len(record) for record in records)
|
|
446
|
+
print(f"=== Logs for {log_source} ===")
|
|
447
|
+
print(f"Log file: {log_file}")
|
|
448
|
+
if args.level:
|
|
449
|
+
print(f"Level filter: {args.level}")
|
|
450
|
+
print(f"Showing: {len(records)} log records ({total_lines} lines)\n")
|
|
451
|
+
|
|
452
|
+
for record in records:
|
|
453
|
+
for line in record:
|
|
454
|
+
print(line.rstrip())
|
|
455
|
+
|
|
456
|
+
print(f"\n=== End of logs ===")
|
|
457
|
+
if not args.full and len(records) == args.lines:
|
|
458
|
+
print(f"Tip: Use --full to see all logs, or increase --lines to see more")
|
|
459
|
+
|
|
460
|
+
@with_workspace_logging("migrate")
|
|
461
|
+
def migrate(self, args: argparse.Namespace) -> None:
|
|
462
|
+
"""Migrate an existing ComfyUI installation (not implemented in MVP)."""
|
|
463
|
+
print("⚠️ Migration is not yet implemented in this MVP")
|
|
464
|
+
print("\nFor now, you can:")
|
|
465
|
+
print(" 1. Create a new environment: cg create <name>")
|
|
466
|
+
print(" 2. Manually add your custom nodes:")
|
|
467
|
+
print(" cg -e <name> node add <node-name-or-url>")
|
|
468
|
+
print(" 3. Apply changes: cg -e <name> sync")
|
|
469
|
+
|
|
470
|
+
# Still do a basic scan if requested
|
|
471
|
+
if args.scan_only:
|
|
472
|
+
source_path = Path(args.source_path)
|
|
473
|
+
if source_path.exists():
|
|
474
|
+
print(f"\n📋 Basic scan of: {source_path}")
|
|
475
|
+
|
|
476
|
+
# Check for ComfyUI
|
|
477
|
+
if (source_path / "main.py").exists():
|
|
478
|
+
print(" ✓ ComfyUI detected")
|
|
479
|
+
|
|
480
|
+
# Check for custom nodes
|
|
481
|
+
custom_nodes = source_path / "custom_nodes"
|
|
482
|
+
if custom_nodes.exists():
|
|
483
|
+
node_count = len([d for d in custom_nodes.iterdir() if d.is_dir()])
|
|
484
|
+
print(f" ✓ Found {node_count} custom nodes")
|
|
485
|
+
|
|
486
|
+
# Check for models
|
|
487
|
+
models = source_path / "models"
|
|
488
|
+
if models.exists():
|
|
489
|
+
print(" ✓ Models directory found")
|
|
490
|
+
else:
|
|
491
|
+
print(f"✗ Path not found: {source_path}")
|
|
492
|
+
|
|
493
|
+
@with_workspace_logging("import")
|
|
494
|
+
def import_env(self, args: argparse.Namespace) -> None:
|
|
495
|
+
"""Import a ComfyGit environment from a tarball or git repository."""
|
|
496
|
+
from pathlib import Path
|
|
497
|
+
|
|
498
|
+
from comfygit_core.utils.git import is_git_url
|
|
499
|
+
|
|
500
|
+
# Ensure workspace exists, creating it if necessary
|
|
501
|
+
workspace = self._get_or_create_workspace(args)
|
|
502
|
+
|
|
503
|
+
if not args.path:
|
|
504
|
+
print("✗ Please specify path to import tarball or git URL")
|
|
505
|
+
print(" Usage: cg import <path.tar.gz|git-url>")
|
|
506
|
+
sys.exit(1)
|
|
507
|
+
|
|
508
|
+
# Detect if this is a git URL or local tarball
|
|
509
|
+
is_git = is_git_url(args.path)
|
|
510
|
+
|
|
511
|
+
if is_git:
|
|
512
|
+
print("📦 Importing environment from git repository")
|
|
513
|
+
print(f" URL: {args.path}")
|
|
514
|
+
if hasattr(args, 'branch') and args.branch:
|
|
515
|
+
print(f" Branch/Tag: {args.branch}")
|
|
516
|
+
print()
|
|
517
|
+
else:
|
|
518
|
+
tarball_path = Path(args.path)
|
|
519
|
+
if not tarball_path.exists():
|
|
520
|
+
print(f"✗ File not found: {tarball_path}")
|
|
521
|
+
sys.exit(1)
|
|
522
|
+
print(f"📦 Importing environment from {tarball_path.name}")
|
|
523
|
+
print()
|
|
524
|
+
|
|
525
|
+
# Get environment name from args or prompt
|
|
526
|
+
if hasattr(args, 'name') and args.name:
|
|
527
|
+
env_name = args.name
|
|
528
|
+
else:
|
|
529
|
+
env_name = input("Environment name: ").strip()
|
|
530
|
+
if not env_name:
|
|
531
|
+
print("✗ Environment name required")
|
|
532
|
+
sys.exit(1)
|
|
533
|
+
|
|
534
|
+
# Ask for model download strategy
|
|
535
|
+
print("\nModel download strategy:")
|
|
536
|
+
print(" 1. all - Download all models with sources")
|
|
537
|
+
print(" 2. required - Download only required models")
|
|
538
|
+
print(" 3. skip - Skip all downloads (can resolve later)")
|
|
539
|
+
strategy_choice = input("Choice (1-3) [1]: ").strip() or "1"
|
|
540
|
+
|
|
541
|
+
strategy_map = {"1": "all", "2": "required", "3": "skip"}
|
|
542
|
+
strategy = strategy_map.get(strategy_choice, "all")
|
|
543
|
+
|
|
544
|
+
# CLI callbacks for progress updates
|
|
545
|
+
class CLIImportCallbacks(ImportCallbacks):
|
|
546
|
+
def __init__(self):
|
|
547
|
+
self.manifest = None
|
|
548
|
+
self.dep_group_successes = []
|
|
549
|
+
self.dep_group_failures = []
|
|
550
|
+
|
|
551
|
+
def on_phase(self, phase: str, description: str):
|
|
552
|
+
# Add emojis based on phase
|
|
553
|
+
emoji_map = {
|
|
554
|
+
"clone_repo": "📥",
|
|
555
|
+
"clone_comfyui": "🔧",
|
|
556
|
+
"restore_comfyui": "🔧",
|
|
557
|
+
"configure_pytorch": "🔧",
|
|
558
|
+
"install_deps": "🔧",
|
|
559
|
+
"init_git": "🔧",
|
|
560
|
+
"copy_workflows": "📝",
|
|
561
|
+
"sync_nodes": "📦",
|
|
562
|
+
"resolve_models": "🔄"
|
|
563
|
+
}
|
|
564
|
+
|
|
565
|
+
# First phase shows initialization header
|
|
566
|
+
if phase == "clone_repo":
|
|
567
|
+
print(f"\n📥 {description}")
|
|
568
|
+
elif phase in ["clone_comfyui", "restore_comfyui"]:
|
|
569
|
+
print("\n🔧 Initializing environment...")
|
|
570
|
+
print(f" {description}")
|
|
571
|
+
elif phase in ["install_deps", "init_git", "configure_pytorch"]:
|
|
572
|
+
print(f" {description}")
|
|
573
|
+
elif phase == "copy_workflows":
|
|
574
|
+
print("\n📝 Setting up workflows...")
|
|
575
|
+
elif phase == "sync_nodes":
|
|
576
|
+
print("\n📦 Syncing custom nodes...")
|
|
577
|
+
elif phase == "resolve_models":
|
|
578
|
+
print(f"\n🔄 {description}")
|
|
579
|
+
else:
|
|
580
|
+
emoji = emoji_map.get(phase, "")
|
|
581
|
+
print(f"\n{emoji} {description}" if emoji else f"\n{description}")
|
|
582
|
+
|
|
583
|
+
def on_dependency_group_start(self, group_name: str, is_optional: bool):
|
|
584
|
+
"""Show which dependency group is being installed."""
|
|
585
|
+
optional_marker = " (optional)" if is_optional else ""
|
|
586
|
+
print(f" Installing {group_name}{optional_marker}...", end="", flush=True)
|
|
587
|
+
|
|
588
|
+
def on_dependency_group_complete(self, group_name: str, success: bool, error: str | None = None):
|
|
589
|
+
"""Mark group as succeeded or failed."""
|
|
590
|
+
if success:
|
|
591
|
+
print(" ✓")
|
|
592
|
+
self.dep_group_successes.append(group_name)
|
|
593
|
+
else:
|
|
594
|
+
print(" ✗")
|
|
595
|
+
self.dep_group_failures.append((group_name, error or "Unknown error"))
|
|
596
|
+
|
|
597
|
+
def on_workflow_copied(self, workflow_name: str):
|
|
598
|
+
print(f" Copied: {workflow_name}")
|
|
599
|
+
|
|
600
|
+
def on_node_installed(self, node_name: str):
|
|
601
|
+
print(f" Installed: {node_name}")
|
|
602
|
+
|
|
603
|
+
def on_workflow_resolved(self, workflow_name: str, downloads: int):
|
|
604
|
+
print(f" • {workflow_name}", end="")
|
|
605
|
+
if downloads:
|
|
606
|
+
print(f" (downloaded {downloads} models)")
|
|
607
|
+
else:
|
|
608
|
+
print()
|
|
609
|
+
|
|
610
|
+
def on_error(self, error: str):
|
|
611
|
+
print(f" ⚠️ {error}")
|
|
612
|
+
|
|
613
|
+
def on_download_failures(self, failures: list[tuple[str, str]]):
|
|
614
|
+
if not failures:
|
|
615
|
+
return
|
|
616
|
+
|
|
617
|
+
print(f"\n⚠️ {len(failures)} model(s) failed to download:")
|
|
618
|
+
for workflow_name, model_name in failures:
|
|
619
|
+
print(f" • {model_name} (from {workflow_name})")
|
|
620
|
+
|
|
621
|
+
print("\nModels are saved as download intents - you can download them later with:")
|
|
622
|
+
print(" cg workflow resolve <workflow>")
|
|
623
|
+
print("\nIf you see 401 Unauthorized errors, add your Civitai API key:")
|
|
624
|
+
print(" cg config --civitai-key <your-token>")
|
|
625
|
+
|
|
626
|
+
def on_download_batch_start(self, count: int):
|
|
627
|
+
"""Show batch download start."""
|
|
628
|
+
print(f"\n⬇️ Downloading {count} model(s)...")
|
|
629
|
+
|
|
630
|
+
def on_download_file_start(self, name: str, idx: int, total: int):
|
|
631
|
+
"""Show individual file download start."""
|
|
632
|
+
print(f"\n[{idx}/{total}] {name}")
|
|
633
|
+
|
|
634
|
+
def on_download_file_progress(self, downloaded: int, total: int | None):
|
|
635
|
+
"""Show download progress bar."""
|
|
636
|
+
downloaded_mb = downloaded / (1024 * 1024)
|
|
637
|
+
if total:
|
|
638
|
+
total_mb = total / (1024 * 1024)
|
|
639
|
+
pct = (downloaded / total) * 100
|
|
640
|
+
print(f"\rDownloading... {downloaded_mb:.1f} MB / {total_mb:.1f} MB ({pct:.0f}%)", end='', flush=True)
|
|
641
|
+
else:
|
|
642
|
+
print(f"\rDownloading... {downloaded_mb:.1f} MB", end='', flush=True)
|
|
643
|
+
|
|
644
|
+
def on_download_file_complete(self, name: str, success: bool, error: str | None):
|
|
645
|
+
"""Show file download completion."""
|
|
646
|
+
if success:
|
|
647
|
+
print(" ✓ Complete")
|
|
648
|
+
else:
|
|
649
|
+
print(f" ✗ Failed: {error}")
|
|
650
|
+
|
|
651
|
+
def on_download_batch_complete(self, success: int, total: int):
|
|
652
|
+
"""Show batch download completion."""
|
|
653
|
+
if success == total:
|
|
654
|
+
print(f"\n✅ Downloaded {total} model(s)")
|
|
655
|
+
elif success > 0:
|
|
656
|
+
print(f"\n⚠️ Downloaded {success}/{total} models (some failed)")
|
|
657
|
+
else:
|
|
658
|
+
print(f"\n❌ All downloads failed (0/{total})")
|
|
659
|
+
|
|
660
|
+
callbacks_instance = CLIImportCallbacks()
|
|
661
|
+
|
|
662
|
+
try:
|
|
663
|
+
if is_git:
|
|
664
|
+
env = workspace.import_from_git(
|
|
665
|
+
git_url=args.path,
|
|
666
|
+
name=env_name,
|
|
667
|
+
model_strategy=strategy,
|
|
668
|
+
branch=getattr(args, 'branch', None),
|
|
669
|
+
callbacks=callbacks_instance,
|
|
670
|
+
torch_backend=args.torch_backend,
|
|
671
|
+
)
|
|
672
|
+
else:
|
|
673
|
+
env = workspace.import_environment(
|
|
674
|
+
tarball_path=Path(args.path),
|
|
675
|
+
name=env_name,
|
|
676
|
+
model_strategy=strategy,
|
|
677
|
+
callbacks=callbacks_instance,
|
|
678
|
+
torch_backend=args.torch_backend,
|
|
679
|
+
)
|
|
680
|
+
|
|
681
|
+
print(f"\n✅ Import complete: {env.name}")
|
|
682
|
+
|
|
683
|
+
# Show dependency group summary if any failed
|
|
684
|
+
if callbacks_instance.dep_group_failures:
|
|
685
|
+
print("\n⚠️ Some optional dependency groups failed to install:")
|
|
686
|
+
for group_name, error in callbacks_instance.dep_group_failures:
|
|
687
|
+
print(f" ✗ {group_name}")
|
|
688
|
+
print("\nSome functionality may be degraded or some nodes may not work properly.")
|
|
689
|
+
print("The environment will still function with reduced capabilities.")
|
|
690
|
+
else:
|
|
691
|
+
print(" Environment ready to use!")
|
|
692
|
+
|
|
693
|
+
# Set as active if --use flag provided
|
|
694
|
+
if hasattr(args, 'use') and args.use:
|
|
695
|
+
workspace.set_active_environment(env.name)
|
|
696
|
+
print(f" '{env.name}' set as active environment")
|
|
697
|
+
else:
|
|
698
|
+
print(f"\nActivate with: cg use {env_name}")
|
|
699
|
+
|
|
700
|
+
except Exception as e:
|
|
701
|
+
print(f"\n✗ Import failed: {e}")
|
|
702
|
+
sys.exit(1)
|
|
703
|
+
|
|
704
|
+
sys.exit(0)
|
|
705
|
+
|
|
706
|
+
@with_workspace_logging("export")
|
|
707
|
+
def export_env(self, args: argparse.Namespace) -> None:
|
|
708
|
+
"""Export a ComfyGit environment to a package."""
|
|
709
|
+
from datetime import datetime
|
|
710
|
+
from pathlib import Path
|
|
711
|
+
|
|
712
|
+
# Get active environment or from -e flag
|
|
713
|
+
try:
|
|
714
|
+
if hasattr(args, 'target_env') and args.target_env:
|
|
715
|
+
env = self.workspace.get_environment(args.target_env)
|
|
716
|
+
else:
|
|
717
|
+
env = self.workspace.get_active_environment()
|
|
718
|
+
if not env:
|
|
719
|
+
print("✗ No active environment. Use: cg use <name>")
|
|
720
|
+
print(" Or specify with: cg -e <name> export")
|
|
721
|
+
sys.exit(1)
|
|
722
|
+
except Exception as e:
|
|
723
|
+
print(f"✗ Error getting environment: {e}")
|
|
724
|
+
sys.exit(1)
|
|
725
|
+
|
|
726
|
+
# Determine output path
|
|
727
|
+
if args.path:
|
|
728
|
+
output_path = Path(args.path)
|
|
729
|
+
else:
|
|
730
|
+
# Default: <env_name>_export_<date>.tar.gz in current directory
|
|
731
|
+
timestamp = datetime.now().strftime("%Y%m%d")
|
|
732
|
+
output_path = Path.cwd() / f"{env.name}_export_{timestamp}.tar.gz"
|
|
733
|
+
|
|
734
|
+
print(f"📦 Exporting environment: {env.name}")
|
|
735
|
+
print()
|
|
736
|
+
|
|
737
|
+
# Export callbacks
|
|
738
|
+
class CLIExportCallbacks(ExportCallbacks):
|
|
739
|
+
def __init__(self):
|
|
740
|
+
self.models_without_sources = []
|
|
741
|
+
|
|
742
|
+
def on_models_without_sources(self, models: list):
|
|
743
|
+
self.models_without_sources = models
|
|
744
|
+
|
|
745
|
+
callbacks = CLIExportCallbacks()
|
|
746
|
+
|
|
747
|
+
try:
|
|
748
|
+
tarball_path = env.export_environment(output_path, callbacks=callbacks)
|
|
749
|
+
|
|
750
|
+
# Check if we need user confirmation
|
|
751
|
+
if callbacks.models_without_sources and not args.allow_issues:
|
|
752
|
+
print("⚠️ Export validation:")
|
|
753
|
+
print(f"\n{len(callbacks.models_without_sources)} model(s) have no source URLs.\n")
|
|
754
|
+
|
|
755
|
+
# Show first 3 models initially
|
|
756
|
+
shown_all = len(callbacks.models_without_sources) <= 3
|
|
757
|
+
|
|
758
|
+
def show_models(show_all=False):
|
|
759
|
+
if show_all or len(callbacks.models_without_sources) <= 3:
|
|
760
|
+
for model_info in callbacks.models_without_sources:
|
|
761
|
+
print(f" • {model_info.filename}")
|
|
762
|
+
workflows_str = ", ".join(model_info.workflows)
|
|
763
|
+
print(f" Used by: {workflows_str}")
|
|
764
|
+
else:
|
|
765
|
+
for model_info in callbacks.models_without_sources[:3]:
|
|
766
|
+
print(f" • {model_info.filename}")
|
|
767
|
+
workflows_str = ", ".join(model_info.workflows)
|
|
768
|
+
print(f" Used by: {workflows_str}")
|
|
769
|
+
remaining = len(callbacks.models_without_sources) - 3
|
|
770
|
+
print(f"\n ... and {remaining} more")
|
|
771
|
+
|
|
772
|
+
show_models()
|
|
773
|
+
|
|
774
|
+
print("\n⚠️ Recipients won't be able to download these models automatically.")
|
|
775
|
+
print(" Add sources: cg model add-source")
|
|
776
|
+
|
|
777
|
+
# Single confirmation loop
|
|
778
|
+
while True:
|
|
779
|
+
if shown_all or len(callbacks.models_without_sources) <= 3:
|
|
780
|
+
response = input("\nContinue export? (y/N): ").strip().lower()
|
|
781
|
+
else:
|
|
782
|
+
response = input("\nContinue export? (y/N) or (s)how all models: ").strip().lower()
|
|
783
|
+
|
|
784
|
+
if response == 's' and not shown_all:
|
|
785
|
+
print()
|
|
786
|
+
show_models(show_all=True)
|
|
787
|
+
shown_all = True
|
|
788
|
+
print("\n⚠️ Recipients won't be able to download these models automatically.")
|
|
789
|
+
print(" Add sources: cg model add-source")
|
|
790
|
+
continue
|
|
791
|
+
elif response == 'y':
|
|
792
|
+
break
|
|
793
|
+
else:
|
|
794
|
+
print("\n✗ Export cancelled")
|
|
795
|
+
print(" Fix with: cg model add-source")
|
|
796
|
+
# Clean up the created tarball
|
|
797
|
+
if tarball_path.exists():
|
|
798
|
+
tarball_path.unlink()
|
|
799
|
+
sys.exit(1)
|
|
800
|
+
|
|
801
|
+
size_mb = tarball_path.stat().st_size / (1024 * 1024)
|
|
802
|
+
print(f"\n✅ Export complete: {tarball_path.name} ({size_mb:.1f} MB)")
|
|
803
|
+
print("\nShare this file to distribute your complete environment!")
|
|
804
|
+
|
|
805
|
+
except Exception as e:
|
|
806
|
+
# Handle CDExportError with rich context
|
|
807
|
+
from comfygit_core.models.exceptions import CDExportError
|
|
808
|
+
|
|
809
|
+
if isinstance(e, CDExportError):
|
|
810
|
+
print(f"✗ {str(e)}")
|
|
811
|
+
|
|
812
|
+
# Show context-specific details
|
|
813
|
+
if e.context:
|
|
814
|
+
if e.context.uncommitted_workflows:
|
|
815
|
+
print("\n📋 Uncommitted workflows:")
|
|
816
|
+
for wf in e.context.uncommitted_workflows:
|
|
817
|
+
print(f" • {wf}")
|
|
818
|
+
print("\n💡 Commit first:")
|
|
819
|
+
print(" cg commit -m 'Pre-export checkpoint'")
|
|
820
|
+
elif e.context.uncommitted_git_changes:
|
|
821
|
+
print("\n💡 Commit git changes first:")
|
|
822
|
+
print(" cg commit -m 'Pre-export checkpoint'")
|
|
823
|
+
elif e.context.has_unresolved_issues:
|
|
824
|
+
print("\n💡 Resolve workflow issues first:")
|
|
825
|
+
print(" cg workflow resolve <workflow_name>")
|
|
826
|
+
sys.exit(1)
|
|
827
|
+
|
|
828
|
+
# Generic error handling
|
|
829
|
+
print(f"✗ Export failed: {e}")
|
|
830
|
+
sys.exit(1)
|
|
831
|
+
|
|
832
|
+
sys.exit(0)
|
|
833
|
+
|
|
834
|
+
    # === Model Management Commands ===

    @with_workspace_logging("model index list")
    def model_index_list(self, args: argparse.Namespace) -> None:
        """List all indexed models."""
        from collections import defaultdict
        from pathlib import Path

        from comfygit_core.utils.common import format_size

        logger.info("Listing all indexed models")

        try:
            # Get all models from the index
            models = self.workspace.list_models()

            logger.info(f"Retrieved {len(models)} models from index")

            if not models:
                print("📦 All indexed models:")
                print(" No models found")
                print(" Run 'cg model index dir <path>' to set your models directory")
                return

            # Group models by hash to find duplicates
            grouped = defaultdict(lambda: {'model': None, 'paths': []})
            for model in models:
                grouped[model.hash]['model'] = model
                if model.base_directory:
                    full_path = Path(model.base_directory) / model.relative_path
                else:
                    full_path = Path(model.relative_path)
                grouped[model.hash]['paths'].append(full_path)

            # Filter to duplicates if requested
            if args.duplicates:
                grouped = {h: g for h, g in grouped.items() if len(g['paths']) > 1}
                if not grouped:
                    print("📦 No duplicate models found")
                    print(" All models exist in a single location")
                    return

            # Convert to list for pagination
            results = list(grouped.values())

            # Define how to render a single model
            def render_model(group):
                model = group['model']
                paths = group['paths']
                size_str = format_size(model.file_size)
                print(f"\n {model.filename}")
                print(f" Size: {size_str}")
                print(f" Hash: {model.hash[:12]}...")
                if len(paths) == 1:
                    print(f" Path: {paths[0]}")
                else:
                    print(f" Locations ({len(paths)}):")
                    for path in paths:
                        print(f" • {path}")

            # Build header
            stats = self.workspace.get_model_stats()
            total_models = stats.get('total_models', 0)
            total_locations = stats.get('total_locations', 0)

            if args.duplicates:
                duplicate_count = len(results)
                duplicate_files = sum(len(g['paths']) for g in results)
                header = f"📦 Duplicate models ({duplicate_count} models, {duplicate_files} files):"
            else:
                header = f"📦 All indexed models ({total_models} unique, {total_locations} files):"

            paginate(results, render_model, page_size=5, header=header)

        except Exception as e:
            logger.error(f"Failed to list models: {e}")
            print(f"✗ Failed to list models: {e}", file=sys.stderr)
            sys.exit(1)

    @with_workspace_logging("model index find")
    def model_index_find(self, args: argparse.Namespace) -> None:
        """Search for models by hash or filename."""
        from comfygit_core.utils.common import format_size

        query = args.query
        logger.info(f"Searching models for query: '{query}'")

        try:
            # Search for models
            results = self.workspace.search_models(query)

            logger.info(f"Found {len(results)} models matching query")

            if not results:
                print(f"No models found matching: {query}")
                return

            # Group models by hash (same file in different locations)
            from collections import defaultdict
            from pathlib import Path
            from typing import Any

            grouped: defaultdict[str, dict[str, Any]] = defaultdict(lambda: {'model': None, 'paths': []})
            for model in results:
                grouped[model.hash]['model'] = model
                if model.base_directory:
                    full_path = Path(model.base_directory) / model.relative_path
                else:
                    full_path = Path(model.relative_path)
                grouped[model.hash]['paths'].append(full_path)

            # Convert to list for pagination
            grouped_results = list(grouped.values())

            # Define how to render a single model with all its locations
            def render_model(group):
                model = group['model']
                paths = group['paths']
                size_str = format_size(model.file_size)
                print(f"\n {model.filename}")
                print(f" Size: {size_str}")
                print(f" Hash: {model.hash}")
                if len(paths) == 1:
                    print(f" Location: {paths[0]}")
                else:
                    print(f" Locations ({len(paths)}):")
                    for path in paths:
                        print(f" • {path}")

            # Use pagination for results
            unique_count = len(grouped_results)
            total_count = len(results)
            if unique_count == total_count:
                header = f"🔍 Found {unique_count} model(s) matching '{query}':"
            else:
                header = f"🔍 Found {unique_count} unique model(s) ({total_count} locations) matching '{query}':"
            paginate(grouped_results, render_model, page_size=5, header=header)

        except Exception as e:
            logger.error(f"Model search failed for query '{query}': {e}")
            print(f"✗ Search failed: {e}", file=sys.stderr)
            sys.exit(1)

@with_workspace_logging("model index show")
|
|
978
|
+
def model_index_show(self, args: argparse.Namespace) -> None:
|
|
979
|
+
"""Show detailed information about a specific model."""
|
|
980
|
+
from datetime import datetime
|
|
981
|
+
|
|
982
|
+
from comfygit_core.utils.common import format_size
|
|
983
|
+
|
|
984
|
+
identifier = args.identifier
|
|
985
|
+
logger.info(f"Showing details for model: '{identifier}'")
|
|
986
|
+
|
|
987
|
+
try:
|
|
988
|
+
details = self.workspace.get_model_details(identifier)
|
|
989
|
+
model = details.model
|
|
990
|
+
sources = details.sources
|
|
991
|
+
locations = details.all_locations
|
|
992
|
+
|
|
993
|
+
# Display detailed information
|
|
994
|
+
print(f"📦 Model Details: {model.filename}\n")
|
|
995
|
+
|
|
996
|
+
# Core identification
|
|
997
|
+
print(f" Hash: {model.hash}")
|
|
998
|
+
print(f" Blake3: {model.blake3_hash or 'Not computed'}")
|
|
999
|
+
print(f" SHA256: {model.sha256_hash or 'Not computed'}")
|
|
1000
|
+
print(f" Size: {format_size(model.file_size)}")
|
|
1001
|
+
print(f" Category: {model.category}")
|
|
1002
|
+
|
|
1003
|
+
# Timestamps
|
|
1004
|
+
first_seen = datetime.fromtimestamp(model.last_seen).strftime("%Y-%m-%d %H:%M:%S")
|
|
1005
|
+
print(f" Last Seen: {first_seen}")
|
|
1006
|
+
|
|
1007
|
+
# Locations
|
|
1008
|
+
print(f"\n Locations ({len(locations)}):")
|
|
1009
|
+
for loc in locations:
|
|
1010
|
+
from pathlib import Path
|
|
1011
|
+
mtime = datetime.fromtimestamp(loc['mtime']).strftime("%Y-%m-%d %H:%M:%S")
|
|
1012
|
+
if loc.get('base_directory'):
|
|
1013
|
+
full_path = Path(loc['base_directory']) / loc['relative_path']
|
|
1014
|
+
print(f" • {full_path}")
|
|
1015
|
+
else:
|
|
1016
|
+
print(f" • {loc['relative_path']}")
|
|
1017
|
+
print(f" Modified: {mtime}")
|
|
1018
|
+
|
|
1019
|
+
# Sources
|
|
1020
|
+
if sources:
|
|
1021
|
+
print(f"\n Sources ({len(sources)}):")
|
|
1022
|
+
for source in sources:
|
|
1023
|
+
print(f" • {source['type'].title()}")
|
|
1024
|
+
print(f" URL: {source['url']}")
|
|
1025
|
+
if source['metadata']:
|
|
1026
|
+
for key, value in source['metadata'].items():
|
|
1027
|
+
print(f" {key}: {value}")
|
|
1028
|
+
added = datetime.fromtimestamp(source['added_time']).strftime("%Y-%m-%d %H:%M:%S")
|
|
1029
|
+
print(f" Added: {added}")
|
|
1030
|
+
else:
|
|
1031
|
+
print("\n Sources: None")
|
|
1032
|
+
print(f" Add with: cg model add-source {model.hash[:12]}")
|
|
1033
|
+
|
|
1034
|
+
# Metadata (if any)
|
|
1035
|
+
if model.metadata:
|
|
1036
|
+
print("\n Metadata:")
|
|
1037
|
+
for key, value in model.metadata.items():
|
|
1038
|
+
print(f" {key}: {value}")
|
|
1039
|
+
|
|
1040
|
+
except KeyError:
|
|
1041
|
+
print(f"No model found matching: {identifier}")
|
|
1042
|
+
except ValueError:
|
|
1043
|
+
# Handle ambiguous matches - group by hash to show unique models
|
|
1044
|
+
from collections import defaultdict
|
|
1045
|
+
results = self.workspace.search_models(identifier)
|
|
1046
|
+
|
|
1047
|
+
grouped = defaultdict(list)
|
|
1048
|
+
for model in results:
|
|
1049
|
+
grouped[model.hash].append(model)
|
|
1050
|
+
|
|
1051
|
+
print(f"Multiple models found matching '{identifier}':\n")
|
|
1052
|
+
for idx, (hash_val, models) in enumerate(grouped.items(), 1):
|
|
1053
|
+
model = models[0] # Use first for display
|
|
1054
|
+
location_count = f" ({len(models)} locations)" if len(models) > 1 else ""
|
|
1055
|
+
print(f" {idx}. {model.filename}{location_count}")
|
|
1056
|
+
print(f" Hash: {hash_val[:12]}...")
|
|
1057
|
+
print(f" Path: {model.relative_path}")
|
|
1058
|
+
|
|
1059
|
+
print("\nUse more specific identifier:")
|
|
1060
|
+
first_model = list(grouped.values())[0][0]
|
|
1061
|
+
print(f" Full hash: cg model index show {first_model.hash}")
|
|
1062
|
+
print(f" Filename: cg model index show {first_model.filename}")
|
|
1063
|
+
except Exception as e:
|
|
1064
|
+
logger.error(f"Failed to show model details for '{identifier}': {e}")
|
|
1065
|
+
print(f"✗ Failed to show model: {e}", file=sys.stderr)
|
|
1066
|
+
sys.exit(1)
|
|
1067
|
+
|
|
1068
|
+
    # === Model Directory Commands ===

    # === Registry Commands ===

    @with_workspace_logging("registry status")
    def registry_status(self, args: argparse.Namespace) -> None:
        """Show registry cache status."""
        try:
            info = self.workspace.get_registry_info()

            if not info['exists']:
                print("✗ No registry data cached")
                print(" Run 'cg index registry update' to fetch")
                return

            print("📦 Registry Cache Status:")
            print(f" Path: {info['path']}")
            print(f" Age: {info['age_hours']} hours")
            print(f" Stale: {'Yes' if info['stale'] else 'No'} (>24 hours)")
            if info['version']:
                print(f" Version: {info['version']}")

        except Exception as e:
            logger.error(f"Failed to get registry status: {e}")
            print(f"✗ Failed to get registry status: {e}", file=sys.stderr)
            sys.exit(1)

    @with_workspace_logging("registry update")
    def registry_update(self, args: argparse.Namespace) -> None:
        """Update registry data from GitHub."""
        try:
            print("🔄 Updating registry data from GitHub...")

            success = self.workspace.update_registry_data()

            if success:
                info = self.workspace.get_registry_info()
                print("✓ Registry data updated successfully")
                if info['version']:
                    print(f" Version: {info['version']}")
            else:
                print("✗ Failed to update registry data")
                print(" Using existing cache if available")

        except Exception as e:
            logger.error(f"Failed to update registry: {e}")
            print(f"✗ Failed to update registry: {e}", file=sys.stderr)
            sys.exit(1)

@with_workspace_logging("model index dir")
|
|
1118
|
+
def model_dir_add(self, args: argparse.Namespace) -> None:
|
|
1119
|
+
"""Set the global models directory."""
|
|
1120
|
+
from comfygit_cli.utils.progress import create_model_sync_progress
|
|
1121
|
+
|
|
1122
|
+
directory_path = args.path.resolve()
|
|
1123
|
+
logger.info(f"Setting models directory: {directory_path}")
|
|
1124
|
+
|
|
1125
|
+
try:
|
|
1126
|
+
print(f"📁 Setting global models directory: {directory_path}")
|
|
1127
|
+
|
|
1128
|
+
if not directory_path.exists():
|
|
1129
|
+
print(f"✗ Directory does not exist: {directory_path}")
|
|
1130
|
+
sys.exit(1)
|
|
1131
|
+
|
|
1132
|
+
if not directory_path.is_dir():
|
|
1133
|
+
print(f"✗ Path is not a directory: {directory_path}")
|
|
1134
|
+
sys.exit(1)
|
|
1135
|
+
|
|
1136
|
+
# Set the models directory and perform initial scan with progress
|
|
1137
|
+
progress = create_model_sync_progress()
|
|
1138
|
+
self.workspace.set_models_directory(directory_path, progress=progress)
|
|
1139
|
+
|
|
1140
|
+
print(f"\n✓ Models directory set successfully: {directory_path}")
|
|
1141
|
+
print(" Use 'cg model index sync' to rescan when models change")
|
|
1142
|
+
|
|
1143
|
+
except Exception as e:
|
|
1144
|
+
logger.error(f"Failed to set models directory '{directory_path}': {e}")
|
|
1145
|
+
print(f"✗ Failed to set models directory: {e}", file=sys.stderr)
|
|
1146
|
+
sys.exit(1)
|
|
1147
|
+
|
|
1148
|
+
@with_workspace_logging("model index sync")
|
|
1149
|
+
def model_index_sync(self, args: argparse.Namespace) -> None:
|
|
1150
|
+
"""Scan models directory and update index."""
|
|
1151
|
+
from comfygit_cli.utils.progress import create_model_sync_progress
|
|
1152
|
+
|
|
1153
|
+
logger.info("Syncing models directory")
|
|
1154
|
+
|
|
1155
|
+
try:
|
|
1156
|
+
progress = create_model_sync_progress()
|
|
1157
|
+
result = self.workspace.sync_model_directory(progress=progress)
|
|
1158
|
+
|
|
1159
|
+
if result is None:
|
|
1160
|
+
print("✗ No models directory configured")
|
|
1161
|
+
print(" Run 'cg model index dir <path>' to set your models directory")
|
|
1162
|
+
return
|
|
1163
|
+
|
|
1164
|
+
# Progress callback already handled display
|
|
1165
|
+
|
|
1166
|
+
except Exception as e:
|
|
1167
|
+
logger.error(f"Failed to sync models: {e}")
|
|
1168
|
+
print(f"✗ Failed to sync: {e}", file=sys.stderr)
|
|
1169
|
+
sys.exit(1)
|
|
1170
|
+
|
|
1171
|
+
@with_workspace_logging("model index status")
|
|
1172
|
+
def model_index_status(self, args: argparse.Namespace) -> None:
|
|
1173
|
+
"""Show model index status and statistics."""
|
|
1174
|
+
logger.info("Getting model status")
|
|
1175
|
+
|
|
1176
|
+
try:
|
|
1177
|
+
# Get models directory info
|
|
1178
|
+
models_dir = self.workspace.get_models_directory()
|
|
1179
|
+
|
|
1180
|
+
# Get stats
|
|
1181
|
+
stats = self.workspace.get_model_stats()
|
|
1182
|
+
|
|
1183
|
+
print("📊 Model Index Status:")
|
|
1184
|
+
print()
|
|
1185
|
+
|
|
1186
|
+
if models_dir:
|
|
1187
|
+
exists = "✓" if models_dir.exists() else "✗"
|
|
1188
|
+
print(f" Models Directory: {exists} {models_dir}")
|
|
1189
|
+
else:
|
|
1190
|
+
print(" Models Directory: Not configured")
|
|
1191
|
+
print(" Run 'cg model index dir <path>' to set your models directory")
|
|
1192
|
+
return
|
|
1193
|
+
|
|
1194
|
+
total_models = stats.get('total_models', 0)
|
|
1195
|
+
total_locations = stats.get('total_locations', 0)
|
|
1196
|
+
print(f" Total Models: {total_models} unique models")
|
|
1197
|
+
print(f" Total Files: {total_locations} files indexed")
|
|
1198
|
+
|
|
1199
|
+
if total_locations > total_models:
|
|
1200
|
+
duplicates = total_locations - total_models
|
|
1201
|
+
print(f" Duplicates: {duplicates} duplicate files detected")
|
|
1202
|
+
|
|
1203
|
+
except Exception as e:
|
|
1204
|
+
logger.error(f"Failed to get status: {e}")
|
|
1205
|
+
print(f"✗ Failed to get status: {e}", file=sys.stderr)
|
|
1206
|
+
sys.exit(1)
|
|
1207
|
+
|
|
1208
|
+
    @with_workspace_logging("model download")
    def model_download(self, args: argparse.Namespace) -> None:
        """Download model from URL with interactive path confirmation."""
        from comfygit_core.services.model_downloader import DownloadRequest

        url = args.url
        logger.info(f"Downloading model from: {url}")

        try:
            # Get models directory
            models_dir = self.workspace.get_models_directory()
            downloader = self.workspace.model_downloader

            # Determine target path
            if args.path:
                # User specified explicit path
                suggested_path = Path(args.path)
            elif args.category:
                # User specified category - extract filename from URL
                filename = downloader._extract_filename(url, None)
                suggested_path = Path(args.category) / filename
            else:
                # Auto-suggest based on URL/filename
                suggested_path = downloader.suggest_path(url, node_type=None, filename_hint=None)

            # Path confirmation loop (unless --yes)
            while not args.yes:
                print(f"\n📥 Downloading from: {url}")
                print(f"   Model will be saved to: {suggested_path}")
                print("\n  [Y] Continue  [m] Change path  [c] Cancel")

                choice = input("Choice [Y]/m/c: ").strip().lower()

                if choice == 'c':
                    print("✗ Download cancelled")
                    return
                elif choice == 'm':
                    new_path = input("\nEnter path (relative to models dir): ").strip()
                    if new_path:
                        suggested_path = Path(new_path)
                        continue  # Show menu again with updated path
                    else:
                        print("✗ Download cancelled")
                        return
                elif choice in ['y', '']:
                    break  # Confirmed, proceed to download
                else:
                    print("Invalid choice. Please enter Y, m, or c.")

            # Create download request
            target_path = models_dir / suggested_path
            request = DownloadRequest(
                url=url,
                target_path=target_path,
                workflow_name=None
            )

            # Download with progress callback
            print(f"\n📥 Downloading to: {suggested_path}")
            progress_callback = create_progress_callback()
            result = downloader.download(request, progress_callback=progress_callback)
            print()  # New line after progress

            # Handle result
            if not result.success:
                print(f"✗ Download failed: {result.error}")

                # Show Civitai auth help if needed
                if "civitai.com" in url.lower() and result.error and (
                    "401" in str(result.error) or "unauthorized" in str(result.error).lower()
                ):
                    show_civitai_auth_help()

                sys.exit(1)

            # Success - show stats
            if result.model:
                print()
                show_download_stats(result.model)
                logger.info(f"Successfully downloaded model to {result.model.relative_path}")
            else:
                print("✓ Download complete")

        except Exception as e:
            logger.error(f"Model download failed: {e}")
            print(f"✗ Download failed: {e}", file=sys.stderr)
            sys.exit(1)

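    # Illustrative invocations of the command above (a sketch only: the exact
    # flag spellings are declared in cli.py; here they are assumed to mirror the
    # argparse attributes args.path, args.category and args.yes read by
    # model_download):
    #
    #   cg model download https://example.com/files/model.safetensors
    #   cg model download <url> --category checkpoints
    #   cg model download <url> --path checkpoints/sdxl/model.safetensors --yes
    #
    # --path takes precedence over --category; with neither, suggest_path()
    # proposes a location and the confirmation loop lets the user adjust it.
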
    # === Model Source Management ===

    @with_workspace_logging("model add-source")
    def model_add_source(self, args: argparse.Namespace) -> None:
        """Add download source URLs to models."""
        env = self.workspace.get_active_environment()

        # Mode detection: direct vs interactive
        if args.model and args.url:
            # Direct mode
            self._add_source_direct(env, args.model, args.url)
        else:
            # Interactive mode
            self._add_source_interactive(env)

    def _add_source_direct(self, env, identifier: str, url: str):
        """Direct mode: add source to specific model."""
        result = env.add_model_source(identifier, url)

        if result.success:
            print(f"✓ Added source to {result.model.filename}")
            print(f"  {url}")
        else:
            # Handle errors
            if result.error == "model_not_found":
                print(f"✗ Model not found: {identifier}", file=sys.stderr)
                print("\nHint: Use hash prefix or exact filename", file=sys.stderr)
                sys.exit(1)

            elif result.error == "ambiguous_filename":
                print(f"✗ Multiple models match '{identifier}':", file=sys.stderr)
                for match in result.matches:
                    print(f"  • {match.relative_path} ({match.hash[:8]}...)", file=sys.stderr)
                print(f"\nUse full hash: cg model add-source <hash> {url}", file=sys.stderr)
                sys.exit(1)

            elif result.error == "url_exists":
                print(f"✗ URL already exists for {result.model.filename}", file=sys.stderr)
                sys.exit(1)

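    # Illustrative direct-mode usage (the command shape comes from the hint
    # printed above; the identifier and URL are placeholders):
    #
    #   cg model add-source model.safetensors https://example.com/mirror/model.safetensors
    #   cg model add-source a1b2c3d4 https://example.com/mirror/model.safetensors
    #
    # The identifier may be an exact filename or a hash prefix; an ambiguous
    # filename is rejected with the list of matching hashes shown above.
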
    def _add_source_interactive(self, env):
        """Interactive mode: go through all models without sources."""
        statuses = env.get_models_without_sources()

        if not statuses:
            print("✓ All models have download sources!")
            return

        print("\n📦 Add Model Sources\n")
        print(f"Found {len(statuses)} model(s) without download sources\n")

        added_count = 0
        skipped_count = 0

        for idx, status in enumerate(statuses, 1):
            model = status.model
            available = status.available_locally

            # Show model info
            print(f"[{idx}/{len(statuses)}] {model.filename}")
            print(f"  Hash: {model.hash[:16]}...")
            print(f"  Path: {model.relative_path}")

            # Show availability status
            if available:
                print("  Status: ✓ Available locally")
            else:
                print("  Status: ✗ Not in local index (phantom reference)")

            # Prompt for URL
            url = input("\n  URL (or 's' to skip, 'q' to quit): ").strip()
            print()

            if url.lower() == 'q':
                print("⊗ Cancelled\n")
                break
            elif url.lower() == 's' or not url:
                skipped_count += 1
                continue
            else:
                # Add source
                result = env.add_model_source(model.hash, url)

                if result.success:
                    print("  ✓ Added source\n")
                    added_count += 1
                else:
                    # Should not happen in this flow, but handle gracefully
                    print(f"  ✗ Failed to add source: {result.error}\n", file=sys.stderr)

        # Summary
        print(f"✅ Complete: {added_count}/{len(statuses)} source(s) added")

        if added_count > 0:
            print("\nYour environment is now more shareable!")
            print("  Run 'cg export' to bundle and distribute")

    # === Config Management ===

    @with_workspace_logging("config")
    def config(self, args: argparse.Namespace) -> None:
        """Manage ComfyGit configuration settings."""
        # Flag mode - direct operations
        if hasattr(args, 'civitai_key') and args.civitai_key is not None:
            self._set_civitai_key(args.civitai_key)
            return

        if hasattr(args, 'show') and args.show:
            self._show_config()
            return

        # Interactive mode - no flags provided
        self._interactive_config()

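    # Illustrative usage (a sketch: flag names are assumed from the argparse
    # attributes args.civitai_key and args.show checked above; the real flag
    # definitions live in cli.py):
    #
    #   cg config                        # no flags -> interactive menu
    #   cg config --show                 # print current settings
    #   cg config --civitai-key <key>    # save key; an empty string clears it
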
    def _set_civitai_key(self, key: str):
        """Set Civitai API key."""
        if key == "":
            self.workspace.workspace_config_manager.set_civitai_token(None)
            print("✓ Civitai API key cleared")
        else:
            self.workspace.workspace_config_manager.set_civitai_token(key)
            print("✓ Civitai API key saved")

    def _show_config(self):
        """Display current configuration."""
        print("ComfyGit Configuration:\n")

        # Workspace path
        print(f"  Workspace Path: {self.workspace.paths.root}")

        # Civitai API Key
        token = self.workspace.workspace_config_manager.get_civitai_token()
        if token:
            # Mask key showing last 4 chars
            masked = f"••••••••{token[-4:]}" if len(token) > 4 else "••••"
            print(f"  Civitai API Key: {masked}")
        else:
            print("  Civitai API Key: Not set")

        # Registry cache preference
        prefer_cache = self.workspace.workspace_config_manager.get_prefer_registry_cache()
        print(f"  Registry Cache: {'Enabled' if prefer_cache else 'Disabled'}")

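    # Example of what _show_config() prints (values are illustrative; the layout
    # follows the print statements above):
    #
    #   ComfyGit Configuration:
    #
    #     Workspace Path: /home/user/comfy-workspace
    #     Civitai API Key: ••••••••abcd
    #     Registry Cache: Enabled
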
    def _interactive_config(self):
        """Interactive configuration menu."""
        while True:
            # Get current config
            civitai_token = self.workspace.workspace_config_manager.get_civitai_token()
            prefer_cache = self.workspace.workspace_config_manager.get_prefer_registry_cache()

            # Display menu
            print("\nComfyGit Configuration\n")

            # Civitai key status
            if civitai_token:
                masked = f"••••••••{civitai_token[-4:]}" if len(civitai_token) > 4 else "••••"
                print(f"  1. Civitai API Key: {masked}")
            else:
                print("  1. Civitai API Key: Not set")

            # Registry cache
            cache_status = "Enabled" if prefer_cache else "Disabled"
            print(f"  2. Registry Cache: {cache_status}")

            # Options
            print("\n  [1-2] Change setting  [c] Clear a setting  [q] Quit")
            choice = input("Choice: ").strip().lower()

            if choice == 'q':
                break
            elif choice == '1':
                self._interactive_set_civitai_key()
            elif choice == '2':
                self._interactive_toggle_registry_cache()
            elif choice == 'c':
                self._interactive_clear_setting()
            else:
                print("  Invalid choice")

    def _interactive_set_civitai_key(self):
        """Interactive Civitai API key setup."""
        print("\n🔑 Civitai API Key Setup")
        print("  Get your key from: https://civitai.com/user/account")

        key = input("\nEnter API key (or blank to cancel): ").strip()
        if not key:
            print("  Cancelled")
            return

        self.workspace.workspace_config_manager.set_civitai_token(key)
        print("✓ API key saved")

    def _interactive_toggle_registry_cache(self):
        """Toggle registry cache preference."""
        current = self.workspace.workspace_config_manager.get_prefer_registry_cache()
        new_value = not current

        self.workspace.workspace_config_manager.set_prefer_registry_cache(new_value)
        status = "enabled" if new_value else "disabled"
        print(f"✓ Registry cache {status}")

    def _interactive_clear_setting(self):
        """Clear a configuration setting."""
        print("\nClear which setting?")
        print("  1. Civitai API Key")
        print("\n  [1] Clear setting  [c] Cancel")

        choice = input("Choice: ").strip().lower()

        if choice == "1":
            self.workspace.workspace_config_manager.set_civitai_token(None)
            print("✓ Civitai API key cleared")
        elif choice == "c" or choice == "":
            print("  Cancelled")
        else:
            print("  Invalid choice")

    # === Orchestrator Management ===

    def orch_status(self, args: argparse.Namespace) -> None:
        """Show orchestrator status."""
        from .utils.orchestrator import (
            is_orchestrator_running,
            read_switch_status,
            get_orchestrator_uptime,
            format_uptime
        )

        metadata_dir = self.workspace.path / ".metadata"

        # Check orchestrator status
        is_running, pid = is_orchestrator_running(metadata_dir)

        if args.json:
            # JSON output mode
            import json
            status_data = {
                "running": is_running,
                "pid": pid,
            }

            if is_running:
                uptime = get_orchestrator_uptime(metadata_dir, pid)
                if uptime:
                    status_data["uptime_seconds"] = int(uptime)

            # Check switch status
            switch_status = read_switch_status(metadata_dir)
            if switch_status:
                status_data["switch"] = switch_status

            print(json.dumps(status_data, indent=2))
            return

        # Human-readable output
        print("\nOrchestrator Status")
        print("━" * 70)

        if not is_running:
            if pid:
                print(f"Running: No (stale PID {pid})")
            else:
                print("Running: No")
            print("\nOrchestrator is not running.")
            print("Start ComfyUI to launch the orchestrator automatically.")
            print("━" * 70)
            return

        print(f"Running: Yes (PID {pid})")

        # Show uptime
        uptime = get_orchestrator_uptime(metadata_dir, pid)
        if uptime:
            print(f"Uptime: {format_uptime(uptime)}")

        # Show control port
        control_port_file = metadata_dir / ".control_port"
        if control_port_file.exists():
            try:
                port = control_port_file.read_text().strip()
                print(f"Control Port: {port}")
            except IOError:
                pass

        # Check switch status
        switch_status = read_switch_status(metadata_dir)
        if switch_status:
            state = switch_status.get("state", "unknown")
            progress = switch_status.get("progress", 0)
            message = switch_status.get("message", "")
            target_env = switch_status.get("target_env", "")
            source_env = switch_status.get("source_env", "")

            print(f"\nSwitch Status: {state.replace('_', ' ').title()} ({progress}%)")
            if message:
                print(f"  {message}")
            if source_env:
                print(f"Source Env: {source_env}")
            if target_env:
                print(f"Target Env: {target_env}")
        else:
            print("\nSwitch Status: Idle")

        print("━" * 70)

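    # Example shape of the --json output (keys taken from status_data above;
    # values are illustrative, and "switch" is whatever read_switch_status()
    # returned, e.g. state/progress/message/source_env/target_env):
    #
    #   {
    #     "running": true,
    #     "pid": 12345,
    #     "uptime_seconds": 4210,
    #     "switch": {"state": "...", "progress": 0, "message": "..."}
    #   }
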
    def orch_restart(self, args: argparse.Namespace) -> None:
        """Request orchestrator to restart ComfyUI."""
        import time
        from .utils.orchestrator import is_orchestrator_running, safe_write_command

        metadata_dir = self.workspace.path / ".metadata"

        # Check if orchestrator is running
        is_running, pid = is_orchestrator_running(metadata_dir)

        if not is_running:
            print("✗ Orchestrator is not running")
            print("  Start ComfyUI to launch the orchestrator")
            sys.exit(1)

        # Send restart command
        print(f"✓ Sending restart command to orchestrator (PID {pid})")
        safe_write_command(metadata_dir, {
            "command": "restart",
            "timestamp": time.time()
        })

        print("  ComfyUI will restart within 500ms...")

        if args.wait:
            print("\n  Waiting for restart to complete...")
            time.sleep(2)  # Give orchestrator time to process

            # Wait for restart (check if PID changes or process restarts)
            for _ in range(30):  # 15 second timeout
                time.sleep(0.5)
                is_running, new_pid = is_orchestrator_running(metadata_dir)
                if is_running:
                    print(f"✓ Orchestrator restarted (PID {new_pid})")
                    return

            print("⚠️ Restart may still be in progress")

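    # The restart request is just a small JSON command dropped into .metadata by
    # safe_write_command(); the orchestrator is expected to poll for it (hence
    # the "within 500ms" message above). Payload written here, with an
    # illustrative timestamp value:
    #
    #   {"command": "restart", "timestamp": 1736980000.0}
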
    def orch_kill(self, args: argparse.Namespace) -> None:
        """Shutdown orchestrator."""
        import time
        from .utils.orchestrator import (
            is_orchestrator_running,
            safe_write_command,
            kill_orchestrator_process,
            read_switch_status
        )

        metadata_dir = self.workspace.path / ".metadata"

        # Check if orchestrator is running
        is_running, pid = is_orchestrator_running(metadata_dir)

        if not is_running:
            print("✗ Orchestrator is not running")
            if pid:
                print(f"  (stale PID file exists: {pid})")
            return

        # Check if mid-switch (warn user)
        switch_status = read_switch_status(metadata_dir)
        if switch_status:
            state = switch_status.get("state", "")
            if state not in ["complete", "failed", "aborted"]:
                print(f"⚠️ Orchestrator is currently switching environments (state: {state})")
                if not args.force:
                    response = input("  Shutdown anyway? [y/N]: ").strip().lower()
                    if response not in ['y', 'yes']:
                        print("✗ Shutdown cancelled")
                        return

        if args.force:
            # Force kill (SIGTERM then SIGKILL if needed)
            print(f"✓ Force killing orchestrator (PID {pid})")
            # Sends SIGTERM, waits 3s for cleanup, then SIGKILL if still alive
            kill_orchestrator_process(pid, force=False)
            print("✓ Orchestrator terminated")
            print("\nNote: ComfyUI should have been shut down gracefully.")
            print("  If still running, check with: ps aux | grep 'ComfyUI/main.py'")
        else:
            # Graceful shutdown via command
            print(f"✓ Sending shutdown command to orchestrator (PID {pid})")
            safe_write_command(metadata_dir, {
                "command": "shutdown",
                "timestamp": time.time()
            })
            print("  Orchestrator will exit within 500ms...")

            # Wait for shutdown
            time.sleep(1)
            is_running, _ = is_orchestrator_running(metadata_dir)
            if not is_running:
                print("✓ Orchestrator shut down")
            else:
                print("⚠️ Orchestrator may still be shutting down")

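    # Illustrative usage (subcommand and flag names assumed from the method name
    # and args.force; see cli.py for the real definitions):
    #
    #   cg orch kill            # graceful: writes {"command": "shutdown", ...}
    #   cg orch kill --force    # SIGTERM immediately, SIGKILL after ~3s if needed
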
    def orch_clean(self, args: argparse.Namespace) -> None:
        """Clean orchestrator state files."""
        from .utils.orchestrator import (
            is_orchestrator_running,
            kill_orchestrator_process,
            cleanup_orchestrator_state
        )

        metadata_dir = self.workspace.path / ".metadata"

        # Check if orchestrator is running
        is_running, pid = is_orchestrator_running(metadata_dir)

        # Show what will be cleaned
        files_to_show = [
            ".orchestrator.pid",
            ".control_port",
            ".cmd",
            ".switch_request.json",
            ".switch_status.json",
            ".switch.lock",
            ".startup_state.json",
            ".cmd.tmp.* (temp files)"
        ]

        if args.dry_run:
            print("\n🧹 Files that would be cleaned:")
            for filename in files_to_show:
                filepath = metadata_dir / filename.split()[0]
                if '*' in filename or filepath.exists():
                    print(f"  • {filename}")
            print("\nNote: workspace_config.json will be preserved")
            return

        # Confirm if orchestrator is running
        if is_running and not args.force:
            print(f"⚠️ Warning: Orchestrator is currently running (PID {pid})")
            print("\nThis will forcefully clean orchestrator state.")
            print("Files to remove:")
            for filename in files_to_show:
                print(f"  • {filename}")
            print("\nNote: workspace_config.json will be preserved")

            if args.kill:
                print("\n⚠️ --kill flag: Will also terminate orchestrator process")

            response = input("\nContinue? [y/N]: ").strip().lower()
            if response not in ['y', 'yes']:
                print("✗ Cleaning cancelled")
                return

        # Kill orchestrator if requested
        if is_running and args.kill:
            print(f"\n✓ Terminating orchestrator process {pid}")
            print("  (giving it a chance to shut down ComfyUI gracefully...)")
            # Use force=False to send SIGTERM first, allowing cleanup handlers to run
            # Will still SIGKILL after 3s if process doesn't exit
            kill_orchestrator_process(pid, force=False)

        # Clean up state files
        print("\n🧹 Cleaning orchestrator state...")
        removed = cleanup_orchestrator_state(metadata_dir, preserve_config=True)

        if removed:
            for filename in removed:
                print(f"  ✓ Removed {filename}")
            print(f"\n✓ Cleaned {len(removed)} file(s)")
        else:
            print("  No files to clean")

        print("\n✓ Orchestrator state cleaned")

        # Helpful next steps
        if args.kill:
            print("\nNote: If ComfyUI is still running, you can find it with:")
            print("  ps aux | grep 'ComfyUI/main.py'")
            print("\nOr restart fresh with:")
            print("  cg -e <env> run")
        else:
            print("\nYou can now:")
            print("  • Run ComfyUI manually from an environment directory")
            print("  • Start new orchestrator via ComfyUI startup")

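    # Illustrative usage (subcommand and flag names assumed from the method name
    # and args.dry_run / args.force / args.kill; real definitions are in cli.py):
    #
    #   cg orch clean --dry-run   # list the state files without touching them
    #   cg orch clean             # prompts first if the orchestrator is running
    #   cg orch clean --kill      # also terminate the orchestrator process
    #
    # workspace_config.json is always preserved (preserve_config=True above).
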
    def orch_logs(self, args: argparse.Namespace) -> None:
        """Show orchestrator logs."""
        import subprocess
        from .utils.orchestrator import tail_log_file

        metadata_dir = self.workspace.path / ".metadata"
        log_file = metadata_dir / "orchestrator.log"

        if not log_file.exists():
            print("✗ No orchestrator log file found")
            print(f"  Expected: {log_file}")
            return

        if args.follow:
            # Use tail -f for live following
            print(f"Following {log_file} (Ctrl+C to stop)\n")
            try:
                subprocess.run(["tail", "-f", str(log_file)])
            except KeyboardInterrupt:
                print("\n")
        else:
            # Show last N lines
            lines = tail_log_file(log_file, args.lines)
            if lines:
                print("".join(lines))
            else:
                print("(empty log file)")
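
    # Illustrative usage (subcommand and flag names assumed from the method name
    # and args.follow / args.lines; real definitions are in cli.py):
    #
    #   cg orch logs               # print the last N lines via tail_log_file()
    #   cg orch logs --follow      # stream live with `tail -f` until Ctrl+C
    #   cg orch logs --lines 200   # choose N for the non-follow mode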