ollamadiffuser 1.1.6__py3-none-any.whl → 1.2.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -4,7 +4,7 @@ OllamaDiffuser - Local AI Image Generation with Ollama-style CLI
4
4
  A tool for managing and running Stable Diffusion, FLUX.1, and other AI image generation models locally.
5
5
  """
6
6
 
7
- __version__ = "1.1.6"
7
+ __version__ = "1.2.1"
8
8
  __author__ = "OllamaDiffuser Team"
9
9
  __email__ = "ollamadiffuser@gmail.com"
10
10
  __description__ = "🎨 Local AI Image Generation with Ollama-style CLI for Stable Diffusion, FLUX.1, and LoRA support"
@@ -20,7 +20,9 @@ class GenerateRequest(BaseModel):
20
20
  prompt: str
21
21
  negative_prompt: str = "low quality, bad anatomy, worst quality, low resolution"
22
22
  num_inference_steps: Optional[int] = None
23
+ steps: Optional[int] = None # Alias for num_inference_steps for convenience
23
24
  guidance_scale: Optional[float] = None
25
+ cfg_scale: Optional[float] = None # Alias for guidance_scale for convenience
24
26
  width: int = 1024
25
27
  height: int = 1024
26
28
  control_image_path: Optional[str] = None # Path to control image file
@@ -232,12 +234,18 @@ def create_app() -> FastAPI:
232
234
  # Get current loaded inference engine
233
235
  engine = model_manager.loaded_model
234
236
 
237
+ # Handle parameter aliasing - prioritize shorter names for convenience
238
+ steps = request.steps if request.steps is not None else request.num_inference_steps
239
+ guidance = request.cfg_scale if request.cfg_scale is not None else request.guidance_scale
240
+
235
241
  # Generate image
236
242
  image = engine.generate_image(
237
243
  prompt=request.prompt,
238
244
  negative_prompt=request.negative_prompt,
239
- num_inference_steps=request.num_inference_steps,
240
- guidance_scale=request.guidance_scale,
245
+ num_inference_steps=steps,
246
+ steps=steps, # Pass both for GGUF compatibility
247
+ guidance_scale=guidance,
248
+ cfg_scale=guidance, # Pass both for GGUF compatibility
241
249
  width=request.width,
242
250
  height=request.height,
243
251
  control_image=request.control_image_path,
@@ -12,6 +12,7 @@ import time
12
12
  from .. import __version__, print_version
13
13
  from ..core.models.manager import model_manager
14
14
  from ..core.config.settings import settings
15
+ from ..core.config.model_registry import model_registry
15
16
  from ..api.server import run_server
16
17
 
17
18
  console = Console()
@@ -158,27 +159,29 @@ def run(model_name: str, host: Optional[str], port: Optional[int]):
158
159
  @cli.command()
159
160
  @click.option('--hardware', '-hw', is_flag=True, help='Show hardware requirements')
160
161
  def list(hardware: bool):
161
- """List all models"""
162
- available_models = model_manager.list_available_models()
162
+ """List installed models only"""
163
163
  installed_models = model_manager.list_installed_models()
164
164
  current_model = model_manager.get_current_model()
165
165
 
166
+ if not installed_models:
167
+ rprint("[yellow]No models installed[/yellow]")
168
+ rprint("\n[dim]💡 Download models with: ollamadiffuser pull <model-name>[/dim]")
169
+ rprint("[dim]💡 See all available models: ollamadiffuser registry list[/dim]")
170
+ rprint("[dim]💡 See only available models: ollamadiffuser registry list --available-only[/dim]")
171
+ return
172
+
166
173
  if hardware:
167
174
  # Show detailed hardware requirements
168
- for model_name in available_models:
175
+ for model_name in installed_models:
169
176
  info = model_manager.get_model_info(model_name)
170
177
  if not info:
171
178
  continue
172
179
 
173
180
  # Check installation status
174
- if model_name in installed_models:
175
- status = "✅ Installed"
176
- if model_name == current_model:
177
- status += " (current)"
178
- size = info.get('size', 'Unknown')
179
- else:
180
- status = "⬇️ Available"
181
- size = "-"
181
+ status = "✅ Installed"
182
+ if model_name == current_model:
183
+ status += " (current)"
184
+ size = info.get('size', 'Unknown')
182
185
 
183
186
  # Create individual table for each model
184
187
  table = Table(title=f"[bold cyan]{model_name}[/bold cyan] - {status}")
@@ -204,30 +207,23 @@ def list(hardware: bool):
204
207
  console.print() # Add spacing between models
205
208
  else:
206
209
  # Show compact table
207
- table = Table(title="OllamaDiffuser Model List")
210
+ table = Table(title="Installed Models")
208
211
  table.add_column("Model Name", style="cyan", no_wrap=True)
209
212
  table.add_column("Status", style="green")
210
213
  table.add_column("Size", style="blue")
211
214
  table.add_column("Type", style="magenta")
212
215
  table.add_column("Min VRAM", style="yellow")
213
216
 
214
- for model_name in available_models:
217
+ for model_name in installed_models:
215
218
  # Check installation status
216
- if model_name in installed_models:
217
- status = "✅ Installed"
218
- if model_name == current_model:
219
- status += " (current)"
220
-
221
- # Get model information
222
- info = model_manager.get_model_info(model_name)
223
- size = info.get('size', 'Unknown') if info else 'Unknown'
224
- model_type = info.get('model_type', 'Unknown') if info else 'Unknown'
225
- else:
226
- status = "⬇️ Available"
227
- size = "-"
228
- # Get type from registry
229
- info = model_manager.get_model_info(model_name)
230
- model_type = info.get('model_type', 'Unknown') if info else 'Unknown'
219
+ status = "✅ Installed"
220
+ if model_name == current_model:
221
+ status += " (current)"
222
+
223
+ # Get model information
224
+ info = model_manager.get_model_info(model_name)
225
+ size = info.get('size', 'Unknown') if info else 'Unknown'
226
+ model_type = info.get('model_type', 'Unknown') if info else 'Unknown'
231
227
 
232
228
  # Get hardware requirements
233
229
  hw_req = info.get('hardware_requirements', {}) if info else {}
@@ -236,7 +232,18 @@ def list(hardware: bool):
236
232
  table.add_row(model_name, status, size, model_type, min_vram)
237
233
 
238
234
  console.print(table)
235
+
236
+ # Get counts for summary
237
+ available_models = model_registry.get_available_models()
238
+ external_models = model_registry.get_external_api_models_only()
239
+
240
+ console.print(f"\n[dim]💡 Installed: {len(installed_models)} models[/dim]")
241
+ console.print(f"[dim]💡 Available for download: {len(available_models)} models[/dim]")
242
+ if external_models:
243
+ console.print(f"[dim]💡 External API models: {len(external_models)} models[/dim]")
239
244
  console.print("\n[dim]💡 Use --hardware flag to see detailed hardware requirements[/dim]")
245
+ console.print("[dim]💡 See all models: ollamadiffuser registry list[/dim]")
246
+ console.print("[dim]💡 See available models: ollamadiffuser registry list --available-only[/dim]")
240
247
 
241
248
  @cli.command()
242
249
  @click.argument('model_name')
@@ -967,5 +974,336 @@ def create_samples_cmd(force):
967
974
  ctx = click.Context(create_samples)
968
975
  ctx.invoke(create_samples, force=force)
969
976
 
977
@cli.group(hidden=True)
def registry():
    """Manage model registry (internal command)."""
    # Hidden group: not shown in top-level --help; subcommands attach below.
981
+
982
@registry.command()
@click.option('--format', '-f', type=click.Choice(['table', 'json', 'yaml']), default='table', help='Output format')
@click.option('--installed-only', is_flag=True, help='Show only installed models')
@click.option('--available-only', is_flag=True, help='Show only available (not installed) models')
@click.option('--external-only', is_flag=True, help='Show only externally defined models')
def list(format: str, installed_only: bool, available_only: bool, external_only: bool):
    """List models in the registry with installation status"""

    # Pick the model subset that matches the requested filter flag.
    if installed_only:
        models = model_registry.get_installed_models()
        title = "Installed Models"
    elif available_only:
        models = model_registry.get_available_models()
        title = "Available Models (Not Installed)"
    elif external_only:
        models = model_registry.get_external_api_models_only()
        title = "External API Models"
    else:
        models = model_registry.get_all_models()
        title = "All Models (Installed + Available)"

    # Name sets used to derive the Status / Source columns per model.
    installed_model_names = set(model_registry.get_installed_models().keys())
    local_model_names = set(model_registry.get_local_models_only().keys())
    external_model_names = set(model_registry.get_external_api_models_only().keys())
    current_model = model_manager.get_current_model()

    if not models:
        rprint(f"[yellow]No models found in category: {title}[/yellow]")
        return

    # Machine-readable formats: dump and return early.
    if format == 'json':
        import json
        print(json.dumps(models, indent=2, ensure_ascii=False))
        return
    if format == 'yaml':
        import yaml
        print(yaml.dump(models, default_flow_style=False, allow_unicode=True))
        return

    # Default: rich table output.
    table = Table(title=title)
    table.add_column("Model Name", style="cyan", no_wrap=True)
    table.add_column("Type", style="yellow")
    table.add_column("Repository", style="blue")
    table.add_column("Status", style="green")
    table.add_column("Source", style="magenta")

    for name, info in models.items():
        # Installation status, with a marker for the currently loaded model.
        if name in installed_model_names:
            status = "✅ Installed"
            if name == current_model:
                status += " (current)"
        else:
            status = "⬇️ Available"

        # Where the registry entry came from.
        in_local = name in local_model_names
        in_external = name in external_model_names
        if in_local and in_external:
            source = "Local + External"
        elif in_local:
            source = "Local"
        elif in_external:
            source = "External API"
        else:
            source = "Unknown"

        table.add_row(
            name,
            info.get('model_type', 'Unknown'),
            info.get('repo_id', 'Unknown'),
            status,
            source,
        )

    console.print(table)

    # Summary is only meaningful for the unfiltered view.
    if not (installed_only or available_only or external_only):
        total_count = len(models)
        installed_count = len(installed_model_names)
        available_count = total_count - installed_count
        local_count = len(local_model_names)
        external_count = len(external_model_names)

        console.print(f"\n[dim]Summary:[/dim]")
        console.print(f"[dim] • Total: {total_count} models[/dim]")
        console.print(f"[dim] • Installed: {installed_count} models[/dim]")
        console.print(f"[dim] • Available: {available_count} models[/dim]")
        console.print(f"[dim] • Local registry: {local_count} models[/dim]")
        console.print(f"[dim] • External API: {external_count} models[/dim]")
1072
+
1073
@registry.command()
@click.argument('model_name')
@click.argument('repo_id')
@click.argument('model_type')
@click.option('--variant', help='Model variant (e.g., fp16, bf16)')
@click.option('--license-type', help='License type')
@click.option('--commercial-use', type=bool, help='Whether commercial use is allowed')
@click.option('--save', is_flag=True, help='Save to user configuration file')
def add(model_name: str, repo_id: str, model_type: str, variant: Optional[str],
        license_type: Optional[str], commercial_use: Optional[bool], save: bool):
    """Add a new model to the registry"""

    # Assemble the registry entry from the provided arguments.
    model_config = {"repo_id": repo_id, "model_type": model_type}
    if variant:
        model_config["variant"] = variant

    # Only attach license metadata when at least one license field was given.
    if license_type or commercial_use is not None:
        license_info = {}
        if license_type:
            license_info["type"] = license_type
        if commercial_use is not None:
            license_info["commercial_use"] = commercial_use
        model_config["license_info"] = license_info

    # Register in-memory; abort with exit code 1 on failure.
    if not model_registry.add_model(model_name, model_config):
        rprint(f"[red]Failed to add model '{model_name}' to registry![/red]")
        sys.exit(1)

    rprint(f"[green]Model '{model_name}' added to registry successfully![/green]")

    if save:
        try:
            # Merge with any previously saved user models before writing,
            # so existing entries in models.json are preserved.
            import json
            user_models = {}
            config_path = settings.config_dir / "models.json"
            if config_path.exists():
                with open(config_path, 'r') as f:
                    data = json.load(f)
                user_models = data.get('models', {})

            user_models[model_name] = model_config
            model_registry.save_user_config(user_models, config_path)
            rprint(f"[green]Model configuration saved to {config_path}[/green]")
        except Exception as e:
            # Best-effort persistence: the in-memory add already succeeded.
            rprint(f"[red]Failed to save configuration: {e}[/red]")
1123
+
1124
@registry.command()
@click.argument('model_name')
@click.option('--from-file', is_flag=True, help='Also remove from user configuration file')
def remove(model_name: str, from_file: bool):
    """Remove a model from the registry"""

    # Abort with exit code 1 when the model is unknown.
    if not model_registry.remove_model(model_name):
        rprint(f"[red]Model '{model_name}' not found in registry![/red]")
        sys.exit(1)

    rprint(f"[green]Model '{model_name}' removed from registry![/green]")

    if not from_file:
        return

    # Optionally also drop the entry from the persisted user configuration.
    try:
        config_path = settings.config_dir / "models.json"
        if not config_path.exists():
            rprint(f"[yellow]No user configuration file found[/yellow]")
            return

        import json
        with open(config_path, 'r') as f:
            data = json.load(f)

        user_models = data.get('models', {})
        if model_name in user_models:
            del user_models[model_name]
            model_registry.save_user_config(user_models, config_path)
            rprint(f"[green]Model removed from configuration file[/green]")
        else:
            rprint(f"[yellow]Model not found in configuration file[/yellow]")
    except Exception as e:
        # Best-effort: the in-memory removal already succeeded.
        rprint(f"[red]Failed to update configuration file: {e}[/red]")
1155
+
1156
@registry.command()
def reload():
    """Reload the model registry from configuration files"""
    try:
        model_registry.reload()
        rprint("[green]Model registry reloaded successfully![/green]")

        # Post-reload summary of what is now loaded.
        models = model_registry.get_all_models()
        external_registries = model_registry.get_external_registries()

        rprint(f"[dim]Total models: {len(models)}[/dim]")
        if not external_registries:
            rprint("[dim]No external registries loaded[/dim]")
        else:
            rprint(f"[dim]External registries: {len(external_registries)}[/dim]")
            for registry_path in external_registries:
                rprint(f"[dim] • {registry_path}[/dim]")

    except Exception as e:
        rprint(f"[red]Failed to reload registry: {e}[/red]")
        sys.exit(1)
1178
+
1179
@registry.command()
@click.argument('config_file', type=click.Path(exists=True))
def import_config(config_file: str):
    """Import models from a configuration file"""
    try:
        from pathlib import Path
        import json
        import yaml

        config_path = Path(config_file)
        suffix = config_path.suffix.lower()

        # Parse by extension; anything other than JSON/YAML is rejected.
        with open(config_path, 'r', encoding='utf-8') as f:
            if suffix == '.json':
                data = json.load(f)
            elif suffix in ['.yaml', '.yml']:
                data = yaml.safe_load(f)
            else:
                rprint(f"[red]Unsupported file format: {config_path.suffix}[/red]")
                sys.exit(1)

        if 'models' not in data:
            rprint("[red]Configuration file must contain a 'models' section[/red]")
            sys.exit(1)

        # Register each model individually, reporting per-entry outcome.
        imported_count = 0
        for name, cfg in data['models'].items():
            if model_registry.add_model(name, cfg):
                imported_count += 1
                rprint(f"[green]✓ Imported: {name}[/green]")
            else:
                rprint(f"[red]✗ Failed to import: {name}[/red]")

        rprint(f"[green]Successfully imported {imported_count} models[/green]")

    except Exception as e:
        # SystemExit from the sys.exit calls above is not caught here.
        rprint(f"[red]Failed to import configuration: {e}[/red]")
        sys.exit(1)
1216
+
1217
@registry.command()
@click.option('--output', '-o', help='Output file path')
@click.option('--format', '-f', type=click.Choice(['json', 'yaml']), default='json', help='Output format')
@click.option('--user-only', is_flag=True, help='Export only user-defined models')
def export(output: Optional[str], format: str, user_only: bool):
    """Export model registry to a configuration file"""
    try:
        from pathlib import Path
        import json
        import yaml

        if user_only:
            # FIX: --user-only filtering is not implemented; previously the
            # warning was printed only when external registries existed, so
            # the flag could be silently ignored. Always warn when it is set.
            rprint(f"[yellow]User-only export not fully supported yet. Exporting all models.[/yellow]")

        # Export the full registry regardless (see warning above).
        models = model_registry.get_all_models()
        config_data = {"models": models}

        # Default the output filename from the chosen format.
        if output:
            output_path = Path(output)
        else:
            output_path = Path('models.json' if format == 'json' else 'models.yaml')

        with open(output_path, 'w', encoding='utf-8') as f:
            if format == 'json':
                json.dump(config_data, f, indent=2, ensure_ascii=False)
            else:
                yaml.safe_dump(config_data, f, default_flow_style=False, allow_unicode=True)

        rprint(f"[green]Model registry exported to {output_path}[/green]")
        rprint(f"[dim]Exported {len(models)} models[/dim]")

    except Exception as e:
        rprint(f"[red]Failed to export registry: {e}[/red]")
        sys.exit(1)
1259
+
1260
@registry.command('check-gguf')
def check_gguf():
    """Check GGUF support status"""
    from ..core.models.gguf_loader import GGUF_AVAILABLE

    # Guard: bail out early with install hints when GGUF deps are missing.
    if not GGUF_AVAILABLE:
        rprint("❌ [red]GGUF Support Not Available[/red]")
        rprint("📦 Install with: [yellow]pip install llama-cpp-python gguf[/yellow]")
        rprint("🔧 Or install all dependencies: [yellow]pip install -r requirements.txt[/yellow]")
        return

    rprint("✅ [green]GGUF Support Available[/green]")

    # Collect registry entries that the manager recognizes as GGUF models.
    gguf_models = {
        name: info
        for name, info in model_registry.get_all_models().items()
        if model_manager.is_gguf_model(name)
    }

    if not gguf_models:
        rprint("ℹ️ No GGUF models found in registry")
        return

    rprint(f"\n🔥 Found {len(gguf_models)} GGUF models:")

    table = Table()
    table.add_column("Model", style="cyan")
    table.add_column("Variant", style="yellow")
    table.add_column("VRAM", style="green")
    table.add_column("Size", style="blue")
    table.add_column("Installed", style="red")

    for name, info in gguf_models.items():
        hw_req = info.get('hardware_requirements', {})
        table.add_row(
            name,
            info.get('variant', 'unknown'),
            f"{hw_req.get('min_vram_gb', '?')}GB",
            f"{hw_req.get('disk_space_gb', '?')}GB",
            "✅" if model_manager.is_model_installed(name) else "❌",
        )

    console.print(table)

    rprint("\n📋 [blue]Usage:[/blue]")
    rprint(" ollamadiffuser pull <model-name> # Download GGUF model")
    rprint(" ollamadiffuser load <model-name> # Load GGUF model")
    rprint("\n💡 [yellow]Tip:[/yellow] Start with flux.1-dev-gguf-q4ks for best balance")
1307
+
970
1308
if __name__ == '__main__':
    # Script entry point: dispatch to the click CLI group.
    cli()