loclaude 0.0.1-alpha.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. package/.claude/CLAUDE.md +175 -0
  2. package/CHANGELOG.md +49 -0
  3. package/LICENSE +31 -0
  4. package/README.md +249 -0
  5. package/bin/index.mjs +5 -0
  6. package/bin/index.ts +5 -0
  7. package/docker/docker-compose.yml +85 -0
  8. package/libs/cli/dist/cac.d.ts +6 -0
  9. package/libs/cli/dist/cac.d.ts.map +1 -0
  10. package/libs/cli/dist/commands/config.d.ts +6 -0
  11. package/libs/cli/dist/commands/config.d.ts.map +1 -0
  12. package/libs/cli/dist/commands/docker.d.ts +17 -0
  13. package/libs/cli/dist/commands/docker.d.ts.map +1 -0
  14. package/libs/cli/dist/commands/doctor.d.ts +5 -0
  15. package/libs/cli/dist/commands/doctor.d.ts.map +1 -0
  16. package/libs/cli/dist/commands/index.d.ts +6 -0
  17. package/libs/cli/dist/commands/index.d.ts.map +1 -0
  18. package/libs/cli/dist/commands/init.d.ts +9 -0
  19. package/libs/cli/dist/commands/init.d.ts.map +1 -0
  20. package/libs/cli/dist/commands/models.d.ts +9 -0
  21. package/libs/cli/dist/commands/models.d.ts.map +1 -0
  22. package/libs/cli/dist/config.d.ts +74 -0
  23. package/libs/cli/dist/config.d.ts.map +1 -0
  24. package/libs/cli/dist/constants.d.ts +12 -0
  25. package/libs/cli/dist/constants.d.ts.map +1 -0
  26. package/libs/cli/dist/index.bun.js +3742 -0
  27. package/libs/cli/dist/index.bun.js.map +55 -0
  28. package/libs/cli/dist/index.d.ts +2 -0
  29. package/libs/cli/dist/index.d.ts.map +1 -0
  30. package/libs/cli/dist/index.js +3745 -0
  31. package/libs/cli/dist/index.js.map +55 -0
  32. package/libs/cli/dist/spawn.d.ts +35 -0
  33. package/libs/cli/dist/spawn.d.ts.map +1 -0
  34. package/libs/cli/dist/types.d.ts +10 -0
  35. package/libs/cli/dist/types.d.ts.map +1 -0
  36. package/libs/cli/dist/utils.d.ts +14 -0
  37. package/libs/cli/dist/utils.d.ts.map +1 -0
  38. package/libs/cli/package.json +53 -0
  39. package/package.json +80 -0
package/.claude/CLAUDE.md ADDED
@@ -0,0 +1,175 @@
+ # Claude Code Instructions
+
+ Guidelines for working with the loclaude CLI and Docker repository.
+
+ ## Project Overview
+
+ **loclaude** is an npm package that runs Claude Code with local Ollama LLMs. It provides:
+ - CLI commands for launching Claude Code connected to Ollama
+ - Docker container management for Ollama + Open WebUI
+ - Project scaffolding with `loclaude init`
+ - Cross-runtime support (Bun and Node.js)
+
+ ## Key Files
+
+ | File | Purpose |
+ |------|---------|
+ | `bin/index.ts` | Bun entry point |
+ | `bin/index.mjs` | Node.js entry point |
+ | `libs/cli/lib/cac.ts` | CLI command definitions |
+ | `libs/cli/lib/commands/` | Command implementations |
+ | `libs/cli/lib/config.ts` | Configuration system |
+ | `libs/cli/lib/spawn.ts` | Cross-runtime process spawning |
+ | `docker/docker-compose.yml` | Bundled Docker template |
+ | `package.json` | Root package (published to npm) |
+ | `libs/cli/package.json` | Internal CLI workspace package |
+
+ ## Patterns & Conventions
+
+ ### CLI Commands
+
+ Command naming follows these conventions:
+ - Hyphenated subcommands: `docker-up`, `models-pull`
+ - Base commands for listing: `models` (lists models), `config` (shows config)
+ - Action suffixes: `-pull`, `-rm`, `-show`, `-run`
+
+ ### Configuration Priority
+
+ Settings are resolved in layers (see the sketch after this list):
+
+ 1. CLI arguments (highest)
+ 2. Environment variables (`OLLAMA_URL`, `OLLAMA_MODEL`, etc.)
+ 3. Project config (`./.loclaude/config.json`)
+ 4. User config (`~/.config/loclaude/config.json`)
+ 5. Default values (lowest)
+
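For reference, a minimal TypeScript sketch of this layering; the helper names (`loadJson`, `defined`, `resolveOllamaSettings`) are hypothetical and only approximate what `libs/cli/lib/config.ts` does:

```typescript
// Layered resolution: defaults < user config < project config < env < CLI args.
// All names here are illustrative; the real libs/cli/lib/config.ts may differ.
import { existsSync, readFileSync } from "node:fs";
import { homedir } from "node:os";
import { join } from "node:path";

interface OllamaSettings {
  url?: string;
  defaultModel?: string;
}

// Read the `ollama` block of a config file, or {} if the file is absent.
function loadJson(path: string): OllamaSettings {
  if (!existsSync(path)) return {};
  return JSON.parse(readFileSync(path, "utf8")).ollama ?? {};
}

// Drop undefined entries so an unset env var cannot mask a lower layer.
function defined(settings: OllamaSettings): OllamaSettings {
  return Object.fromEntries(
    Object.entries(settings).filter(([, value]) => value !== undefined)
  );
}

export function resolveOllamaSettings(cliArgs: OllamaSettings = {}): Required<OllamaSettings> {
  const defaults = { url: "http://localhost:11434", defaultModel: "qwen3-coder:30b" };
  const user = loadJson(join(homedir(), ".config", "loclaude", "config.json"));
  const project = loadJson(join(process.cwd(), ".loclaude", "config.json"));
  const env = defined({
    url: process.env.OLLAMA_URL,
    defaultModel: process.env.OLLAMA_MODEL,
  });
  // Later spreads win, matching the priority order above.
  return { ...defaults, ...user, ...project, ...env, ...defined(cliArgs) };
}
```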
+ ### Docker Compose
+
+ - Bundled template in `libs/cli/lib/commands/init.ts`
+ - Deployed to user projects via `loclaude init`
+ - GPU support uses NVIDIA runtime with `deploy.resources.reservations.devices`
+ - Service names: `ollama`, `open-webui`
+ - Health checks defined for both services
+
+ ### Docker Volume Mounts
+
+ - Ollama models: `./models:/root/.ollama` (persists models on host)
+ - Open WebUI data: named volume `open-webui:/app/backend/data`
+
+ ### Networking
+
+ Services communicate via Docker's internal network:
+ - Open WebUI reaches Ollama at `http://ollama:11434`
+ - External access: Ollama on port 11434, WebUI on port 3000
+
+ ## Common Operations
+
+ ### Testing CLI Changes
+
+ ```bash
+ # Rebuild
+ bun run build
+
+ # Test commands
+ bun bin/index.ts doctor
+ node bin/index.mjs config
+
+ # Test in a fresh directory
+ mkdir /tmp/test-loclaude && cd /tmp/test-loclaude
+ bun ~/path/to/ollama/bin/index.ts init
+ ```
+
+ ### Local Development Workflow
+
+ ```bash
+ # Start containers
+ loclaude docker-up   # or: bun bin/index.ts docker-up
+
+ # Pull a model
+ loclaude models-pull qwen3-coder:30b
+
+ # Run Claude Code with local Ollama
+ loclaude run
+ ```
+
+ ## CLI Development
+
+ The `loclaude` CLI is built in `libs/cli/` as an internal workspace package.
+
+ ### Architecture
+
+ ```
+ libs/cli/
+ ├── lib/
+ │   ├── cac.ts          # CLI definition using cac
+ │   ├── commands/       # Command implementations
+ │   │   ├── init.ts     # Project scaffolding
+ │   │   ├── doctor.ts   # System checks
+ │   │   ├── docker.ts   # Container management
+ │   │   ├── models.ts   # Ollama model operations
+ │   │   └── config.ts   # Config display
+ │   ├── config.ts       # Configuration loading/merging
+ │   ├── spawn.ts        # Cross-runtime process spawning
+ │   ├── utils.ts        # Ollama API utilities
+ │   └── constants.ts    # Default values
+ ├── build/              # Bun build configuration
+ └── dist/               # Built bundles (index.js, index.bun.js)
+ ```
+
+ ### Building
+
+ ```bash
+ bun run build                 # Build all packages via turbo
+ cd libs/cli && bun run build  # Build CLI only
+ ```
+
+ ### Testing Locally
+
+ ```bash
+ # Direct execution
+ bun bin/index.ts --help
+ node bin/index.mjs --help
+
+ # Test specific commands
+ bun bin/index.ts doctor
+ node bin/index.mjs config
+ ```
+
+ ### Adding New Commands
+
+ 1. Create command file in `libs/cli/lib/commands/`
+ 2. Export functions from `libs/cli/lib/commands/index.ts`
+ 3. Register with cac in `libs/cli/lib/cac.ts` (see the sketch after this list)
+ 4. Rebuild: `bun run build`
+
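A worked example of steps 1-3 for a hypothetical `hello` command (the command itself is invented for illustration; the registration uses cac's `command().option().action()` chain):

```typescript
// Step 1: libs/cli/lib/commands/hello.ts (hypothetical example command)
export async function hello(name: string, options: { shout?: boolean } = {}): Promise<void> {
  const message = `Hello from loclaude, ${name}!`;
  console.log(options.shout ? message.toUpperCase() : message);
}

// Step 2: re-export it from libs/cli/lib/commands/index.ts
// export { hello } from './hello';

// Step 3: register it in libs/cli/lib/cac.ts (the file already defines `cli`;
// shown here in outline only)
import { cac } from "cac";
import { hello } from "./commands";

const cli = cac("loclaude");

cli
  .command("hello <name>", "Print a greeting (example command)")
  .option("--shout", "Uppercase the greeting")
  .action(async (name: string, options: { shout?: boolean }) => {
    await hello(name, options);
  });
```

After `bun run build`, the command would then be available as `loclaude hello <name>`.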
+ ### Cross-Runtime Spawning
+
+ Use the `spawn` function from `./spawn.ts` instead of `Bun.spawn()` directly:
+
+ ```typescript
+ import { spawn, spawnCapture } from "./spawn";
+
+ // Inherit stdio (for interactive commands)
+ await spawn(["docker", "compose", "up"], { env: process.env });
+
+ // Capture output
+ const result = await spawnCapture(["docker", "--version"]);
+ console.log(result.stdout);
+ ```
+
+ ### Releasing
+
+ ```bash
+ # Run pre-release checks
+ bun run prerelease-check
+
+ # Publish to npm
+ bun run release       # Publish as latest
+ bun run release:rc    # Publish with rc tag
+ bun run release:beta  # Publish with beta tag
+ ```
+
+ ## Do Not
+
+ - Commit anything in `docker/models/` (gitignored, contains large model files)
+ - Use `Bun.spawn()` directly - use `spawn()` from `./spawn.ts` for cross-runtime support
+ - Hardcode Ollama URLs - always use `getOllamaUrl()` from config
+ - Forget to export new commands from `libs/cli/lib/commands/index.ts`
package/CHANGELOG.md ADDED
@@ -0,0 +1,49 @@
+ # Changelog
+
+ All notable changes to this project will be documented in this file.
+
+ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
+ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+
+ ## [Unreleased]
+
+ ## [0.0.1-rc.1] - 2025-01-19
+
+ ### Added
+
+ - **CLI Commands**
+   - `loclaude run` - Run Claude Code with local Ollama (interactive model selection)
+   - `loclaude init` - Scaffold docker-compose.yml, config, and mise.toml
+   - `loclaude doctor` - Check system prerequisites (Docker, GPU, Claude CLI)
+   - `loclaude config` / `loclaude config-paths` - View configuration
+   - `loclaude docker-up/down/status/logs/restart` - Docker container management
+   - `loclaude models` - List installed Ollama models
+   - `loclaude models-pull/rm/show/run` - Model management commands
+
+ - **Configuration System**
+   - Project-local config: `./.loclaude/config.json`
+   - User global config: `~/.config/loclaude/config.json`
+   - Environment variable support (`OLLAMA_URL`, `OLLAMA_MODEL`, etc.)
+   - Layered config merging with clear priority
+
+ - **Cross-Runtime Support**
+   - Works with both Bun and Node.js runtimes
+   - Dual entry points: `bin/index.ts` (Bun) and `bin/index.mjs` (Node)
+
+ - **Docker Integration**
+   - Bundled docker-compose.yml template with Ollama + Open WebUI
+   - NVIDIA GPU support out of the box
+   - Health checks for both services
+
+ - **Project Scaffolding**
+   - `loclaude init` creates complete project structure
+   - Generates mise.toml with task aliases
+   - Creates .claude/CLAUDE.md for Claude Code instructions
+   - Sets up .gitignore for model directory
+
+ ### Notes
+
+ This is a release candidate. The API and command structure may change before 1.0.
+
+ [Unreleased]: https://github.com/nicholasgalante1997/docker-ollama/compare/v0.0.1-rc.1...HEAD
+ [0.0.1-rc.1]: https://github.com/nicholasgalante1997/docker-ollama/releases/tag/v0.0.1-rc.1
package/LICENSE ADDED
@@ -0,0 +1,31 @@
+ MIT License
+
+ Copyright (c) 2026 Mega Blastoise
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
+
+ ---
+
+ Addendum, January 19, 2026
+
+ This package leverages the [Bun](https://bun.sh) shell to run shell commands on behalf of the end user.
+
+ In an ideal world, I don't have to write this addendum. In this world, I do.
+
+ If you pass unsanitized input into this command line tool, all bets are off. I'll make no guarantees on your behalf about behavior or safety. I'll personally never talk to you in real life. May god have mercy on your soul.
package/README.md ADDED
@@ -0,0 +1,249 @@
+ # loclaude
+
+ Run [Claude Code](https://docs.anthropic.com/en/docs/claude-code) with local [Ollama](https://ollama.ai/) LLMs.
+
+ loclaude provides a CLI to:
+ - Launch Claude Code sessions connected to your local Ollama instance
+ - Manage Ollama + Open WebUI Docker containers
+ - Pull and manage Ollama models
+ - Scaffold new projects with opinionated Docker configs
+
+ ## Installation
+
+ ```bash
+ # With npm (requires Node.js 18+)
+ npm install -g loclaude
+
+ # With bun (recommended)
+ bun install -g loclaude
+ ```
+
+ ## Prerequisites
+
+ - [Docker](https://docs.docker.com/get-docker/) with Docker Compose v2
+ - [NVIDIA GPU](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html) with drivers and container toolkit
+ - [Claude Code CLI](https://docs.anthropic.com/en/docs/claude-code) installed (`npm install -g @anthropic-ai/claude-code`)
+
+ Check your setup with:
+
+ ```bash
+ loclaude doctor
+ ```
+
+ ## Quick Start
+
+ ```bash
+ # Initialize a new project with Docker configs
+ loclaude init
+
+ # Start Ollama + Open WebUI containers
+ loclaude docker-up
+
+ # Pull a model
+ loclaude models-pull qwen3-coder:30b
+
+ # Run Claude Code with local LLM (interactive model selection)
+ loclaude run
+ ```
+
+ ## Commands
+
+ ### Running Claude Code
+
+ ```bash
+ loclaude run                     # Interactive model selection
+ loclaude run -m qwen3-coder:30b  # Use specific model
+ loclaude run -- --help           # Pass args to claude
+ ```
+
+ ### Project Setup
+
+ ```bash
+ loclaude init             # Scaffold docker-compose.yml, config, mise.toml
+ loclaude init --force     # Overwrite existing files
+ loclaude init --no-webui  # Skip Open WebUI in compose file
+ ```
+
+ ### Docker Management
+
+ ```bash
+ loclaude docker-up              # Start containers (detached)
+ loclaude docker-up --no-detach  # Start in foreground
+ loclaude docker-down            # Stop containers
+ loclaude docker-status          # Show container status
+ loclaude docker-logs            # Show logs
+ loclaude docker-logs --follow   # Follow logs
+ loclaude docker-restart         # Restart containers
+ ```
+
+ ### Model Management
+
+ ```bash
+ loclaude models              # List installed models
+ loclaude models-pull <name>  # Pull a model
+ loclaude models-rm <name>    # Remove a model
+ loclaude models-show <name>  # Show model details
+ loclaude models-run <name>   # Run model interactively (ollama CLI)
+ ```
+
+ ### Diagnostics
+
+ ```bash
+ loclaude doctor        # Check prerequisites
+ loclaude config        # Show current configuration
+ loclaude config-paths  # Show config file search paths
+ ```
+
+ ## Configuration
+
+ loclaude supports configuration via files and environment variables.
+
+ ### Config Files
+
+ Config files are loaded in priority order:
+
+ 1. `./.loclaude/config.json` (project-local)
+ 2. `~/.config/loclaude/config.json` (user global)
+
+ Example config:
+
+ ```json
+ {
+   "ollama": {
+     "url": "http://localhost:11434",
+     "defaultModel": "qwen3-coder:30b"
+   },
+   "docker": {
+     "composeFile": "./docker-compose.yml",
+     "gpu": true
+   },
+   "claude": {
+     "extraArgs": ["--verbose"]
+   }
+ }
+ ```
+
+ ### Environment Variables
+
+ | Variable | Description | Default |
+ |----------|-------------|---------|
+ | `OLLAMA_URL` | Ollama API endpoint | `http://localhost:11434` |
+ | `OLLAMA_MODEL` | Default model name | `qwen3-coder:30b` |
+ | `LOCLAUDE_COMPOSE_FILE` | Path to docker-compose.yml | `./docker-compose.yml` |
+ | `LOCLAUDE_GPU` | Enable GPU (`true`/`false`) | `true` |
+
+ ### Priority
+
+ Configuration is merged in this order (highest priority first):
+
+ 1. CLI arguments
+ 2. Environment variables
+ 3. Project config (`./.loclaude/config.json`)
+ 4. User config (`~/.config/loclaude/config.json`)
+ 5. Default values
+
+ ## Service URLs
+
+ When containers are running:
+
+ | Service | URL | Description |
+ |---------|-----|-------------|
+ | Ollama API | http://localhost:11434 | LLM inference API |
+ | Open WebUI | http://localhost:3000 | Chat interface |
+
+ ## Project Structure
+
+ After running `loclaude init`:
+
+ ```
+ .
+ ├── .claude/
+ │   └── CLAUDE.md       # Claude Code instructions
+ ├── .loclaude/
+ │   └── config.json     # loclaude configuration
+ ├── models/             # Ollama model storage (gitignored)
+ ├── docker-compose.yml  # Container definitions
+ ├── mise.toml           # Task runner configuration
+ └── README.md
+ ```
+
+ ## Using with mise
+
+ The `init` command creates a `mise.toml` with convenient task aliases:
+
+ ```bash
+ mise run up            # loclaude docker-up
+ mise run down          # loclaude docker-down
+ mise run claude        # loclaude run
+ mise run pull <model>  # loclaude models-pull <model>
+ mise run doctor        # loclaude doctor
+ ```
+
+ ## Troubleshooting
+
+ ### Check System Requirements
+
+ ```bash
+ loclaude doctor
+ ```
+
+ This verifies:
+ - Docker and Docker Compose installation
+ - NVIDIA GPU detection
+ - NVIDIA Container Toolkit
+ - Claude Code CLI
+ - Ollama API connectivity
+
+ ### Container Issues
+
+ ```bash
+ # View logs
+ loclaude docker-logs --follow
+
+ # Restart containers
+ loclaude docker-restart
+
+ # Full reset
+ loclaude docker-down && loclaude docker-up
+ ```
+
+ ### Connection Issues
+
+ If Claude Code can't connect to Ollama:
+
+ 1. Verify Ollama is running: `loclaude docker-status`
+ 2. Check the API: `curl http://localhost:11434/api/tags` (scripted version below)
+ 3. Verify your config: `loclaude config`
+
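If you'd rather script that check, here is a standalone TypeScript sketch of the same probe against Ollama's `/api/tags` endpoint (the file name is illustrative; run it with `bun check-ollama.ts` or any Node 18+ ESM setup):

```typescript
// check-ollama.ts - standalone sketch; mirrors `curl http://localhost:11434/api/tags`.
const url = process.env.OLLAMA_URL ?? "http://localhost:11434";

try {
  const res = await fetch(`${url}/api/tags`); // Ollama's installed-models endpoint
  if (!res.ok) throw new Error(`HTTP ${res.status}`);
  const { models } = (await res.json()) as { models: { name: string }[] };
  console.log(`Ollama reachable at ${url} with ${models.length} model(s) installed.`);
} catch (err) {
  console.error(`Cannot reach Ollama at ${url}:`, err);
  process.exit(1);
}
```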
+ ## Development
+
+ ### Building from Source
+
+ ```bash
+ git clone https://github.com/nicholasgalante1997/docker-ollama.git loclaude
+ cd loclaude
+ bun install
+ bun run build
+ ```
+
+ ### Running Locally
+
+ ```bash
+ # With bun (direct)
+ bun bin/index.ts --help
+
+ # With node (built)
+ node bin/index.mjs --help
+ ```
+
+ ### Testing
+
+ ```bash
+ # Test both runtimes
+ bun bin/index.ts doctor
+ node bin/index.mjs doctor
+ ```
+
+ ## License
+
+ MIT
package/bin/index.mjs ADDED
@@ -0,0 +1,5 @@
+ #!/usr/bin/env node
+
+ import { run_cli } from "@loclaude-internal/cli";
+
+ run_cli();
package/bin/index.ts ADDED
@@ -0,0 +1,5 @@
+ #!/usr/bin/env bun
+
+ import { run_cli } from "@loclaude-internal/cli/bun.js";
+
+ run_cli();
package/docker/docker-compose.yml ADDED
@@ -0,0 +1,85 @@
+ services:
+   ollama:
+     image: ollama/ollama:latest
+     container_name: ollama
+     # Use the NVIDIA runtime for GPU acceleration
+     # This enables access to NVIDIA GPUs from within the container
+     runtime: nvidia
+     environment:
+       - NVIDIA_VISIBLE_DEVICES=all # Make all GPUs visible to the container
+       - NVIDIA_DRIVER_CAPABILITIES=compute,utility # Grant compute and utility capabilities (needed for GPU inference)
+       # OPTIONAL: Limit loaded models and parallel requests
+       # Uncomment to keep Ollama's memory usage in check
+       # - OLLAMA_MAX_LOADED_MODELS=1
+       # - OLLAMA_NUM_PARALLEL=1
+
+       # OPTIONAL: Set log level for debugging
+       # - OLLAMA_DEBUG=1
+
+     # Volume mounts: map host directories/files into the container
+     volumes:
+       # Map the models directory so models persist on your host:
+       # the container stores them in /root/.ollama; we mount it to ./models
+       - ./models:/root/.ollama
+
+       # Keep container time in sync with host (good practice)
+       # - /etc/localtime:/etc/localtime:ro
+
+       # OPTIONAL: Mount a custom config directory
+       # Uncomment if you want to customize Ollama settings
+       # - ./config:/root/.ollama/config
+
+     ports:
+       - "11434:11434"
+     restart: unless-stopped
+     healthcheck:
+       test: ["CMD", "ollama", "list"]
+       interval: 300s
+       timeout: 2s
+       retries: 3
+       start_period: 40s
+
+     # OPTIONAL: Resource limits and reservations
+     # Uncomment to constrain CPU and memory usage
+     deploy:
+       resources:
+         # limits:
+         #   cpus: '4'    # Limit to 4 CPU cores
+         #   memory: 32G  # Limit to 32GB RAM
+         reservations:
+           # cpus: '2'    # Reserve at least 2 CPU cores
+           # memory: 16G  # Reserve at least 16GB RAM
+           devices:
+             - driver: nvidia
+               count: all # Use all available GPUs
+               capabilities: [gpu]
+   open-webui:
+     image: ghcr.io/open-webui/open-webui:cuda # For NVIDIA GPU support; use the :main tag instead if you don't need CUDA
+     container_name: open-webui
+     ports:
+       - "3000:8080"
+     environment:
+       # Point Open WebUI to the Ollama service
+       # Use the service name (ollama) as the hostname since they share a Docker network
+       - OLLAMA_BASE_URL=http://ollama:11434
+     depends_on:
+       - ollama # Ensure Ollama starts before Open WebUI
+     restart: unless-stopped
+     healthcheck:
+       test: ["CMD", "curl", "-f", "http://localhost:8080/health"]
+       interval: 30s
+       timeout: 10s
+       retries: 3
+       start_period: 60s
+     volumes:
+       - open-webui:/app/backend/data
+     deploy:
+       resources:
+         reservations:
+           devices:
+             - driver: nvidia
+               count: all
+               capabilities: [gpu]
+
+ volumes:
+   open-webui:
package/libs/cli/dist/cac.d.ts ADDED
@@ -0,0 +1,6 @@
+ declare const cli: import("cac").CAC;
+ export declare const help: () => void;
+ export declare const version: () => void;
+ export declare const run_cli: () => void;
+ export { cli };
+ //# sourceMappingURL=cac.d.ts.map
package/libs/cli/dist/cac.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"cac.d.ts","sourceRoot":"","sources":["../lib/cac.ts"],"names":[],"mappings":"AAqBA,QAAA,MAAM,GAAG,mBAAkB,CAAC;AAoI5B,eAAO,MAAM,IAAI,YAAyB,CAAC;AAC3C,eAAO,MAAM,OAAO,YAA4B,CAAC;AAEjD,eAAO,MAAM,OAAO,QAAO,IAE1B,CAAC;AAEF,OAAO,EAAE,GAAG,EAAE,CAAC"}
package/libs/cli/dist/commands/config.d.ts ADDED
@@ -0,0 +1,6 @@
+ /**
+  * config command - Show and manage configuration
+  */
+ export declare function configShow(): Promise<void>;
+ export declare function configPaths(): Promise<void>;
+ //# sourceMappingURL=config.d.ts.map
package/libs/cli/dist/commands/config.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../../lib/commands/config.ts"],"names":[],"mappings":"AAAA;;GAEG;AAKH,wBAAsB,UAAU,IAAI,OAAO,CAAC,IAAI,CAAC,CAahD;AAED,wBAAsB,WAAW,IAAI,OAAO,CAAC,IAAI,CAAC,CAgBjD"}
package/libs/cli/dist/commands/docker.d.ts ADDED
@@ -0,0 +1,17 @@
+ /**
+  * docker command - Manage Docker containers
+  */
+ export interface DockerOptions {
+     file?: string;
+     detach?: boolean;
+ }
+ export declare function dockerUp(options?: DockerOptions): Promise<void>;
+ export declare function dockerDown(options?: DockerOptions): Promise<void>;
+ export declare function dockerStatus(options?: DockerOptions): Promise<void>;
+ export declare function dockerLogs(options?: DockerOptions & {
+     follow?: boolean;
+     service?: string;
+ }): Promise<void>;
+ export declare function dockerRestart(options?: DockerOptions): Promise<void>;
+ export declare function dockerExec(service: string, command: string[], options?: DockerOptions): Promise<number>;
+ //# sourceMappingURL=docker.d.ts.map
package/libs/cli/dist/commands/docker.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"docker.d.ts","sourceRoot":"","sources":["../../lib/commands/docker.ts"],"names":[],"mappings":"AAAA;;GAEG;AAgEH,MAAM,WAAW,aAAa;IAC5B,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,MAAM,CAAC,EAAE,OAAO,CAAC;CAClB;AAeD,wBAAsB,QAAQ,CAAC,OAAO,GAAE,aAAkB,GAAG,OAAO,CAAC,IAAI,CAAC,CAiBzE;AAED,wBAAsB,UAAU,CAAC,OAAO,GAAE,aAAkB,GAAG,OAAO,CAAC,IAAI,CAAC,CAS3E;AAED,wBAAsB,YAAY,CAAC,OAAO,GAAE,aAAkB,GAAG,OAAO,CAAC,IAAI,CAAC,CAG7E;AAED,wBAAsB,UAAU,CAC9B,OAAO,GAAE,aAAa,GAAG;IAAE,MAAM,CAAC,EAAE,OAAO,CAAC;IAAC,OAAO,CAAC,EAAE,MAAM,CAAA;CAAO,GACnE,OAAO,CAAC,IAAI,CAAC,CAaf;AAED,wBAAsB,aAAa,CAAC,OAAO,GAAE,aAAkB,GAAG,OAAO,CAAC,IAAI,CAAC,CAS9E;AAED,wBAAsB,UAAU,CAC9B,OAAO,EAAE,MAAM,EACf,OAAO,EAAE,MAAM,EAAE,EACjB,OAAO,GAAE,aAAkB,GAC1B,OAAO,CAAC,MAAM,CAAC,CAWjB"}
package/libs/cli/dist/commands/doctor.d.ts ADDED
@@ -0,0 +1,5 @@
+ /**
+  * doctor command - Check prerequisites and system health
+  */
+ export declare function doctor(): Promise<void>;
+ //# sourceMappingURL=doctor.d.ts.map
package/libs/cli/dist/commands/doctor.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"doctor.d.ts","sourceRoot":"","sources":["../../lib/commands/doctor.ts"],"names":[],"mappings":"AAAA;;GAEG;AAuMH,wBAAsB,MAAM,IAAI,OAAO,CAAC,IAAI,CAAC,CA+B5C"}
package/libs/cli/dist/commands/index.d.ts ADDED
@@ -0,0 +1,6 @@
+ export { init } from './init';
+ export { doctor } from './doctor';
+ export { configShow, configPaths } from './config';
+ export { dockerUp, dockerDown, dockerStatus, dockerLogs, dockerRestart, dockerExec } from './docker';
+ export { modelsList, modelsPull, modelsRm, modelsShow, modelsRun } from './models';
+ //# sourceMappingURL=index.d.ts.map
package/libs/cli/dist/commands/index.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../lib/commands/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,MAAM,QAAQ,CAAC;AAC9B,OAAO,EAAE,MAAM,EAAE,MAAM,UAAU,CAAC;AAClC,OAAO,EAAE,UAAU,EAAE,WAAW,EAAE,MAAM,UAAU,CAAC;AACnD,OAAO,EAAE,QAAQ,EAAE,UAAU,EAAE,YAAY,EAAE,UAAU,EAAE,aAAa,EAAE,UAAU,EAAE,MAAM,UAAU,CAAC;AACrG,OAAO,EAAE,UAAU,EAAE,UAAU,EAAE,QAAQ,EAAE,UAAU,EAAE,SAAS,EAAE,MAAM,UAAU,CAAC"}
package/libs/cli/dist/commands/init.d.ts ADDED
@@ -0,0 +1,9 @@
+ /**
+  * init command - Scaffold docker-compose.yml and config
+  */
+ export interface InitOptions {
+     force?: boolean;
+     noWebui?: boolean;
+ }
+ export declare function init(options?: InitOptions): Promise<void>;
+ //# sourceMappingURL=init.d.ts.map
package/libs/cli/dist/commands/init.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"init.d.ts","sourceRoot":"","sources":["../../lib/commands/init.ts"],"names":[],"mappings":"AAAA;;GAEG;AA0TH,MAAM,WAAW,WAAW;IAC1B,KAAK,CAAC,EAAE,OAAO,CAAC;IAChB,OAAO,CAAC,EAAE,OAAO,CAAC;CACnB;AAED,wBAAsB,IAAI,CAAC,OAAO,GAAE,WAAgB,GAAG,OAAO,CAAC,IAAI,CAAC,CAqGnE"}