rubber-ducky 1.2.1__tar.gz → 1.3.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: rubber-ducky
3
- Version: 1.2.1
3
+ Version: 1.3.0
4
4
  Summary: Quick CLI do-it-all tool. Use natural language to spit out bash commands
5
5
  Requires-Python: >=3.10
6
6
  Description-Content-Type: text/markdown
@@ -35,7 +35,7 @@ Requirements:
35
35
  ```
36
36
  ducky # interactive inline session
37
37
  ducky --directory src # preload code from a directory
38
- ducky --model llama3 # use a different Ollama model
38
+ ducky --model qwen3 # use a different Ollama model
39
39
  ```
40
40
 
41
41
  Both `ducky` and `rubber-ducky` executables map to the same CLI, so `uvx rubber-ducky -- <args>` works as well.
@@ -54,6 +54,32 @@ Launching `ducky` with no arguments opens the inline interface:
54
54
 
55
55
  `ducky --directory <path>` streams the contents of the provided directory to the assistant the next time you submit a prompt (the directory is read once at startup).
56
56
 
57
+ ## Crumbs
58
+
59
+ Crumbs are simple scripts that can be executed within Rubber Ducky. They are stored in `~/.ducky/crumbs/` and can be referenced by name in your prompts.
60
+
61
+ To use a crumb, simply mention it in your prompt:
62
+ ```
63
+ Can you use the uv-server crumb to run the HuggingFace prompt renderer?
64
+ ```
65
+
66
+ ### Creating Crumbs
67
+
68
+ To create a new crumb:
69
+
70
+ 1. Create a new directory in `~/.ducky/crumbs/` with your crumb name
71
+ 2. Add an `info.txt` file with metadata:
72
+ ```
73
+ name: your-crumb-name
74
+ type: shell
75
+ description: Brief description of what this crumb does
76
+ ```
77
+ 3. Add your executable script file (e.g., `your-crumb-name.sh`)
78
+ 4. Create a symbolic link in `~/.local/bin` to make it available as a command:
79
+ ```bash
80
+ ln -s ~/.ducky/crumbs/your-crumb-name/your-crumb-name.sh ~/.local/bin/your-crumb-name
81
+ ```
82
+
57
83
  ## Development (uv)
58
84
 
59
85
  ```
@@ -19,7 +19,7 @@ Requirements:
19
19
  ```
20
20
  ducky # interactive inline session
21
21
  ducky --directory src # preload code from a directory
22
- ducky --model llama3 # use a different Ollama model
22
+ ducky --model qwen3 # use a different Ollama model
23
23
  ```
24
24
 
25
25
  Both `ducky` and `rubber-ducky` executables map to the same CLI, so `uvx rubber-ducky -- <args>` works as well.
@@ -38,6 +38,32 @@ Launching `ducky` with no arguments opens the inline interface:
38
38
 
39
39
  `ducky --directory <path>` streams the contents of the provided directory to the assistant the next time you submit a prompt (the directory is read once at startup).
40
40
 
41
+ ## Crumbs
42
+
43
+ Crumbs are simple scripts that can be executed within Rubber Ducky. They are stored in `~/.ducky/crumbs/` and can be referenced by name in your prompts.
44
+
45
+ To use a crumb, simply mention it in your prompt:
46
+ ```
47
+ Can you use the uv-server crumb to run the HuggingFace prompt renderer?
48
+ ```
49
+
50
+ ### Creating Crumbs
51
+
52
+ To create a new crumb:
53
+
54
+ 1. Create a new directory in `~/.ducky/crumbs/` with your crumb name
55
+ 2. Add an `info.txt` file with metadata:
56
+ ```
57
+ name: your-crumb-name
58
+ type: shell
59
+ description: Brief description of what this crumb does
60
+ ```
61
+ 3. Add your executable script file (e.g., `your-crumb-name.sh`)
62
+ 4. Create a symbolic link in `~/.local/bin` to make it available as a command:
63
+ ```bash
64
+ ln -s ~/.ducky/crumbs/your-crumb-name/your-crumb-name.sh ~/.local/bin/your-crumb-name
65
+ ```
66
+
41
67
  ## Development (uv)
42
68
 
43
69
  ```
@@ -0,0 +1,3 @@
1
+ from .ducky import ducky, main
2
+
3
+ __all__ = ["ducky", "main"]
@@ -0,0 +1,60 @@
1
+ import json
2
+ import os
3
+ from pathlib import Path
4
+ from typing import Dict, Any, Optional
5
+
6
+
7
class ConfigManager:
    """Manages Ducky configuration including model preferences.

    Configuration is stored as JSON in ``<config_dir>/config`` (default
    ``~/.ducky/config``). Every read falls back to ``DEFAULTS`` on any
    error, so callers never need to handle a missing or corrupt file.
    """

    # Single source of truth for fallback values (previously duplicated
    # between load_config() and get_last_model()).
    DEFAULTS: Dict[str, Any] = {
        "last_model": "qwen3-coder:480b-cloud",
        "last_host": "https://ollama.com",
    }

    def __init__(self, config_dir: Optional[Path] = None):
        """Create the manager, ensuring the config directory exists.

        Args:
            config_dir: Directory holding the config file; defaults to
                ``~/.ducky``.
        """
        if config_dir is None:
            config_dir = Path.home() / ".ducky"
        self.config_dir = config_dir
        self.config_file = self.config_dir / "config"
        # Created eagerly so later save_config() calls can assume it exists.
        self.config_dir.mkdir(parents=True, exist_ok=True)

    def load_config(self) -> Dict[str, Any]:
        """Load configuration from file, returning defaults if not found.

        Returns:
            A fresh dict guaranteed to contain every key in ``DEFAULTS``.
        """
        if not self.config_file.exists():
            return dict(self.DEFAULTS)

        try:
            with open(self.config_file, 'r') as f:
                config = json.load(f)
        except (json.JSONDecodeError, OSError):
            # Corrupt or unreadable file: fall back to defaults rather
            # than crashing the CLI at startup.
            return dict(self.DEFAULTS)

        # Guard against valid JSON that isn't an object (e.g. a list),
        # which the original would only catch later as a TypeError.
        if not isinstance(config, dict):
            return dict(self.DEFAULTS)

        # Backfill keys missing from an older config file.
        for key, value in self.DEFAULTS.items():
            config.setdefault(key, value)
        return config

    def save_config(self, config: Dict[str, Any]) -> None:
        """Save configuration to file; warn (never raise) on failure."""
        try:
            with open(self.config_file, 'w') as f:
                json.dump(config, f, indent=2)
        except OSError as e:
            # Best-effort persistence: a read-only home directory should
            # not break the interactive session.
            print(f"Warning: Could not save config: {e}")

    def get_last_model(self) -> tuple[str, str]:
        """Get the last used model and host.

        Returns:
            Tuple of (model_name, host).
        """
        # load_config() guarantees both keys, so direct indexing is safe.
        config = self.load_config()
        return config["last_model"], config["last_host"]

    def save_last_model(self, model_name: str, host: str) -> None:
        """Save the last used model and host."""
        config = self.load_config()
        config["last_model"] = model_name
        config["last_host"] = host
        self.save_config(config)