1bcoder 0.1.1__tar.gz → 0.1.2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (72)
  1. {1bcoder-0.1.1 → 1bcoder-0.1.2/1bcoder.egg-info}/PKG-INFO +35 -25
  2. 1bcoder-0.1.1/README.md → 1bcoder-0.1.2/PKG-INFO +46 -24
  3. 1bcoder-0.1.1/1bcoder.egg-info/PKG-INFO → 1bcoder-0.1.2/README.md +33 -36
  4. {1bcoder-0.1.1 → 1bcoder-0.1.2}/chat.py +157 -78
  5. {1bcoder-0.1.1 → 1bcoder-0.1.2}/pyproject.toml +2 -1
  6. {1bcoder-0.1.1 → 1bcoder-0.1.2}/1bcoder.egg-info/SOURCES.txt +0 -0
  7. {1bcoder-0.1.1 → 1bcoder-0.1.2}/1bcoder.egg-info/dependency_links.txt +0 -0
  8. {1bcoder-0.1.1 → 1bcoder-0.1.2}/1bcoder.egg-info/entry_points.txt +0 -0
  9. {1bcoder-0.1.1 → 1bcoder-0.1.2}/1bcoder.egg-info/requires.txt +0 -0
  10. {1bcoder-0.1.1 → 1bcoder-0.1.2}/1bcoder.egg-info/top_level.txt +0 -0
  11. {1bcoder-0.1.1 → 1bcoder-0.1.2}/LICENSE +0 -0
  12. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/__init__.py +0 -0
  13. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/agents/advance.txt +0 -0
  14. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/agents/ask.txt +0 -0
  15. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/agents/fill.txt +0 -0
  16. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/agents/planning.txt +0 -0
  17. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/agents/sqlite.txt +0 -0
  18. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/aliases.txt +0 -0
  19. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/doc/MCP.md +0 -0
  20. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/doc/PARAM.md +0 -0
  21. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/doc/PROC.md +0 -0
  22. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/map.txt +0 -0
  23. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/proc/add-save.py +0 -0
  24. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/proc/collect-files.py +0 -0
  25. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/proc/extract-code.py +0 -0
  26. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/proc/extract-files.py +0 -0
  27. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/proc/extract-list.py +0 -0
  28. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/proc/grounding-check.py +0 -0
  29. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/proc/md.py +0 -0
  30. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/proc/mdx.py +0 -0
  31. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/proc/regexp-extract.py +0 -0
  32. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/profiles.txt +0 -0
  33. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/prompts/analysis.txt +0 -0
  34. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/prompts/sumarise.txt +0 -0
  35. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/prompts.txt +0 -0
  36. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/scripts/AddFunction.txt +0 -0
  37. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/scripts/AskProject.txt +0 -0
  38. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/scripts/CheckRequirements.txt +0 -0
  39. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/scripts/DockerMySQL.txt +0 -0
  40. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/scripts/DockerNginx.txt +0 -0
  41. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/scripts/DockerPython.txt +0 -0
  42. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/scripts/DockerStack.txt +0 -0
  43. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/scripts/DuckDuckGoInstant.txt +0 -0
  44. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/scripts/EnvTemplate.txt +0 -0
  45. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/scripts/Explain.txt +0 -0
  46. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/scripts/ExploreProjectStructure.txt +0 -0
  47. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/scripts/GitIgnorePython.txt +0 -0
  48. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/scripts/MySQLDump.txt +0 -0
  49. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/scripts/NewScript.txt +0 -0
  50. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/scripts/PipFreeze.txt +0 -0
  51. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/scripts/PyPI.txt +0 -0
  52. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/scripts/Refactor.txt +0 -0
  53. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/scripts/RunAndFix.txt +0 -0
  54. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/scripts/SQLiteSchema.txt +0 -0
  55. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/scripts/WikiPage.txt +0 -0
  56. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/scripts/WikiSearch.txt +0 -0
  57. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/scripts/parallel_call.txt +0 -0
  58. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/scripts/personal/content/create-regular-content.txt +0 -0
  59. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/scripts/personal/content/plan.txt +0 -0
  60. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/scripts/personal/test/collect-data-from-test-environment.txt +0 -0
  61. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/scripts/plan.txt +0 -0
  62. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/scripts/remote/create-content-on-remote-server.txt +0 -0
  63. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/scripts/set_ctx.txt +0 -0
  64. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/scripts/team-map-worker.txt +0 -0
  65. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/scripts/team-search-worker.txt +0 -0
  66. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/scripts/team-summarize.txt +0 -0
  67. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/scripts/team-tree-worker.txt +0 -0
  68. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/scripts/test.txt +0 -0
  69. {1bcoder-0.1.1 → 1bcoder-0.1.2}/_bcoder_data/teams/code-analysis.yaml +0 -0
  70. {1bcoder-0.1.1 → 1bcoder-0.1.2}/map_index.py +0 -0
  71. {1bcoder-0.1.1 → 1bcoder-0.1.2}/map_query.py +0 -0
  72. {1bcoder-0.1.1 → 1bcoder-0.1.2}/setup.cfg +0 -0
{1bcoder-0.1.1 → 1bcoder-0.1.2/1bcoder.egg-info}/PKG-INFO

@@ -1,6 +1,7 @@
  Metadata-Version: 2.4
  Name: 1bcoder
- Version: 0.1.1
+ Version: 0.1.2
+ Summary: AI coding assistant agent for 1B–7B local models (Ollama, LMStudio, llama.cpp). Terminal REPL with file editing, project map, agents, scripts, and parallel multi-model queries.
  Requires-Python: >=3.10
  Description-Content-Type: text/markdown
  License-File: LICENSE
@@ -12,15 +13,7 @@ Dynamic: license-file

  # 1bcoder

- AI-assisted code editor designed for small (1B parameter) language models running locally via [Ollama](https://ollama.com), [LMStudio](https://lmstudio.ai), or [LiteLLM](https://litellm.ai).
-
- ---
-
- **(c) 2026 Stanislav Zholobetskyi**
- Institute for Information Recording, National Academy of Sciences of Ukraine, Kyiv
-
- *Created as part of PhD research on the topic:
- «Intelligent Technology for Software Development and Maintenance Support»*
+ AI coding assistant agent for 1B–7B local models running locally via [Ollama](https://ollama.com), [LMStudio](https://lmstudio.ai), or [LiteLLM](https://litellm.ai).

  ---

@@ -93,7 +86,7 @@ Tasks that require the model to decide *what to look at* — refactoring across
  - **`/plan <goal>`** — planning agent: researches the project, writes a natural-language step-by-step plan to `plan.txt`; run `/agent <task> plan plan.txt` to execute it step by step
  - **`/fill`** — fill agent: reads NaN session variables, scans project for `.var` files and config files, sets each value automatically
  - **Session variables** — `{{name}}` placeholders substituted in any command; save/load from `.var` files for offline reuse without loading files into context
- - **Project config** — `/config save` persists session state (host, model, ctx, params, vars, procs) to `.1bcoder/config.yml`; auto-loaded on startup when `auto: true`
+ - **Project config** — `/config save` persists session state (host, model, ctx, params, vars, procs) to `.1bcoder/config.yml`; `/config save global` saves to `~/.1bcoder/config.yml`; on startup, the first config with `auto: true` (local → global) is applied automatically
  - **Aliases** — define command shortcuts with `/alias /name = expansion` (supports `{{args}}`); persisted in `aliases.txt`; loaded from global then project directory at startup and survive `/clear`
  - **Backup/restore** — `/bkup save` rotates existing backups (`file.bkup` → `file.bkup(1)`, `file.bkup(2)`…) so no snapshot is ever overwritten; `/bkup restore` always restores the latest
  - **MCP support** — connect external tool servers (filesystem, web, git, database, browser…) via the Model Context Protocol
@@ -184,15 +177,15 @@ pip install -e .
  python chat.py
  ```

- On startup a numbered list of available Ollama models is shown type the number to select one. Use `--model` to skip the prompt.
+ On startup, 1bcoder checks for a config with `auto: true` (local `.1bcoder/config.yml` first, then `~/.1bcoder/config.yml`) and connects to the host and model stored there. If no config is found, it connects to local Ollama and prompts for a model. Use `--model` or `--host` to override.

  ### CLI options

  ```
  1bcoder [--host URL] [--model NAME] [--init] [--scriptapply SCRIPT] [--param KEY=VALUE]

- --host URL            Host URL — supports ollama:// and openai:// schemes (default: http://localhost:11434)
- --model NAME          Skip model selection, use this model directly
+ --host URL            Host URL — supports ollama:// and openai:// schemes (default: from config or http://localhost:11434)
+ --model NAME          Model to use; overrides config (shows list if not available on host)
  --init                Create .1bcoder/ scaffold in the current directory
  --scriptapply SCRIPT  Run a script file non-interactively, then exit
  --param KEY=VALUE     Plan parameter substitution (repeatable)
@@ -801,17 +794,28 @@ summarize this for me -> myplan # capture LLM reply

  ### Project config (`/config`)

- Save and restore session state (host, model, ctx, params, vars, procs) to `.1bcoder/config.yml` in the current working directory. Useful for project-specific presets that are too large to fit in model context.
+ Save and restore session state (host, model, ctx, params, vars, procs). Two config locations are supported:
+
+ - **Local** — `.1bcoder/config.yml` in the current working directory (project-specific)
+ - **Global** — `~/.1bcoder/config.yml` (user-wide default for all projects)
+
+ **Startup priority:** on launch without `--host`/`--model`, 1bcoder checks local config first, then global. The first one with `auto: true` wins. If neither has `auto: true`, connects to local Ollama and prompts for a model.

  ```
- /config save              # save all current state
- /config save host         # save only host
- /config save model        # save only model
- /config save vars         # save only vars
- /config load              # restore from config.yml
- /config show              # print config.yml contents
- /config auto on           # auto-load on every startup in this directory
- /config auto off          # disable auto-load
+ /config save              # save all current state to local config
+ /config save global       # save all current state to global config
+ /config save host         # save only host to local config
+ /config save global host  # save only host to global config
+ /config save model        # save only model to local config
+ /config save global model # save only model to global config
+ /config save vars         # save only vars to local config
+ /config load              # restore from local config
+ /config load global       # restore from global config
+ /config show              # print local config contents
+ /config show global       # print global config contents
+ /config auto on           # enable auto-load in local config
+ /config auto on global    # enable auto-load in global config
+ /config auto off          # disable auto-load in local config
  ```

  **Selective delete:**
@@ -824,7 +828,7 @@ Save and restore session state (host, model, ctx, params, vars, procs) to `.1bco
  /config del proc collect-files # remove one proc
  ```

- **Config file format** (`.1bcoder/config.yml`):
+ **Config file format** (`.1bcoder/config.yml` or `~/.1bcoder/config.yml`):
  ```yaml
  auto: true
  host: ollama://localhost:11434
@@ -840,7 +844,7 @@ procs:
  - collect-files output.txt
  ```

- When `auto: true`, the config is applied automatically after the startup banner host, model, ctx, params, vars, and procs are restored without any command.
+ When `auto: true`, host and model are used at startup to connect; ctx, params, vars, and procs are also restored.

  ---

@@ -1187,3 +1191,9 @@ For human input, the corrected command is shown with `[fix?]` and you are asked
  | text-generation-webui | Linux / Win | `--api` flag | 5000 | `openai://` | oobabooga UI, needs `--api` flag to expose OpenAI endpoint |
  | TabbyAPI | Linux / Win | built-in | 5000 | `openai://` | Focused on exl2/GPTQ quantized models, low VRAM |
  | vLLM | Linux | built-in | 8000 | `openai://` | Production server, high throughput, requires significant VRAM |
+
+ ---
+
+ **(c) 2026 Stanislav Zholobetskyi**
+ Institute for Information Recording, National Academy of Sciences of Ukraine, Kyiv
+ *PhD research: «Intelligent Technology for Software Development and Maintenance Support»*
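The startup lookup described above (local config first, then global, first `auto: true` wins) reduces to a short precedence scan. A minimal sketch of that logic, where `parse_yml` is a placeholder for whatever config parser the application actually uses (an assumption, not the package's API):

```python
import os

# Paths as documented in the README text above.
LOCAL_CFG = os.path.join(".1bcoder", "config.yml")
GLOBAL_CFG = os.path.expanduser(os.path.join("~", ".1bcoder", "config.yml"))

def startup_config(parse_yml) -> dict:
    """Return the first config whose auto flag is set, local before global."""
    for path in (LOCAL_CFG, GLOBAL_CFG):
        if os.path.isfile(path):
            with open(path, encoding="utf-8") as f:
                cfg = parse_yml(f.read())  # placeholder parser (assumption)
            if cfg.get("auto"):
                return cfg
    return {}  # fall back: local Ollama plus interactive model prompt
```

The chat.py hunks later in this diff implement exactly this scan in `_load_startup_config`.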
1bcoder-0.1.1/README.md → 1bcoder-0.1.2/PKG-INFO

@@ -1,14 +1,19 @@
- # 1bcoder
-
- AI-assisted code editor designed for small (1B parameter) language models running locally via [Ollama](https://ollama.com), [LMStudio](https://lmstudio.ai), or [LiteLLM](https://litellm.ai).
-
- ---
+ Metadata-Version: 2.4
+ Name: 1bcoder
+ Version: 0.1.2
+ Summary: AI coding assistant agent for 1B–7B local models (Ollama, LMStudio, llama.cpp). Terminal REPL with file editing, project map, agents, scripts, and parallel multi-model queries.
+ Requires-Python: >=3.10
+ Description-Content-Type: text/markdown
+ License-File: LICENSE
+ Requires-Dist: requests>=2.28
+ Requires-Dist: pyreadline3>=3.4; sys_platform == "win32"
+ Requires-Dist: tqdm>=4.64
+ Requires-Dist: rich>=13.0
+ Dynamic: license-file

- **(c) 2026 Stanislav Zholobetskyi**
- Institute for Information Recording, National Academy of Sciences of Ukraine, Kyiv
+ # 1bcoder

- *Created as part of PhD research on the topic:
- «Intelligent Technology for Software Development and Maintenance Support»*
+ AI coding assistant agent for 1B–7B local models running locally via [Ollama](https://ollama.com), [LMStudio](https://lmstudio.ai), or [LiteLLM](https://litellm.ai).

  ---

(The remaining hunks, @@ -81,7 +86,7 @@, @@ -172,15 +177,15 @@, @@ -789,17 +794,28 @@, @@ -812,7 +828,7 @@, @@ -828,7 +844,7 @@, and @@ -1175,3 +1191,9 @@, repeat the README content changes shown in the PKG-INFO diff above at shifted line offsets.)
1bcoder-0.1.1/1bcoder.egg-info/PKG-INFO → 1bcoder-0.1.2/README.md

@@ -1,26 +1,6 @@
- Metadata-Version: 2.4
- Name: 1bcoder
- Version: 0.1.1
- Requires-Python: >=3.10
- Description-Content-Type: text/markdown
- License-File: LICENSE
- Requires-Dist: requests>=2.28
- Requires-Dist: pyreadline3>=3.4; sys_platform == "win32"
- Requires-Dist: tqdm>=4.64
- Requires-Dist: rich>=13.0
- Dynamic: license-file
-
  # 1bcoder

- AI-assisted code editor designed for small (1B parameter) language models running locally via [Ollama](https://ollama.com), [LMStudio](https://lmstudio.ai), or [LiteLLM](https://litellm.ai).
-
- ---
-
- **(c) 2026 Stanislav Zholobetskyi**
- Institute for Information Recording, National Academy of Sciences of Ukraine, Kyiv
-
- *Created as part of PhD research on the topic:
- «Intelligent Technology for Software Development and Maintenance Support»*
+ AI coding assistant agent for 1B–7B local models running locally via [Ollama](https://ollama.com), [LMStudio](https://lmstudio.ai), or [LiteLLM](https://litellm.ai).

  ---

(The remaining hunks, @@ -93,7 +73,7 @@, @@ -184,15 +164,15 @@, @@ -801,17 +781,28 @@, @@ -824,7 +815,7 @@, @@ -840,7 +831,7 @@, and @@ -1187,3 +1178,9 @@, repeat the same README content changes at shifted line offsets.)
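Both README variants above document the `ollama://` and `openai://` host schemes. The chat.py hunks below call `parse_host(host_str)` but the function body is not part of this diff, so the following is only a plausible sketch of such a scheme split; the real return values and defaults may differ:

```python
def parse_host(host: str) -> tuple[str, str]:
    """Illustrative only: map a scheme-prefixed host to (base_url, provider)."""
    for scheme, provider in (("ollama://", "ollama"), ("openai://", "openai")):
        if host.startswith(scheme):
            return "http://" + host[len(scheme):], provider
    return host, "ollama"  # assumed default for bare http(s) URLs
```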
{1bcoder-0.1.1 → 1bcoder-0.1.2}/chat.py

@@ -110,6 +110,7 @@ TIMEOUT = 120 # default HTTP read timeout in seconds
  AGENT_CONFIG_FILE = os.path.join(BCODER_DIR, "agent.txt")
  ALIASES_FILE = os.path.join(BCODER_DIR, "aliases.txt")
  CONFIG_FILE = os.path.join(BCODER_DIR, "config.yml")
+ GLOBAL_CONFIG_FILE = os.path.join(HOME_BCODER_DIR, "config.yml")
  GLOBAL_ALIASES_FILE = os.path.join(HOME_BCODER_DIR, "aliases.txt")
  AGENTS_DIR = os.path.join(BCODER_DIR, "agents")
  GLOBAL_AGENTS_DIR = os.path.join(HOME_BCODER_DIR, "agents")
@@ -284,17 +285,14 @@ Commands
  /edit <file> <line>
      Manually replace a line. Type new content when prompted.
      e.g. /edit main.py 15
-
  /edit <file> code
      Apply last AI reply (first code block) to the whole file.
      Creates the file if it does not exist. Shows unified diff before applying.
      e.g. /edit main.py code
-
  /edit <file> <line> code
      Apply last AI reply code block starting at <line>.
      Replaces as many lines as the new code has. Creates file if missing. Shows diff.
      e.g. /edit main.py 312 code
-
  /edit <file> <start>-<end> code
      Apply last AI reply code block replacing exactly lines start–end.
      Most precise form — use when you know the exact line range.
@@ -303,11 +301,9 @@ Commands
  /insert <file> <line>
      Insert last AI reply before line N (full text, no code extraction).
      e.g. /insert notes.txt 5
-
  /insert <file> <line> code
      Insert extracted code block from last AI reply before line N.
      e.g. /insert main.py 14 code
-
  /insert <file> <line> <inline text>
      Insert literal text directly (anything that is not the keyword "code").
      e.g. /insert main.py 14 SET_SLEEP_DELAY = 10
@@ -432,14 +428,6 @@ Commands
  /var set project_name =MyService
  /var get
  /var extract
-
- /role <persona>     Set a system role prepended to every chat request (survives /ctx clear).
- /role show          Show the current role.
- /role clear         Remove the role.
-     Default role: "You are a software developer assistant."
-     Note: words like "senior", "expert", "professor" push the model to rely on its own knowledge
-     and skip cautious steps (read-before-edit, describe-before-change). Use them intentionally.
-
  Output capture operators (work with any command — LLM reply, tool, proc):
  <command> -> <varname>  Capture all output of <command> into session variable <varname>.
  $                       Expand to the last captured output anywhere in a command or message.
@@ -448,6 +436,13 @@ Output capture operators (work with any command — LLM reply, tool, proc):
  summarize this for me -> myplan
  /agent planning $

+ /role <persona>     Set a system role prepended to every chat request (survives /ctx clear).
+ /role show          Show the current role.
+ /role clear         Remove the role.
+     Default role: "You are a software developer assistant."
+     Note: words like "senior", "expert", "professor" push the model to rely on its own knowledge
+     and skip cautious steps (read-before-edit, describe-before-change). Use them intentionally.
+
  /team list                      List all team definitions (.yaml files in teams dir).
  /team show <name>               Show workers defined in a team.
  /team run <name> [--param k=v]  Spawn one 1bcoder process per worker, each runs its script.
@@ -471,16 +466,19 @@ Output capture operators (work with any command — LLM reply, tool, proc):
  /team show auth-analysis
  /team new my-team

- /config save [file]           Save current state (host, model, ctx, params, vars, procs) to .1bcoder/config.yml.
- /config save host             Save only the current host to config.
- /config save model            Save only the current model to config.
- /config save ctx              Save only the current ctx to config.
- /config save params           Save only the current params to config.
- /config save vars             Save only the current vars to config.
- /config save procs            Save only the current procs to config.
- /config load [file]           Restore state from config file.
- /config show [file]           Print config file contents.
- /config auto on|off           Enable/disable auto-load at startup.
+ /config save [file]           Save current state (host, model, ctx, params, vars, procs) to .1bcoder/config.yml.
+ /config save global           Save current state to ~/.1bcoder/config.yml (global).
+ /config save host             Save only the current host to local config.
+ /config save global host      Save only the current host to global config.
+ /config save model            Save only the current model to local config.
+ /config save global model     Save only the current model to global config.
+ /config save ctx              Save only the current ctx to config.
+ /config save params           Save only the current params to config.
+ /config save vars             Save only the current vars to config.
+ /config save procs            Save only the current procs to config.
+ /config load [file|global]    Restore state from local config, file, or global config.
+ /config show [file|global]    Print local config, file, or global config contents.
+ /config auto on|off [global]  Enable/disable auto-load at startup (local or global config).
  /config del model|host|ctx    Remove top-level key from config.
  /config del var <name>        Remove specific variable from config.
  /config del vars              Remove entire vars section from config.
@@ -489,7 +487,10 @@ Output capture operators (work with any command — LLM reply, tool, proc):
  /config del proc <name>       Remove specific proc from config.
  /config del procs             Remove entire procs section from config.
  e.g. /config save
+      /config save global
+      /config save global host
       /config auto on
+      /config auto on global
       /config del var project
       /config del procs

@@ -565,7 +566,7 @@ Output capture operators (work with any command — LLM reply, tool, proc):
  /mcp call read_file (if only one server connected)
  /mcp disconnect <name>
      Shut down a connected MCP server.
- See MCP.md for ready-to-use servers (filesystem, web, git, db, browser...).
+ See /doc MCP for ready-to-use servers (filesystem, web, git, db, browser...).

  /parallel ["prompt1"] ["prompt2"] [profile <name>] [host|model|file ...]
      Send prompts to multiple models in parallel. Each response saved to its file.
@@ -580,7 +581,6 @@ Output capture operators (work with any command — LLM reply, tool, proc):
  e.g. /parallel "review for bugs" profile small1
       /parallel "explain" "optimise" profile small1
       /parallel "what does this do" localhost:11434|llama3.2:1b|ans/a.txt
-
  /parallel profile create <name> [host|model|file ...]
      Inline: workers supplied as space-separated host|model|file specs.
      Interactive: omit workers — wizard prompts host/model/file one by one.
@@ -736,23 +736,22 @@ Output capture operators (work with any command — LLM reply, tool, proc):
  /agent read file plan: models.py, views.py, urls.py
  /agent fix the book model plan: steps.md
  /agent implement sharepoint plan: plan.md
+
  /init                Create .1bcoder/ scaffold in current directory (safe to re-run).

  /help                Show full help.
  /help <command>      Show help for one command (e.g. /help map, /help fix).
  /help <command> ctx  Same but also inject the text into AI context.
+
  /doc list            List documentation articles in doc/.
- /doc <name>          Show article (e.g. /doc PARAM, /doc MCP).
+ /doc <name>          Show article rendered as Markdown.
+ /doc <name> raw      Show article as plain text.
  /doc <name> ctx      Add article to AI context.
+
  /exit                Quit.

- ESC - interrupt AI response mid-stream.
+ Ctrl+C - interrupt AI response mid-stream.
  Enter - submit message.
- Shift+Enter - insert newline (requires terminal with Kitty keyboard support).
- Ctrl+N - insert newline (reliable fallback for all terminals).
-
- To select and copy text from the log (Windows):
-     Hold Shift and drag with the left mouse button.
  """

@@ -2019,10 +2018,17 @@ class CoderCLI:
          print(" Ctrl+C interrupts stream /exit to quit")
          print(" <cmd> -> var capture output into variable $ = last output")
          print()
-         _auto_cfg = self._load_config_file()
-         if _auto_cfg.get("auto"):
-             print(f"[config] auto-loading {CONFIG_FILE}")
-             self._apply_config(_auto_cfg)
+         _auto_cfg = {}
+         for _cfg_path in (CONFIG_FILE, GLOBAL_CONFIG_FILE):
+             if os.path.isfile(_cfg_path):
+                 _c = self._load_config_file(_cfg_path)
+                 if _c.get("auto"):
+                     _auto_cfg = _c
+                     break
+         if _auto_cfg:
+             # host/model already applied at startup — only apply remaining settings
+             _session_cfg = {k: v for k, v in _auto_cfg.items() if k not in ("host", "model")}
+             self._apply_config(_session_cfg)
          print()
          while True:
              try:
@@ -3497,7 +3503,14 @@ advanced_tools =
          elif sub == "apply":
              auto_yes, filename, params = _parse_script_apply_args(rest)
              if filename:
-                 path = filename if os.path.isabs(filename) else os.path.join(SCRIPTS_DIR, filename)
+                 if os.path.isabs(filename):
+                     path = filename
+                 else:
+                     path = next(
+                         (os.path.join(d, filename) for d in (SCRIPTS_DIR, GLOBAL_SCRIPTS_DIR)
+                          if os.path.isfile(os.path.join(d, filename))),
+                         os.path.join(SCRIPTS_DIR, filename)
+                     )
                  if not os.path.exists(path):
                      print(f"script file not found: {path}")
                      return
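The `next(...)` expression in the hunk above compresses a two-directory search with a fallback default into one statement: the first directory that actually contains the script wins, and the local path is returned otherwise so the existing not-found error still fires. The same logic as an explicit loop, using the hunk's own names:

```python
# Equivalent of the next(...) lookup above, unrolled for readability.
path = os.path.join(SCRIPTS_DIR, filename)  # default: feeds the not-found branch
for d in (SCRIPTS_DIR, GLOBAL_SCRIPTS_DIR):
    candidate = os.path.join(d, filename)
    if os.path.isfile(candidate):
        path = candidate
        break
```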
@@ -4272,8 +4285,18 @@ advanced_tools =

          if sub == "save":
              _SAVE_FIELDS = {"host", "model", "ctx", "params", "vars", "procs"}
-             field = arg2 if arg2 in _SAVE_FIELDS else None
-             cfg_path = CONFIG_FILE if (not arg2 or field) else arg2
+             if arg2 == "global":
+                 cfg_path = GLOBAL_CONFIG_FILE
+                 field = arg3 if arg3 in _SAVE_FIELDS else None
+             elif arg2 in _SAVE_FIELDS:
+                 cfg_path = CONFIG_FILE
+                 field = arg2
+             elif arg2:
+                 cfg_path = arg2
+                 field = None
+             else:
+                 cfg_path = CONFIG_FILE
+                 field = None
              # load existing to merge into (preserve what's already there)
              cfg = self._load_config_file(cfg_path)
              host_str = (self.base_url.replace("http://", "openai://")
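The four-way branch above maps the `/config save` arguments onto a target file plus an optional single field. As a quick reference (`custom.yml` is a hypothetical example path, not a package default):

```
/config save              → CONFIG_FILE,        all fields
/config save global       → GLOBAL_CONFIG_FILE, all fields
/config save host         → CONFIG_FILE,        host only
/config save global host  → GLOBAL_CONFIG_FILE, host only
/config save custom.yml   → custom.yml,         all fields
```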
@@ -4296,7 +4319,7 @@
              if self._proc_active:
                  cfg["procs"] = list(self._proc_active)
              try:
-                 os.makedirs(BCODER_DIR, exist_ok=True)
+                 os.makedirs(os.path.dirname(cfg_path), exist_ok=True)
                  with open(cfg_path, "w", encoding="utf-8") as f:
                      f.write(self._write_config_yml(cfg))
                  saved = field or "all"
@@ -4305,7 +4328,7 @@
                  print(f"[config] cannot write: {e}")

          elif sub == "load":
-             cfg_path = arg2 if arg2 else CONFIG_FILE
+             cfg_path = GLOBAL_CONFIG_FILE if arg2 == "global" else (arg2 if arg2 else CONFIG_FILE)
              cfg = self._load_config_file(cfg_path)
              if not cfg:
                  print(f"[config] not found or empty: {cfg_path}")
@@ -4314,7 +4337,7 @@
              _ok(f"[config] loaded ← {cfg_path}")

          elif sub == "show":
-             cfg_path = arg2 if arg2 else CONFIG_FILE
+             cfg_path = GLOBAL_CONFIG_FILE if arg2 == "global" else (arg2 if arg2 else CONFIG_FILE)
              if not os.path.isfile(cfg_path):
                  print(f"[config] no config file: {cfg_path}")
                  return
@@ -4322,16 +4345,19 @@
                  print(f.read())

          elif sub == "auto":
-             if arg2 not in ("on", "off"):
-                 print("usage: /config auto on | off")
+             use_global = arg3 == "global" or arg2 == "global"
+             onoff = arg3 if arg2 == "global" else arg2
+             if onoff not in ("on", "off"):
+                 print("usage: /config auto on|off [global]")
                  return
-             cfg = self._load_config_file()
-             cfg["auto"] = (arg2 == "on")
+             cfg_path = GLOBAL_CONFIG_FILE if use_global else CONFIG_FILE
+             cfg = self._load_config_file(cfg_path)
+             cfg["auto"] = (onoff == "on")
              try:
-                 os.makedirs(BCODER_DIR, exist_ok=True)
-                 with open(CONFIG_FILE, "w", encoding="utf-8") as f:
+                 os.makedirs(os.path.dirname(cfg_path), exist_ok=True)
+                 with open(cfg_path, "w", encoding="utf-8") as f:
                      f.write(self._write_config_yml(cfg))
-                 _ok(f"[config] auto {'on' if cfg['auto'] else 'off'}")
+                 _ok(f"[config] auto {'on' if cfg['auto'] else 'off'} → {cfg_path}")
              except OSError as e:
                  print(f"[config] cannot write: {e}")

@@ -4388,32 +4414,42 @@

      def _cmd_doc(self, user_input: str):
          """List or display documentation articles from the doc/ folder."""
-         DOC_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), ".1bcoder", "doc")
+         LOCAL_DOC_DIR = os.path.join(BCODER_DIR, "doc")
+         GLOBAL_DOC_DIR = os.path.join(HOME_BCODER_DIR, "doc")
+         DOC_DIRS = [d for d in (LOCAL_DOC_DIR, GLOBAL_DOC_DIR) if os.path.isdir(d)]
          tokens = user_input.split(None, 2)
          sub = tokens[1].lower() if len(tokens) >= 2 else "list"

          if sub == "list" or sub == "ls":
-             if not os.path.isdir(DOC_DIR):
+             if not DOC_DIRS:
                  _err("doc/ folder not found")
                  return
-             files = sorted(f for f in os.listdir(DOC_DIR) if f.lower().endswith(".md"))
+             seen = set()
+             files = []
+             for d in DOC_DIRS:
+                 for f in os.listdir(d):
+                     if f.lower().endswith(".md") and f.lower() not in seen:
+                         seen.add(f.lower())
+                         files.append(f)
+             files.sort()
              if not files:
                  print(" (no articles in doc/)")
                  return
              print(" Available articles (use /doc <name> to read):")
              for f in files:
-                 name = f[:-3]  # strip .md
-                 print(f" {name}")
+                 print(f" {f[:-3]}")
              return

-         # find article — case-insensitive, .md optional
+         # find article — local overrides global, case-insensitive, .md optional
          name = sub if sub.endswith(".md") else sub + ".md"
          path = None
-         if os.path.isdir(DOC_DIR):
-             for f in os.listdir(DOC_DIR):
+         for d in DOC_DIRS:
+             for f in os.listdir(d):
                  if f.lower() == name.lower():
-                     path = os.path.join(DOC_DIR, f)
+                     path = os.path.join(d, f)
                      break
+             if path:
+                 break
          if path is None:
              _err(f"doc not found: {sub} (try /doc list)")
              return
@@ -4425,12 +4461,16 @@
              _err(f"cannot read {path}: {e}")
              return

-         add_ctx = len(tokens) >= 3 and tokens[2].lower() == "ctx"
-         if add_ctx:
+         qualifier = tokens[2].lower() if len(tokens) >= 3 else ""
+         if qualifier == "ctx":
              self.messages.append({"role": "user", "content": f"[doc/{sub.upper()}.md]\n{text}"})
              _ok(f"[doc] {sub.upper()}.md added to context ({len(text):,} chars)")
-         else:
+         elif qualifier == "raw":
              print(text)
+         else:
+             from rich.console import Console
+             from rich.markdown import Markdown
+             Console().print(Markdown(text))

      def _cmd_team(self, user_input: str):
          import shlex, concurrent.futures
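The new default branch above routes `/doc <name>` through Rich's Markdown renderer (rich>=13.0 is already among the Requires-Dist entries earlier in this diff). Reduced to a standalone sketch:

```python
from rich.console import Console
from rich.markdown import Markdown

def show_doc(text: str, raw: bool = False) -> None:
    """Print a doc article verbatim, or rendered as Markdown via Rich."""
    if raw:
        print(text)
    else:
        Console().print(Markdown(text))
```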
@@ -6124,10 +6164,38 @@

  # ── entry point ────────────────────────────────────────────────────────────────

+ def _load_startup_config() -> tuple[dict, str]:
+     """Load the first config with auto: true (local → global).
+     Returns (cfg_dict, config_path), or ({}, '') if none have auto: true."""
+     for cfg_path in (CONFIG_FILE, GLOBAL_CONFIG_FILE):
+         if os.path.isfile(cfg_path):
+             try:
+                 with open(cfg_path, encoding="utf-8") as f:
+                     cfg = CoderCLI._parse_config_yml(f.read())
+                 if cfg.get("auto"):
+                     return cfg, cfg_path
+             except OSError:
+                 pass
+     return {}, ""
+
+
+ def _pick_model(models: list) -> str:
+     """Interactively prompt user to pick a model by number. Exits on Ctrl+C."""
+     while True:
+         try:
+             raw = input("Pick [1]: ").strip() or "1"
+             idx = int(raw) - 1
+             if 0 <= idx < len(models):
+                 return models[idx]
+         except (ValueError, KeyboardInterrupt, EOFError):
+             print()
+             sys.exit(0)
+
+
  def main():
      parser = argparse.ArgumentParser(description="1bcoder — AI coder for 1B models")
-     parser.add_argument("--host", default="http://localhost:11434",
-                         help="Ollama host (default: http://localhost:11434)")
+     parser.add_argument("--host", default=None,
+                         help="Ollama host (default: from config.yml or http://localhost:11434)")
      parser.add_argument("--model",
                          help="Model name to use (skips selection prompt)")
      parser.add_argument("--init", action="store_true",
@@ -6150,6 +6218,9 @@ def main():
      else:
          print(f"Initialized .1bcoder/scripts/ in {WORKDIR}")

+     explicit_host = args.host is not None
+     explicit_model = args.model is not None
+
      if args.scriptapply:
          script = args.scriptapply
          if not os.path.isabs(script):
@@ -6164,8 +6235,10 @@
          if not os.path.exists(script):
              print(f"Script not found: {script}")
              sys.exit(1)
-         base_url, provider = parse_host(args.host)
-         model = args.model or ""
+         cfg, _ = ({}, "") if (explicit_host and explicit_model) else _load_startup_config()
+         host_str = args.host if explicit_host else (cfg.get("host") or "http://localhost:11434")
+         base_url, provider = parse_host(host_str)
+         model = args.model or cfg.get("model") or ""
          models = []
          if not model:
              try:
@@ -6191,7 +6264,14 @@
          cli._cmd_script(f"/script apply -y {script_fwd} {param_tokens}".strip())
          sys.exit(0)

-     base_url, provider = parse_host(args.host)
+     # ── load config unless both host and model were explicitly given ──────────
+     cfg, cfg_path = ({}, "") if (explicit_host and explicit_model) else _load_startup_config()
+
+     # ── resolve host ──────────────────────────────────────────────────────────
+     host_str = args.host if explicit_host else (cfg.get("host") or "http://localhost:11434")
+     base_url, provider = parse_host(host_str)
+
+     # ── connect ───────────────────────────────────────────────────────────────
      try:
          models = list_models(base_url, provider)
      except requests.exceptions.ConnectionError:
@@ -6205,8 +6285,16 @@
          print("No models available. Run: ollama pull <model>")
          sys.exit(1)

-     if args.model and args.model in models:
-         model = args.model
+     # ── resolve model ─────────────────────────────────────────────────────────
+     desired = args.model if explicit_model else cfg.get("model")
+     if desired and desired in models:
+         model = desired
+     elif desired:
+         print(f"Model '{desired}' not available on {base_url}.")
+         print("Available models:")
+         for i, m in enumerate(models, 1):
+             print(f" {i}. {m}")
+         model = _pick_model(models)
      elif len(models) == 1:
          model = models[0]
          print(f"Model: {model}")
@@ -6214,16 +6302,7 @@
          print("Available models:")
          for i, m in enumerate(models, 1):
              print(f" {i}. {m}")
-         while True:
-             try:
-                 raw = input("Pick [1]: ").strip() or "1"
-                 idx = int(raw) - 1
-                 if 0 <= idx < len(models):
-                     model = models[idx]
-                     break
-             except (ValueError, KeyboardInterrupt, EOFError):
-                 print()
-                 sys.exit(0)
+         model = _pick_model(models)

      CoderCLI(base_url, model, models, provider).run()
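Taken together, the `main()` hunks above establish the 0.1.2 resolution order for the model: explicit `--model`, then the auto-loaded config's `model`, then the only installed model, and finally the interactive picker. A condensed restatement of that flow (`resolve_model` is this summary's name, not a function in the package):

```python
def resolve_model(cli_model, cfg_model, models, pick):
    """Resolution order per the diff: --model → config → sole model → prompt."""
    desired = cli_model or cfg_model      # the CLI flag takes precedence
    if desired and desired in models:
        return desired
    if desired:                           # requested model missing on this host
        return pick(models)               # after the availability listing
    if len(models) == 1:
        return models[0]
    return pick(models)
```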
{1bcoder-0.1.1 → 1bcoder-0.1.2}/pyproject.toml

@@ -4,7 +4,8 @@ build-backend = "setuptools.build_meta"

  [project]
  name = "1bcoder"
- version = "0.1.1"
+ version = "0.1.2"
+ description = "AI coding assistant agent for 1B–7B local models (Ollama, LMStudio, llama.cpp). Terminal REPL with file editing, project map, agents, scripts, and parallel multi-model queries."
  requires-python = ">=3.10"
  readme = {file = "README.md", content-type = "text/markdown"}
  dependencies = [