vimlm 0.0.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
vimlm-0.0.1/PKG-INFO ADDED
@@ -0,0 +1,55 @@
1
+ Metadata-Version: 2.2
2
+ Name: vimlm
3
+ Version: 0.0.1
4
+ Summary: VimLM - LLM-powered Vim assistant
5
+ Home-page: https://github.com/JosefAlbers/vimlm
6
+ Author: Josef Albers
7
+ Author-email: albersj66@gmail.com
8
+ Requires-Python: >=3.13.1
9
+ Description-Content-Type: text/markdown
10
+ Requires-Dist: nanollama==0.0.3
11
+ Requires-Dist: watchfiles==1.0.4
12
+ Dynamic: author
13
+ Dynamic: author-email
14
+ Dynamic: description
15
+ Dynamic: description-content-type
16
+ Dynamic: home-page
17
+ Dynamic: requires-dist
18
+ Dynamic: requires-python
19
+ Dynamic: summary
20
+
21
+ # VimLM - LLM-powered Vim assistant
22
+
23
+ ## Features
24
+
25
+ - Real-time code assistance using local LLMs
26
+ - Context-aware suggestions based on your current file
27
+ - Split-window interface showing LLM responses
28
+ - Simple keybinding integration with Vim
29
+ - Works completely offline with local models
30
+
31
+ ## Installation
32
+
33
+ ```zsh
34
+ pip install vimlm
35
+ ```
36
+
37
+ ## Usage
38
+
39
+ 1. Start Vim with VimLM:
40
+
41
+ ```zsh
42
+ vimlm your_file.js
43
+ ```
44
+
45
+ 2. Use the key bindings in Vim:
46
+ - `Ctrl-L` in normal mode: Get suggestions for current line
47
+ - `Ctrl-L` in visual mode: Get suggestions for selected code
48
+
49
+ The LLM response will appear in a split window on the right side of your Vim interface.
50
+
51
+ ## Demo
52
+
53
+ ![vimlm](https://github.com/user-attachments/assets/4aa39efe-aa6d-4363-8fe1-cf964d7f849c)
54
+
55
+
vimlm-0.0.1/README.md ADDED
@@ -0,0 +1,35 @@
1
+ # VimLM - LLM-powered Vim assistant
2
+
3
+ ## Features
4
+
5
+ - Real-time code assistance using local LLMs
6
+ - Context-aware suggestions based on your current file
7
+ - Split-window interface showing LLM responses
8
+ - Simple keybinding integration with Vim
9
+ - Works completely offline with local models
10
+
11
+ ## Installation
12
+
13
+ ```zsh
14
+ pip install vimlm
15
+ ```
16
+
17
+ ## Usage
18
+
19
+ 1. Start Vim with VimLM:
20
+
21
+ ```zsh
22
+ vimlm your_file.js
23
+ ```
24
+
25
+ 2. Use the key bindings in Vim:
26
+ - `Ctrl-L` in normal mode: Get suggestions for current line
27
+ - `Ctrl-L` in visual mode: Get suggestions for selected code
28
+
29
+ The LLM response will appear in a split window on the right side of your Vim interface.
30
+
31
+ ## Demo
32
+
33
+ ![vimlm](https://github.com/user-attachments/assets/4aa39efe-aa6d-4363-8fe1-cf964d7f849c)
34
+
35
+
vimlm-0.0.1/setup.cfg ADDED
@@ -0,0 +1,4 @@
1
+ [egg_info]
2
+ tag_build =
3
+ tag_date = 0
4
+
vimlm-0.0.1/setup.py ADDED
@@ -0,0 +1,25 @@
1
from setuptools import setup, find_packages

# Read pinned runtime dependencies, skipping blank lines so a trailing
# newline in requirements.txt does not yield an empty requirement string.
with open("requirements.txt") as f:
    requirements = [line.strip() for line in f if line.strip()]

# Read the long description with an explicit encoding and a context manager
# so the file handle is closed promptly (the original leaked an open handle).
with open("README.md", encoding="utf-8") as f:
    long_description = f.read()

setup(
    name="vimlm",
    version="0.0.1",
    author="Josef Albers",
    author_email="albersj66@gmail.com",
    readme='README.md',
    description="VimLM - LLM-powered Vim assistant",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/JosefAlbers/vimlm",
    packages=find_packages(),
    python_requires=">=3.13.1",
    install_requires=requirements,
    entry_points={
        "console_scripts": [
            "vimlm=vimlm.main:main",
        ],
    },
)
25
+
File without changes
@@ -0,0 +1,72 @@
1
import argparse
import os
import subprocess
import sys
import tempfile
from pathlib import Path
6
+
7
# Vimscript sourced by the launched Vim instance.  It:
#   * opens ~/watched_dir/response.md read-only in a right-hand vertical split,
#   * polls that buffer every second (timer_start 1000ms, repeat -1) so new
#     LLM output written by the watcher appears automatically,
#   * maps Ctrl-L (normal and visual mode) to dump the selection ('yank'),
#     the buffer ('context'), the user's prompt ('user'), and the current
#     filename ('tree') into ~/watched_dir for the watcher to consume.
VIM_CONFIG = """
let s:watched_dir = expand('~/watched_dir')

function! Monitor()
write
let response_path = s:watched_dir . '/response.md'
rightbelow vsplit | execute 'view ' . response_path
setlocal autoread
setlocal readonly
setlocal nobuflisted
filetype detect
syntax on
wincmd h
let s:monitor_timer = timer_start(1000, 'CheckForUpdates', {'repeat': -1})
endfunction

function! CheckForUpdates(timer)
let bufnum = bufnr(s:watched_dir . '/response.md')
if bufnum == -1
call timer_stop(s:monitor_timer)
return
endif
silent! checktime
endfunction

function! SaveUserInput()
let user_input = input('Ask LLM: ')
let user_file = s:watched_dir . '/user'
call writefile([user_input], user_file, 'w')
let current_file = expand('%:t')
let tree_file = s:watched_dir . '/tree'
call writefile([current_file], tree_file, 'w')
endfunction

vnoremap <c-l> :w! ~/watched_dir/yank<CR>:w! ~/watched_dir/context<CR>:call SaveUserInput()<CR>
nnoremap <c-l> V:w! ~/watched_dir/yank<CR>:w! ~/watched_dir/context<CR>:call SaveUserInput()<CR>

call Monitor()
"""
46
+
47
def main():
    """Launch Vim alongside the VimLM watcher process.

    Creates ~/watched_dir, writes the Vim configuration to a temporary
    script, starts the watcher subprocess with its output streamed to
    response.md, then runs Vim (forwarding any extra CLI arguments).
    The watcher is terminated and the temp script removed on exit.
    """
    parser = argparse.ArgumentParser(description="VimLM - LLM-powered Vim assistant")
    parser.add_argument("vim_args", nargs=argparse.REMAINDER, help="Additional Vim arguments")
    args = parser.parse_args()
    watch_dir = Path.home() / "watched_dir"
    watch_dir.mkdir(exist_ok=True)
    with tempfile.NamedTemporaryFile(mode='w', suffix='.vim', delete=False) as f:
        f.write(VIM_CONFIG)
        vim_script = f.name
    watcher = None
    try:
        # Use sys.executable so the watcher runs under the same interpreter
        # (and virtualenv) as vimlm itself, instead of whatever "python"
        # resolves to on PATH.  The log handle is closed once Popen has
        # inherited the descriptor, instead of being leaked.
        with (watch_dir / "response.md").open("w") as log_file:
            watcher = subprocess.Popen(
                [sys.executable, "-m", "vimlm.watcher"],
                stdout=log_file,
                stderr=subprocess.STDOUT,
            )
        vim_command = ["vim", "-c", f"source {vim_script}"]
        if args.vim_args:
            vim_command.extend(args.vim_args)
        subprocess.run(vim_command)
    finally:
        # Guard: if Popen itself raised, `watcher` was never bound in the
        # original code and the finally block died with NameError, masking
        # the real exception.
        if watcher is not None:
            watcher.terminate()
            watcher.wait()
        os.unlink(vim_script)

if __name__ == "__main__":
    main()
@@ -0,0 +1,59 @@
1
+ import asyncio
2
+ import subprocess
3
+ import json
4
+ import os
5
+ from watchfiles import awatch
6
+ from nanollama32 import Chat
7
+
8
+ DEBUG = False
9
+ NUM_TOKEN = 1000
10
+ DIRECTORY = os.path.expanduser("~/watched_dir")
11
+ OUT_FILE = "response.md"
12
+ LOG_FILE = "log.json"
13
+ FILES = ["context", "yank", "user", "tree"]
14
+
15
+ out_path = os.path.join(DIRECTORY, OUT_FILE)
16
+ log_path = os.path.join(DIRECTORY, LOG_FILE)
17
+ os.makedirs(DIRECTORY, exist_ok=True)
18
+ chat = Chat(variant='uncn_llama_32_3b_it')
19
+
20
+ with open(out_path, "w", encoding="utf-8") as f:
21
+ f.write('LLM is ready')
22
+
23
async def monitor_directory():
    """Watch DIRECTORY and run process_files() whenever a complete request
    (every file in FILES, signalled by the trigger file) is on disk."""
    trigger = FILES[-1]
    async for batch in awatch(DIRECTORY):
        changed_names = {os.path.basename(path) for _, path in batch}
        if trigger not in changed_names:
            continue
        present = set(os.listdir(DIRECTORY))
        if all(name in present for name in FILES):
            await process_files()
28
+
29
async def process_files():
    """Assemble a prompt from the request files, query the LLM, and write
    the answer to OUT_FILE for Vim to display.

    Consumes (reads, then deletes) each file in FILES.  The prompt layout
    depends on the yanked selection: multi-line selections get their own
    fenced code block, a single-line selection is quoted inline, and an
    empty selection sends only the buffer context plus the question.
    """
    data = {}
    for file in FILES:
        path = os.path.join(DIRECTORY, file)
        with open(path, "r", encoding="utf-8") as f:
            data[file] = f.read().strip()
        os.remove(path)
    # Extension of the edited file, used as the code-fence language tag.
    data['ext'] = data['tree'].split('.')[-1]
    if len(data['yank']) > 0:
        str_template = "**{tree}**\n```{ext}\n{context}\n```\n\n```{ext}\n{yank}\n```\n\n{user}" if '\n' in data['yank'] else "**{tree}**\n```{ext}\n{context}\n```\n\n`{yank}` {user}"
    else:
        str_template = "**{tree}**\n```{ext}\n{context}\n\n```\n\n{user}"
    prompt = str_template.format(**data)
    # Truncate the response buffer so Vim only shows the new answer.
    with open(out_path, "w", encoding="utf-8") as f:
        f.write('')
    # NOTE(review): [:-10] presumably strips a fixed-length end-of-turn
    # marker emitted by the model — TODO confirm against nanollama32.
    response = chat(prompt, max_new=NUM_TOKEN, verbose=DEBUG, stream=out_path)[0][:-10].strip()
    with open(out_path, "w", encoding="utf-8") as f:
        f.write(response)
    chat.reset()
    if DEBUG:
        if os.path.exists(log_path):
            with open(log_path, "r", encoding="utf-8") as log_f:
                logs = json.load(log_f)
        else:
            logs = []
        # Fix: the log key was misspelled 'respone' in the original.
        logs.append({'prompt': prompt, 'response': response})
        with open(log_path, "w", encoding="utf-8") as log_f:
            json.dump(logs, log_f, indent=2)
57
+
58
# Start the watch loop only when executed as a script — including the
# `python -m vimlm.watcher` invocation used by vimlm.main — so merely
# importing this module no longer blocks in the event loop.
if __name__ == "__main__":
    asyncio.run(monitor_directory())
59
+
@@ -0,0 +1,55 @@
1
+ Metadata-Version: 2.2
2
+ Name: vimlm
3
+ Version: 0.0.1
4
+ Summary: VimLM - LLM-powered Vim assistant
5
+ Home-page: https://github.com/JosefAlbers/vimlm
6
+ Author: Josef Albers
7
+ Author-email: albersj66@gmail.com
8
+ Requires-Python: >=3.13.1
9
+ Description-Content-Type: text/markdown
10
+ Requires-Dist: nanollama==0.0.3
11
+ Requires-Dist: watchfiles==1.0.4
12
+ Dynamic: author
13
+ Dynamic: author-email
14
+ Dynamic: description
15
+ Dynamic: description-content-type
16
+ Dynamic: home-page
17
+ Dynamic: requires-dist
18
+ Dynamic: requires-python
19
+ Dynamic: summary
20
+
21
+ # VimLM - LLM-powered Vim assistant
22
+
23
+ ## Features
24
+
25
+ - Real-time code assistance using local LLMs
26
+ - Context-aware suggestions based on your current file
27
+ - Split-window interface showing LLM responses
28
+ - Simple keybinding integration with Vim
29
+ - Works completely offline with local models
30
+
31
+ ## Installation
32
+
33
+ ```zsh
34
+ pip install vimlm
35
+ ```
36
+
37
+ ## Usage
38
+
39
+ 1. Start Vim with VimLM:
40
+
41
+ ```zsh
42
+ vimlm your_file.js
43
+ ```
44
+
45
+ 2. Use the key bindings in Vim:
46
+ - `Ctrl-L` in normal mode: Get suggestions for current line
47
+ - `Ctrl-L` in visual mode: Get suggestions for selected code
48
+
49
+ The LLM response will appear in a split window on the right side of your Vim interface.
50
+
51
+ ## Demo
52
+
53
+ ![vimlm](https://github.com/user-attachments/assets/4aa39efe-aa6d-4363-8fe1-cf964d7f849c)
54
+
55
+
@@ -0,0 +1,11 @@
1
+ README.md
2
+ setup.py
3
+ vimlm/__init__.py
4
+ vimlm/main.py
5
+ vimlm/watcher.py
6
+ vimlm.egg-info/PKG-INFO
7
+ vimlm.egg-info/SOURCES.txt
8
+ vimlm.egg-info/dependency_links.txt
9
+ vimlm.egg-info/entry_points.txt
10
+ vimlm.egg-info/requires.txt
11
+ vimlm.egg-info/top_level.txt
@@ -0,0 +1,2 @@
1
+ [console_scripts]
2
+ vimlm = vimlm.main:main
@@ -0,0 +1,2 @@
1
+ nanollama==0.0.3
2
+ watchfiles==1.0.4
@@ -0,0 +1 @@
1
+ vimlm