nlshell 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
nlshell-0.1.0/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ The MIT License (MIT)
2
+
3
+ Copyright (c) 2024 Mattias Brycke
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in
13
+ all copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
21
+ THE SOFTWARE.
nlshell-0.1.0/PKG-INFO ADDED
@@ -0,0 +1,92 @@
1
+ Metadata-Version: 2.1
2
+ Name: nlshell
3
+ Version: 0.1.0
4
+ Summary: Creates shell command from a description using an LLM
5
+ License: MIT
6
+ Author: Mattias Brycke
7
+ Author-email: mattias.brycke@gmail.com
8
+ Requires-Python: >=3.10,<4.0
9
+ Classifier: License :: OSI Approved :: MIT License
10
+ Classifier: Programming Language :: Python :: 3
11
+ Classifier: Programming Language :: Python :: 3.10
12
+ Classifier: Programming Language :: Python :: 3.11
13
+ Classifier: Programming Language :: Python :: 3.12
14
+ Classifier: Programming Language :: Python :: 3.13
15
+ Requires-Dist: openai (>=1.52.2,<2.0.0)
16
+ Description-Content-Type: text/markdown
17
+
18
+ # nlshell
19
+
20
+ A very small Python package that generates shell commands from a "natural language" description.
21
+ It will provide an explanation of the command and prefill the command line with the generated command.
22
+
23
+ Uses an LLM to generate the shell commands.
24
+
25
+ ### _IMPORTANT! Never run a generated command without understanding what it does. The generated command may be harmful. There is no guarantee whatsoever that what the LLM suggests is correct! DON'T BLINDLY TRUST THE GENERATED SUGGESTION!_
26
+
27
+ ## Table of Contents
28
+
29
+ - [Installation](#installation)
30
+ - [Usage](#usage)
31
+ - [License](#license)
32
+ - [GitHub](#github)
33
+
34
+ ## Installation
35
+
36
+ Install the package with either pip or poetry:
37
+
38
+ ```bash
39
+ # Using pip
40
+ pip install nlshell
41
+
42
+ # Using poetry
43
+ poetry add nlshell
44
+
45
+ ```
46
+
47
+ ## Usage
48
+
49
+ Activate the virtual environment where the package is installed to use the nlshell command.
50
+
51
+
52
+ ### Set the base_url
53
+ The package uses the openai client to call an LLM. By specifying a `base-url` you can use your own model, e.g. a local model.
54
+ ```shell
55
+ c --base-url http://localhost:11434/v1
56
+ ```
57
+ If no `base_url` is explicitly set, the package will ask which url to use.
58
+
59
+
60
+ ### Create a command
61
+ ```shell
62
+ c list all files in the current directory, including hidden files
63
+ ```
64
+ where c is the nlshell command.
65
+ This command will generate a response like this:
66
+ ```text
67
+ The 'ls' command lists directory contents. The option '-l' provides a long listing format which includes file permissions, number of links, owner, group, size, and time of last modification. The '-a' option ensures hidden files (those starting with a dot .) are also listed.
68
+ $ ls -la
69
+ ```
70
+
71
+
72
+ ### Set the api_key
73
+ ```shell
74
+ c --api-key your-api-key
75
+ ```
76
+ Even if you run a local model you need to set an api_key since the openai client requires it, even if the key is just a dummy key.
77
+
78
+ ### Set the model
79
+
80
+ ```shell
81
+ c --model-name qwen2.5-coder:7b
82
+ ```
83
+ If no model is specified the package will ask which model to use.
84
+
85
+
86
+
87
+ ## License
88
+ Distributed under the MIT License. See `LICENSE` for more information.
89
+
90
+ ## GitHub
91
+ [https://github.com/mbrycke/nlshell](https://github.com/mbrycke/nlshell)
92
+
@@ -0,0 +1,74 @@
1
+ # nlshell
2
+
3
+ A very small Python package that generates shell commands from a "natural language" description.
4
+ It will provide an explanation of the command and prefill the command line with the generated command.
5
+
6
+ Uses an LLM to generate the shell commands.
7
+
8
+ ### _IMPORTANT! Never run a generated command without understanding what it does. The generated command may be harmful. There is no guarantee whatsoever that what the LLM suggests is correct! DON'T BLINDLY TRUST THE GENERATED SUGGESTION!_
9
+
10
+ ## Table of Contents
11
+
12
+ - [Installation](#installation)
13
+ - [Usage](#usage)
14
+ - [License](#license)
15
+ - [GitHub](#github)
16
+
17
+ ## Installation
18
+
19
+ Install the package with either pip or poetry:
20
+
21
+ ```bash
22
+ # Using pip
23
+ pip install nlshell
24
+
25
+ # Using poetry
26
+ poetry add nlshell
27
+
28
+ ```
29
+
30
+ ## Usage
31
+
32
+ Activate the virtual environment where the package is installed to use the nlshell command.
33
+
34
+
35
+ ### Set the base_url
36
+ The package uses the openai client to call an LLM. By specifying a `base-url` you can use your own model, e.g. a local model.
37
+ ```shell
38
+ c --base-url http://localhost:11434/v1
39
+ ```
40
+ If no `base_url` is explicitly set, the package will ask which url to use.
41
+
42
+
43
+ ### Create a command
44
+ ```shell
45
+ c list all files in the current directory, including hidden files
46
+ ```
47
+ where c is the nlshell command.
48
+ This command will generate a response like this:
49
+ ```text
50
+ The 'ls' command lists directory contents. The option '-l' provides a long listing format which includes file permissions, number of links, owner, group, size, and time of last modification. The '-a' option ensures hidden files (those starting with a dot .) are also listed.
51
+ $ ls -la
52
+ ```
53
+
54
+
55
+ ### Set the api_key
56
+ ```shell
57
+ c --api-key your-api-key
58
+ ```
59
+ Even if you run a local model you need to set an api_key since the openai client requires it, even if the key is just a dummy key.
60
+
61
+ ### Set the model
62
+
63
+ ```shell
64
+ c --model-name qwen2.5-coder:7b
65
+ ```
66
+ If no model is specified the package will ask which model to use.
67
+
68
+
69
+
70
+ ## License
71
+ Distributed under the MIT License. See `LICENSE` for more information.
72
+
73
+ ## GitHub
74
+ [https://github.com/mbrycke/nlshell](https://github.com/mbrycke/nlshell)
File without changes
@@ -0,0 +1,197 @@
1
+ import argparse
2
+ import json
3
+ import openai
4
+ import os
5
+ import pydantic
6
+ import readline
7
+ from openai import OpenAI
8
+ from pydantic import BaseModel
9
+ from nlshell.settings import (
10
+ handle_warning_message,
11
+ set_config,
12
+ get_base_url,
13
+ get_model,
14
+ get_api_key,
15
+ )
16
+
17
# Maximum number of LLM calls to attempt before giving up on valid JSON.
N_GENERATION_ATTEMPTS = 3
18
+
19
+
20
class Command(BaseModel):
    """Validated shape of the LLM's JSON reply."""
    # The shell command to run.
    command: str
    # Human-readable explanation of what the command does.
    explanation: str
23
+
24
+
25
def extract_json_content(s):
    """
    Extract the JSON payload from an LLM reply.

    If the reply contains a markdown ```json fenced block, return its
    contents; otherwise return the whole string. Newlines are stripped so
    the result can be fed to json.loads even when the model pretty-prints.

    Args:
        s (str): The raw reply text from the model.

    Returns:
        str: The (hopefully) JSON substring with newlines removed.
    """
    if "```json" not in s:
        # No markdown fence present: assume the whole reply is JSON.
        return s.replace("\n", "")

    # Find the start and end indices for the markdown JSON content.
    start_index = s.find("```json") + len("```json\n")
    end_index = s.rfind("```")
    if end_index <= start_index:
        # Closing fence missing: rfind matched the opening fence, which
        # previously produced an empty slice. Take everything after the
        # opening fence instead.
        end_index = len(s)
    return s[start_index:end_index].strip().replace("\n", "")
34
+
35
+
36
+ # TODO: implement `return_format` when available
37
+ def generate_command(prompt, url, model="qwen2.5-coder:7b", api_key="abc123"):
38
+ """
39
+ Use the OpenAI client to generate a command.
40
+
41
+ Args:
42
+ prompt (str): The description of the command to generate.
43
+ url (str): The URL of the OpenAI API.
44
+
45
+ Returns:
46
+ dict: A dictionary containing the generated command and an explanation.
47
+ """
48
+
49
+ client = OpenAI(api_key=api_key, base_url=url)
50
+
51
+ try:
52
+ response = client.chat.completions.create(
53
+ model=model,
54
+ messages=[
55
+ {
56
+ "role": "system",
57
+ "content": 'Answer with a linux shell command. The answer should be json in the form {"command": <command>, "explanation":<explanation>}',
58
+ },
59
+ {"role": "user", "content": prompt},
60
+ ],
61
+ )
62
+ except openai.APIConnectionError as e:
63
+ print(
64
+ f"Error: can't connect to LLM at url:{url}. Please set the correct url using 'c --set-base-url <url>'"
65
+ )
66
+ raise e
67
+
68
+ str_json = response.choices[0].message.content
69
+ str_json = extract_json_content(str_json)
70
+ try:
71
+ return json.loads(str_json)
72
+ except json.JSONDecodeError as e:
73
+ print("Error decoding json: ", e)
74
+
75
+
76
def prefill_input(prefill_text):
    """Show an input prompt with *prefill_text* already typed; return the edited line."""

    def _insert_prefill():
        readline.insert_text(prefill_text)

    # The startup hook runs right before input() starts reading, seeding
    # the editable line buffer with the generated command.
    readline.set_startup_hook(_insert_prefill)
    try:
        # Print the prompt once; readline handles editing of the prefill.
        return input("$ ")
    finally:
        # Always clear the hook so later input() calls start with an empty line.
        readline.set_startup_hook()
87
+
88
+
89
def add_to_history(command):
    """Record *command* in readline history and append it to ~/.bash_history."""
    readline.add_history(command)
    # Persist across sessions by appending to the bash history file.
    history_path = os.path.expanduser("~/.bash_history")
    with open(history_path, "a") as history_file:
        history_file.write(f"{command}\n")
94
+
95
+
96
def parse_arguments():
    """Build and return the argument parser for the nlshell CLI."""
    parser = argparse.ArgumentParser(
        description="Generates a shell command from a description."
    )

    # Positional: the free-form natural-language description.
    parser.add_argument(
        "description_str",
        nargs="*",
        help="A description of the command to generate. Example: c list all files in the current directory",
    )

    # Boolean toggles controlling the safety warning.
    for flag, flag_help in (
        ("--disable-warning", "Disable the warning message."),
        ("--enable-warning", "Enable the warning message."),
    ):
        parser.add_argument(flag, action="store_true", help=flag_help, default=False)

    # Persistent configuration values.
    parser.add_argument(
        "--set-base-url", type=str, help="Set the base url for the OpenAI API."
    )
    parser.add_argument(
        "--set-model", type=str, help="Set the model for the OpenAI API."
    )
    parser.add_argument(
        "--api-key", type=str, help="Set the API key for the OpenAI API."
    )

    return parser
139
+
140
+
141
def main():
    """CLI entry point: handle configuration flags, or generate, confirm and run a command."""

    parser = parse_arguments()
    args = parser.parse_args()

    # The natural-language instruction to turn into a shell command.
    command_instr = " ".join(args.description_str).strip()

    # Configuration-only invocations: persist the setting and exit.
    if args.disable_warning:
        set_config("default", "disable_warning", "True")
        return

    if args.enable_warning:
        set_config("default", "disable_warning", "False")
        return

    if args.set_base_url:
        set_config("default", "base_url", args.set_base_url)
        return

    if args.set_model:
        set_config("default", "model", args.set_model)
        return

    if args.api_key:
        set_config("default", "api_key", args.api_key)
        return

    if command_instr == "" or command_instr == "--help" or command_instr == "-h":
        parser.print_help()
        return

    handle_warning_message()

    url = get_base_url()
    model = get_model()
    api_key = get_api_key()

    # Try to generate a valid command N_GENERATION_ATTEMPTS times.
    for _ in range(N_GENERATION_ATTEMPTS):
        json_command = generate_command(command_instr, url, model, api_key=api_key)
        if json_command is None:
            # The LLM reply was not valid JSON; try again.
            continue
        try:
            _ = Command(**json_command)  # check if the generated command is valid
            break
        except (pydantic.ValidationError, TypeError):
            # TypeError covers valid JSON that is not an object
            # (e.g. a bare list or string) which ** cannot unpack.
            pass
    else:
        print("Error: Could not generate a valid command. Please try again.")
        return

    # Print the explanation in gray, let the user edit the command, then run it.
    print("\033[90m" + json_command["explanation"] + "\033[0m")
    edited_command = prefill_input(json_command["command"])
    add_to_history(edited_command)  # Add the command to the history
    os.system(edited_command)
@@ -0,0 +1,99 @@
1
+ import configparser
2
+ import functools
3
+ import os
4
+
5
# Per-user INI config file storing base_url, model, api_key and the
# disable_warning flag.
SETTINGS_FILE_PATH = os.path.expanduser("~/.config/nlshell/settings.ini")
DEFAULT_URL = "http://localhost:11434/v1"  # default url for local ollama
DEFAULT_MODEL = "qwen2.5-coder:7b"
8
+
9
+
10
@functools.lru_cache(maxsize=None)
def get_config(section, key):
    """
    Load the settings file and return the value stored under (section, key).

    Returns None when the section or key is missing. Results are cached;
    the cache is cleared whenever `set_config` writes a new value.
    """
    config = configparser.ConfigParser()
    config.read(SETTINGS_FILE_PATH)
    return config.get(section, key, fallback=None)
19
+
20
+
21
def set_config(section, key, value):
    """
    Persist (section, key) = value in the settings file, creating the file
    and its parent directory if needed, and invalidate `get_config`'s cache.
    """
    # Cached reads must not return stale values after this write.
    get_config.cache_clear()

    # Ensure ~/.config/nlshell exists before writing.
    os.makedirs(os.path.dirname(SETTINGS_FILE_PATH), exist_ok=True)

    config = configparser.ConfigParser()
    config.read(SETTINGS_FILE_PATH)
    # Mapping-protocol access: create the section on first use.
    if section not in config:
        config[section] = {}
    config[section][key] = value
    with open(SETTINGS_FILE_PATH, "w") as settings_file:
        config.write(settings_file)
39
+
40
+
41
def handle_warning_message():
    """Display the safety warning unless disabled via `c --disable-warning`.

    The setting is persisted as the STRING "True"/"False", so it must be
    compared as text: `not "False"` is False, which previously kept the
    warning suppressed even after the user ran `c --enable-warning`.
    """
    if get_config("default", "disable_warning") != "True":
        # print in red color
        print(
            "\033[91m"
            + "WARNING: Using the generated command can cause serious harm. Never run a command you are not 100% sure of what it will do."
            + "\033[0m"
        )
        print("Disable this warning by running 'c --disable-warning'")
51
+
52
+
53
def get_base_url():
    """
    Return the API base url from the settings file, prompting the user
    (and persisting the answer) when it has not been configured yet.
    """
    base_url = get_config("default", "base_url")
    if base_url:
        return base_url

    # Not configured: ask the user, defaulting to the local ollama endpoint.
    base_url = input(
        f"Base URL for LLM is not set. \nEnter the base url for the API (default {DEFAULT_URL} (local ollama)): "
    ) or DEFAULT_URL
    set_config("default", "base_url", base_url)
    return base_url
68
+
69
+
70
def get_model():
    """
    Return the model name from the settings file, prompting the user
    (and persisting the answer) when it has not been configured yet.
    """
    model = get_config("default", "model")
    if model:
        return model

    # Not configured: ask the user, falling back to the default model.
    model = input(
        f"Model for LLM is not set. \nEnter the model for the API: (default {DEFAULT_MODEL}): "
    ) or DEFAULT_MODEL
    set_config("default", "model", model)
    return model
85
+
86
+
87
def get_api_key():
    """
    Return the API key from the settings file, prompting the user
    (and persisting the answer) when it has not been configured yet.

    Unlike get_base_url/get_model there is no default value: even local
    models need a (dummy) key because the openai client requires one.
    """
    api_key = get_config("default", "api_key")
    if not api_key:
        # Plain string literal: the previous f-string had no placeholders.
        api_key = input(
            "API key for OpenAI is not set. \nEnter the API key for the API: "
        )
        set_config("default", "api_key", api_key)
    return api_key
@@ -0,0 +1,22 @@
1
+ [tool.poetry]
2
+ name = "nlshell"
3
+ version = "0.1.0"
4
+ description = "Creates shell command from a description using an LLM"
5
+ authors = ["Mattias Brycke <mattias.brycke@gmail.com>"]
6
+ license = "MIT"
7
+ readme = "README.md"
8
+
9
+ [tool.poetry.dependencies]
10
+ python = "^3.10"
11
+ openai = "^1.52.2"
12
+
13
+ [tool.poetry.scripts]
14
+ c = "nlshell.main:main"
15
+
16
+ [build-system]
17
+ requires = ["poetry-core"]
18
+ build-backend = "poetry.core.masonry.api"
19
+
20
+ [tool.poetry.urls]
+ "Repository" = "https://github.com/mbrycke/nlshell"