patchllm 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
patchllm/__init__.py ADDED
File without changes
patchllm/context.py ADDED
@@ -0,0 +1,175 @@
1
+ import os
2
+ import glob
3
+ import textwrap
4
+ import sys
5
+ from pathlib import Path
6
+
7
# --- Default Settings & Templates ---

# File suffixes excluded from the context by default. build_context() compares
# these against Path.suffix (lower-cased), so each entry is expected to be a
# real extension including the leading dot.
# NOTE(review): entries such as ".DS_Store", ".next" and ".svelte-kit" are
# file/directory *names*, not suffixes — Path(".DS_Store").suffix is "" — so a
# suffix-based filter never matches them; confirm intent.
DEFAULT_EXCLUDE_EXTENSIONS = [
    # General
    ".log", ".lock", ".env", ".bak", ".tmp", ".swp", ".swo", ".db", ".sqlite3",
    # Python
    ".pyc", ".pyo", ".pyd",
    # JS/Node
    ".next", ".svelte-kit",
    # OS-specific
    ".DS_Store",
    # Media/Binary files
    ".png", ".jpg", ".jpeg", ".gif", ".svg", ".ico", ".webp",
    ".mp3", ".mp4", ".mov", ".avi", ".pdf",
    ".o", ".so", ".dll", ".exe",
    # Unity specific
    ".meta",
]

# Skeleton for the final prompt; the {{source_tree}} and {{files_content}}
# placeholders are substituted via str.replace() in build_context().
BASE_TEMPLATE = textwrap.dedent('''
    Source Tree:
    ------------
    ```
    {{source_tree}}
    ```

    Relevant Files:
    ---------------
    {{files_content}}
''')
37
+
38
+ # --- Helper Functions (File Discovery, Filtering, Tree Generation) ---
39
+
40
+ def find_files(base_path: Path, include_patterns: list[str], exclude_patterns: list[str] | None = None) -> list[Path]:
41
+ """Finds all files using glob patterns, handling both relative and absolute paths."""
42
+ if exclude_patterns is None:
43
+ exclude_patterns = []
44
+
45
+ def _get_files_from_patterns(patterns: list[str]) -> set[Path]:
46
+ """Helper to process a list of glob patterns and return matching file paths."""
47
+ files = set()
48
+ for pattern_str in patterns:
49
+ pattern_path = Path(pattern_str)
50
+ # If the pattern is absolute, use it as is. Otherwise, join it with the base_path.
51
+ search_path = pattern_path if pattern_path.is_absolute() else base_path / pattern_path
52
+
53
+ for match in glob.glob(str(search_path), recursive=True):
54
+ path_obj = Path(match).resolve()
55
+ if path_obj.is_file():
56
+ files.add(path_obj)
57
+ return files
58
+
59
+ included_files = _get_files_from_patterns(include_patterns)
60
+ excluded_files = _get_files_from_patterns(exclude_patterns)
61
+
62
+ return sorted(list(included_files - excluded_files))
63
+
64
+
65
def filter_files_by_keyword(file_paths: list[Path], search_words: list[str]) -> list[Path]:
    """Keep only the files whose text contains at least one of *search_words*.

    An empty word list disables filtering and returns the input unchanged.
    Unreadable files are skipped with a warning on stderr.
    """
    if not search_words:
        return file_paths

    kept = []
    for candidate in file_paths:
        try:
            text = candidate.read_text(encoding='utf-8', errors='ignore')
        except Exception as exc:
            print(f"Warning: Could not read {candidate} for keyword search: {exc}", file=sys.stderr)
            continue
        if any(word in text for word in search_words):
            kept.append(candidate)
    return kept
79
+
80
+
81
def generate_source_tree(base_path: Path, file_paths: list[Path]) -> str:
    """Render *file_paths* as an ASCII tree rooted at *base_path*."""
    if not file_paths:
        return "No files found matching the criteria."

    # Build a nested dict: directories map to sub-dicts, files to empty dicts.
    root: dict = {}
    for path in file_paths:
        try:
            rel_path = path.relative_to(base_path)
        except ValueError:
            # File lies outside base_path (came from an absolute pattern);
            # fall back to its absolute path.
            rel_path = path

        node = root
        for part in rel_path.parts:
            node = node.setdefault(part, {})

    def _render(subtree: dict, prefix: str = "") -> list:
        """Recursively format one level of the tree with box-drawing connectors."""
        rendered = []
        # Directories (non-empty children) sort before files, each alphabetical.
        ordered = sorted(subtree.items(), key=lambda kv: (not kv[1], kv[0]))
        for index, (label, child) in enumerate(ordered):
            is_last = index == len(ordered) - 1
            rendered.append(f"{prefix}{'└── ' if is_last else '├── '}{label}")
            if child:
                rendered.extend(_render(child, prefix + ("    " if is_last else "│   ")))
        return rendered

    return f"{base_path.name}\n" + "\n".join(_render(root))
113
+
114
+
115
+ # --- Main Context Building Function ---
116
+
117
def build_context(config: dict) -> dict | None:
    """
    Builds the context string from files specified in the config.

    Args:
        config (dict): The configuration for file searching. Recognized keys:
            - "path": root directory for relative patterns (default ".").
            - "include_patterns" / "exclude_patterns": glob pattern lists.
            - "exclude_extensions": suffixes (or exact file names such as
              ".DS_Store") to drop; defaults to DEFAULT_EXCLUDE_EXTENSIONS.
            - "search_words": keywords, at least one of which must appear in a
              file for it to be kept.

    Returns:
        dict: {"tree": ..., "context": ...} on success, or None when no
        files matched.
    """
    # Resolve the base path immediately to get a predictable absolute path.
    base_path = Path(config.get("path", ".")).resolve()

    include_patterns = config.get("include_patterns", [])
    exclude_patterns = config.get("exclude_patterns", [])
    exclude_extensions = config.get("exclude_extensions", DEFAULT_EXCLUDE_EXTENSIONS)
    search_words = config.get("search_words", [])

    # Step 1: Find files
    relevant_files = find_files(base_path, include_patterns, exclude_patterns)

    # Step 2: Filter by extension. Entries like ".DS_Store" have an empty
    # Path.suffix, so match against the full file name as well — otherwise
    # such default entries could never exclude anything.
    count_before_ext = len(relevant_files)
    norm_ext = {ext.lower() for ext in exclude_extensions}
    relevant_files = [
        p for p in relevant_files
        if p.suffix.lower() not in norm_ext and p.name.lower() not in norm_ext
    ]
    if count_before_ext > len(relevant_files):
        print(f"Filtered {count_before_ext - len(relevant_files)} files by extension.")

    # Step 3: Filter by keyword
    if search_words:
        count_before_kw = len(relevant_files)
        relevant_files = filter_files_by_keyword(relevant_files, search_words)
        print(f"Filtered {count_before_kw - len(relevant_files)} files by keyword search.")

    if not relevant_files:
        print("\nNo files matched the specified criteria.")
        return None

    print(f"\nFinal count of relevant files: {len(relevant_files)}.")

    # Generate source tree and file content blocks
    source_tree_str = generate_source_tree(base_path, relevant_files)

    file_contents = []
    for file_path in relevant_files:
        try:
            display_path = file_path.as_posix()
            content = file_path.read_text(encoding='utf-8')
            file_contents.append(f"<file_path:{display_path}>\n```\n{content}\n```")
        except Exception as e:
            # Best-effort: skip unreadable files rather than aborting the build.
            print(f"Warning: Could not read file {file_path}: {e}", file=sys.stderr)

    files_content_str = "\n\n".join(file_contents)

    # Assemble the final context using the base template
    final_context = BASE_TEMPLATE.replace("{{source_tree}}", source_tree_str)
    final_context = final_context.replace("{{files_content}}", files_content_str)

    return {"tree": source_tree_str, "context": final_context}
patchllm/listener.py ADDED
@@ -0,0 +1,28 @@
1
+ import speech_recognition as sr
2
+ import pyttsx3
3
+
4
# Module-level singletons shared by speak() and listen():
# a speech recognizer and a text-to-speech engine.
recognizer = sr.Recognizer()
tts_engine = pyttsx3.init()
6
+
7
def speak(text):
    """Print *text* and speak it aloud through the shared TTS engine."""
    print("🤖 Speaking:", text)
    tts_engine.say(text)
    # runAndWait blocks until the utterance has finished playing.
    tts_engine.runAndWait()
11
+
12
def listen(prompt=None, timeout=5):
    """Capture one utterance from the microphone and return it as text.

    Optionally speaks *prompt* first. Returns the recognized string, or
    None when nothing was heard or recognition failed.
    """
    with sr.Microphone() as mic:
        if prompt:
            speak(prompt)
        print("🎙 Listening...")
        try:
            audio = recognizer.listen(mic, timeout=timeout)
            heard = recognizer.recognize_google(audio)
        except sr.WaitTimeoutError:
            speak("No speech detected.")
        except sr.UnknownValueError:
            speak("Sorry, I didn’t catch that.")
        except sr.RequestError:
            speak("Speech recognition failed. Check your internet.")
        else:
            print(f"🗣 Recognized: {heard}")
            return heard
    return None
patchllm/main.py ADDED
@@ -0,0 +1,230 @@
1
import argparse
import sys
import textwrap
from pathlib import Path

import litellm
from dotenv import load_dotenv

from context import build_context
from parser import paste_response
from utils import load_from_py_file

load_dotenv()
13
+
14
class Assistant:
    """
    An assistant that builds context, interacts with an LLM, and applies code changes.
    """
    def __init__(
        self,
        model_name="gemini/gemini-2.5-flash",
        configs_file="./configs.py",
    ):
        """
        Initializes the Assistant.

        Args:
            model_name (str): The alias for the generative model to use (must be a litellm supported model string).
            configs_file (str): The path to the configurations file.
        """
        self.model_name = model_name
        # Mapping of config-name -> file-search settings, loaded from a Python file.
        self.configs = load_from_py_file(configs_file, "configs")
        system_prompt = textwrap.dedent("""
            You are an expert pair programmer. Your purpose is to help users by modifying files based on their instructions.

            Follow these rules strictly:
            Your output should be a single file including all the updated files. For each file-block:
            1. Only include code for files that need to be updated / edited.
            2. For updated files, do not exclude any code even if it is unchanged code; assume the file code will be copy-pasted full in the file.
            3. Do not include verbose inline comments explaining what every small change does. Try to keep comments concise but informative, if any.
            4. Only update the relevant parts of each file relative to the provided task; do not make irrelevant edits even if you notice areas of improvements elsewhere.
            5. Do not use diffs.
            6. Make sure each file-block is returned in the following exact format. No additional text, comments, or explanations should be outside these blocks.

            Expected format for a modified or new file:
            <file_path:/absolute/path/to/your/file.py>
            ```python
            # The full, complete content of /absolute/path/to/your/file.py goes here.
            def example_function():
                return "Hello, World!"
            ```

            Example of multiple files:
            <file_path:/home/user/project/src/main.py>
            ```python
            print("Main application start")
            ```

            <file_path:/home/user/project/tests/test_main.py>
            ```python
            def test_main():
                assert True
            ```
        """)
        # Conversation history seeded with the system prompt; user/assistant
        # turns are appended by update().
        self.history = [{"role": "system", "content": system_prompt}]

    def collect(self, config_name):
        """Builds the code context from the named configuration.

        Args:
            config_name (str): Key into the loaded configs dictionary.

        Returns:
            str | None: The assembled context, or None when no files matched.

        Raises:
            KeyError: If *config_name* is not present in the configs file.
        """
        print("\n--- Building Code Context... ---")
        selected_config = self.configs.get(config_name)
        if selected_config is None:
            raise KeyError(f"Context config '{config_name}' not found in provided configs file.")
        context_object = build_context(selected_config)
        if context_object:
            # Access by key instead of unpacking .values(), which silently
            # depended on dict insertion order.
            tree = context_object["tree"]
            context = context_object["context"]
            print("--- Context Building Finished. The following files were extracted ---", file=sys.stderr)
            print(tree)
            return context
        else:
            print("--- Context Building Failed (No files found) ---", file=sys.stderr)
            return None

    def update(self, task_instructions, context=None):
        """
        Assembles the final prompt and sends it to the LLM to generate code,
        then in-place update the files from the response.

        Args:
            task_instructions (str): Specific instructions for this run.
            context (str, optional): The code context. If None, only the task is sent.

        Raises:
            RuntimeError: If the LLM call fails; the pending user message is
                removed from history before re-raising.
        """
        print("\n--- Sending Prompt to LLM... ---")
        final_prompt = task_instructions
        if context:
            final_prompt = f"{context}\n\n{task_instructions}"

        self.history.append({"role": "user", "content": final_prompt})

        try:
            response = litellm.completion(model=self.model_name, messages=self.history)

            # Extract the message content from the response
            assistant_response_content = response.choices[0].message.content

            # Add the assistant's response to the history for future context
            self.history.append({"role": "assistant", "content": assistant_response_content})

            if not assistant_response_content or not assistant_response_content.strip():
                print("Response is empty. Nothing to paste.")
                return

            print("\n--- Updating files ---")
            paste_response(assistant_response_content)
            print("--- File Update Process Finished ---")

        except Exception as e:
            # If an error occurs, remove the last user message to keep history clean
            self.history.pop()
            raise RuntimeError(f"An error occurred while communicating with the LLM via litellm: {e}") from e

    def write(self, file_path, context):
        """Utility function to write the context to a file."""
        print("Exporting context..")
        with open(file_path, "w", encoding="utf-8") as file:
            file.write(context)
        # Path().name handles OS-native separators; split("/") did not on Windows.
        print(f"Context exported to {Path(file_path).name}")

    def read(self, file_path):
        """Utility function to read and return the content of a file.

        Raises:
            RuntimeError: If the file cannot be opened or read.
        """
        print("Importing from file..")
        try:
            with open(file_path, "r", encoding="utf-8") as file:
                content = file.read()
            print("Finished reading")
            return content
        except Exception as e:
            raise RuntimeError(f"Failed to read from file {file_path}: {e}") from e
135
+
136
def main():
    """Command-line entry point: build context and apply LLM-generated updates."""
    parser = argparse.ArgumentParser(
        description="Run the Assistant tool to apply code changes using an LLM."
    )
    parser.add_argument(
        "--config",
        type=str,
        default=None,
        help="Name of the config key to use from the configs.py file."
    )
    parser.add_argument(
        "--task",
        type=str,
        default=None,
        help="The task instructions to guide the assistant."
    )
    parser.add_argument(
        "--context-out",
        type=str,
        default=None,
        help="Optional path to export the generated context to a file."
    )
    parser.add_argument(
        "--context-in",
        type=str,
        default=None,
        help="Optional path to import a previously saved context from a file."
    )
    parser.add_argument(
        "--model",
        type=str,
        default="gemini/gemini-2.5-flash",
        help="Optional model name to override the default model."
    )
    parser.add_argument(
        "--from-file",
        type=str,
        default=None,
        help="File path for a file with pre-formatted updates."
    )
    parser.add_argument(
        "--update",
        type=str,
        default="True",
        help="Whether to pass the input context to the llm to update the files."
    )
    parser.add_argument(
        "--voice",
        type=str,
        default="False",
        help="Whether to interact with the script using voice commands."
    )

    args = parser.parse_args()

    assistant = Assistant(model_name=args.model)

    # Handle voice input (imported lazily so audio dependencies are only
    # required when voice mode is actually requested).
    if args.voice not in ["False", "false"]:
        from listener import listen, speak

        speak("Say your task instruction.")
        task = listen()
        if not task:
            speak("No instruction heard. Exiting.")
            return

        speak(f"You said: {task}. Should I proceed?")
        confirm = listen()
        if confirm and "yes" in confirm.lower():
            context = assistant.collect(args.config)
            assistant.update(task_instructions=task, context=context)
            speak("Changes applied.")
        else:
            speak("Cancelled.")
        return

    # Parse updates from a local file
    if args.from_file:
        updates = assistant.read(args.from_file)
        paste_response(updates)
        return

    # Otherwise generate updates from llm response
    if args.context_in:
        context = assistant.read(args.context_in)
    else:
        context = assistant.collect(args.config)
    # Skip the export when context building failed instead of crashing on None.
    if args.context_out and context is not None:
        assistant.write(args.context_out, context)
    if args.update not in ["False", "false"]:
        assistant.update(task_instructions=args.task, context=context)

if __name__ == "__main__":
    main()
patchllm/parser.py ADDED
@@ -0,0 +1,72 @@
1
+ import re
2
+ from pathlib import Path
3
+
4
def paste_response(response_content):
    """
    Parse an LLM response for <file_path:...> code blocks and write each one
    to disk, handling both absolute and relative paths safely.

    Args:
        response_content (str): The string response from the LLM.
    """
    block_re = re.compile(
        r"<file_path:([^>]+?)>\s*```(?:.*?)\n(.*?)\n```",
        re.DOTALL | re.MULTILINE
    )

    written = 0
    seen_any = False

    for block in block_re.finditer(response_content):
        seen_any = True
        path_text = block.group(1).strip()
        code = block.group(2)

        if not path_text:
            print("Warning: Found a code block with an empty file path. Skipping.")
            continue

        print(f"Found path in response: '{path_text}'")
        raw = Path(path_text)

        # Absolute paths are used directly; relative ones are resolved against
        # the current working directory. resolve() normalizes "." / "..".
        target = (raw if raw.is_absolute() else Path.cwd() / raw).resolve()

        try:
            # Ensure parent directory exists
            target.parent.mkdir(parents=True, exist_ok=True)

            # Skip the write entirely when the on-disk content already matches.
            if target.exists() and target.read_text(encoding='utf-8') == code:
                print(f" -> No changes for '{target}', skipping.")
                continue

            target.write_text(code, encoding='utf-8')
            print(f" -> Wrote {len(code)} bytes to '{target}'")
            written += 1

        except OSError as err:
            print(f" -> Error writing file '{target}': {err}")
        except Exception as err:
            print(f" -> An unexpected error occurred for file '{target}': {err}")

    if not seen_any:
        print("\nNo file paths and code blocks matching the expected format were found in the response.")
    elif written > 0:
        print(f"\nSuccessfully processed {written} file(s).")
    else:
        print("\nFound matching blocks, but no files were written.")
patchllm/utils.py ADDED
@@ -0,0 +1,18 @@
1
+ import importlib.util
2
+ from pathlib import Path
3
+
4
def load_from_py_file(file_path, dict_name):
    """Dynamically load and return the dictionary *dict_name* defined in *file_path*.

    Raises:
        FileNotFoundError: If *file_path* does not point to an existing file.
        TypeError: If the file does not define a dict under *dict_name*.
    """
    path = Path(file_path)
    if not path.is_file():
        raise FileNotFoundError(f"The file '{path}' was not found.")

    # Execute the target file as a throw-away module named after its stem.
    spec = importlib.util.spec_from_file_location(path.stem, path)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)

    loaded = getattr(module, dict_name, None)
    if isinstance(loaded, dict):
        return loaded
    raise TypeError(f"The file '{path}' must contain a dictionary named '{dict_name}'.")
@@ -0,0 +1,83 @@
1
+ Metadata-Version: 2.4
2
+ Name: patchllm
3
+ Version: 0.1.0
4
+ Summary: Lightweight tool to manage contexts and update code with LLMs
5
+ Author: nassimberrada
6
+ License: MIT License
7
+
8
+ Copyright (c) 2025 nassimberrada
9
+
10
+ Permission is hereby granted, free of charge, to any person obtaining a copy
11
+ of this software and associated documentation files (the “Software”), to deal
12
+ in the Software without restriction, including without limitation the rights
13
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
14
+ copies of the Software, and to permit persons to whom the Software is
15
+ furnished to do so, subject to the following conditions:
16
+
17
+ The above copyright notice and this permission notice shall be included in all
18
+ copies or substantial portions of the Software.
19
+
20
+ THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
21
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
22
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
23
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
24
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
25
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
26
+ SOFTWARE.
27
+ Requires-Python: >=3.8
28
+ Description-Content-Type: text/markdown
29
+ License-File: LICENSE
30
+ Requires-Dist: litellm
31
+ Dynamic: license-file
32
+
33
+ <p align="center">
34
+ <picture>
35
+ <source srcset="./assets/logo_dark.png" media="(prefers-color-scheme: dark)">
36
+ <source srcset="./assets/logo_light.png" media="(prefers-color-scheme: light)">
37
+ <img src="./assets/logo_light.png" alt="PatchLLM Logo" height="200">
38
+ </picture>
39
+ </p>
40
+
41
+ ## About
42
+ PatchLLM lets you flexibly build LLM context from your codebase using search patterns, and automatically edit files from the LLM response in a couple lines of code.
43
+
44
+ ## Usage
45
+ Here's a basic example of how to use the `Assistant` class:
46
+
47
+ ```python
48
+ from main import Assistant
49
+
50
+ assistant = Assistant()
51
+
52
+ context = assistant.collect(config_name="default")
53
+ >> The following files were extracted:
54
+ >> my_project
55
+ >> ├── README.md
56
+ >> ├── configs.py
57
+ >> ├── context.py
58
+ >> ├── main.py
59
+ >> ├── parser.py
60
+ >> ├── requirements.txt
61
+ >> ├── systems.py
62
+ >> └── utils.py
63
+
64
+ assistant.update("Fix any bug in these files", context=context)
65
+ >> Wrote 5438 bytes to '/my_project/context.py'
66
+ >> Wrote 1999 bytes to '/my_project/utils.py'
67
+ >> Wrote 2345 bytes to '/my_project/main.py'
68
+ ```
69
+
70
+ You can decide which files to include / exclude from the prompt by adding a config in `configs.py`, specifying:
71
+ - `path`: The root path from which to perform the file search
72
+ - `include_patterns`: A list of glob patterns for files to include. e.g `[./**/*]`
73
+ - `exclude_patterns`: A list of glob patterns for files to exclude. e.g `[./*.md]`
74
+ - `search_words`: A list of keywords included in the target files. e.g `["config"]`
75
+ - `exclude_extensions`: A list of file extensions to exclude. e.g `[.jpg]`
76
+
77
+ ### Setup
78
+
79
+ PatchLLM uses [LiteLLM](https://github.com/BerriAI/litellm) under the hood. Please refer to their documentation for environment variable naming and available models.
80
+
81
+ ## License
82
+
83
+ This project is licensed under the MIT License. See the `LICENSE` file for details.
@@ -0,0 +1,12 @@
1
+ patchllm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
+ patchllm/context.py,sha256=zUrXf5l3cdxAbmxB7IjbShTAWA_ZEMBz8OGlaB-cofE,6450
3
+ patchllm/listener.py,sha256=EdcceJCLEoSftX1dVSWxtwBsLaII2lcZ0VnllHwCGWI,845
4
+ patchllm/main.py,sha256=-11bAS-bx2SfGx14KCCZhuwrfh_FDcQ80cwUfYrszY8,8569
5
+ patchllm/parser.py,sha256=4wipa6deoE2gUIhYrvUZcbKTIr5j6lw5Z6bOItUH6YI,2629
6
+ patchllm/utils.py,sha256=hz28hd017gRGT632VQAYLPdX0KAS1GLvZzeUDCKbLc0,647
7
+ patchllm-0.1.0.dist-info/licenses/LICENSE,sha256=vZxgIRNxffjkTV2NWLemgYjDRu0hSMTyFXCZ1zEWbUc,1077
8
+ patchllm-0.1.0.dist-info/METADATA,sha256=BtSvIfjwiWqvv-1d_GPBf1n7ao9R2YY3m8Qd_Rr4c6A,3404
9
+ patchllm-0.1.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
10
+ patchllm-0.1.0.dist-info/entry_points.txt,sha256=_jqCdL7snk6RZfqiSzP_XttWYAPNw_UdnAEqYS-rrd8,48
11
+ patchllm-0.1.0.dist-info/top_level.txt,sha256=SLIZj9EhBXbSnYrbnV8EjL-OfNz-hXRwABCPCjE5Fas,9
12
+ patchllm-0.1.0.dist-info/RECORD,,
@@ -0,0 +1,5 @@
1
+ Wheel-Version: 1.0
2
+ Generator: setuptools (80.9.0)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
5
+
@@ -0,0 +1,2 @@
1
+ [console_scripts]
2
+ yourtool = patchllm.main:main
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2025 nassimberrada
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the “Software”), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
@@ -0,0 +1 @@
1
+ patchllm