kopipasta 0.19.0.tar.gz → 0.21.0.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of kopipasta might be problematic.

@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: kopipasta
- Version: 0.19.0
+ Version: 0.21.0
  Summary: A CLI tool to generate prompts with project structure and file contents
  Home-page: https://github.com/mkorpela/kopipasta
  Author: Mikko Korpela
@@ -19,15 +19,15 @@ Classifier: Programming Language :: Python :: 3.12
  Requires-Python: >=3.8
  Description-Content-Type: text/markdown
  License-File: LICENSE
- Requires-Dist: pyperclip==1.9.0
- Requires-Dist: requests==2.32.3

  # kopipasta

  [![Version](https://img.shields.io/pypi/v/kopipasta.svg)](https://pypi.python.org/pypi/kopipasta)
  [![Downloads](http://pepy.tech/badge/kopipasta)](http://pepy.tech/project/kopipasta)

- A CLI tool for generating AI-assisted code prompts with project structure and file contents, using an interactive editor-based workflow.
+ Beyond TAB TAB TAB. Giving you full control of the context.
+
+ A CLI tool for generating code task prompts with project structure and file contents, using an interactive editor-based workflow. OR a very easy way to give a large context to an LLM.

  <img src="kopipasta.jpg" alt="kopipasta" width="300">

@@ -51,7 +51,7 @@ kopipasta [files_or_directories_or_urls]

  Replace `[files_or_directories_or_urls]` with the paths to the files or directories you want to include in the prompt, as well as any web URLs you want to fetch content from.

- Example:
+ Example input:
  ```bash
  kopipasta src/ config.json https://example.com/api-docs
  ```
@@ -76,3 +76,16 @@ The generated prompt will be displayed in the console and automatically copied t
  - Allows interactive selection of files to include
  - Supports various file types with syntax highlighting in the selection process
  - Automatically copies the generated prompt to the clipboard
+
+ ## Real life example
+
+ Context:
+ I had a bug that setup.py did not have all the dependencies. I wanted to make things easier:
+
+ 1. `kopipasta -t "setup.py should take requirements from requirements.txt" requirements.txt setup.py`
+ 2. Opened the service that provides the best LLM currently.
+ 3. Pasted the prompt to their chat.
+ 4. Reviewed the first message and typed "Proceed".
+ 5. Got back the code that fixed the issue.
+
+
@@ -3,7 +3,9 @@
  [![Version](https://img.shields.io/pypi/v/kopipasta.svg)](https://pypi.python.org/pypi/kopipasta)
  [![Downloads](http://pepy.tech/badge/kopipasta)](http://pepy.tech/project/kopipasta)

- A CLI tool for generating AI-assisted code prompts with project structure and file contents, using an interactive editor-based workflow.
+ Beyond TAB TAB TAB. Giving you full control of the context.
+
+ A CLI tool for generating code task prompts with project structure and file contents, using an interactive editor-based workflow. OR a very easy way to give a large context to an LLM.

  <img src="kopipasta.jpg" alt="kopipasta" width="300">

@@ -27,7 +29,7 @@ kopipasta [files_or_directories_or_urls]

  Replace `[files_or_directories_or_urls]` with the paths to the files or directories you want to include in the prompt, as well as any web URLs you want to fetch content from.

- Example:
+ Example input:
  ```bash
  kopipasta src/ config.json https://example.com/api-docs
  ```
@@ -52,3 +54,16 @@ The generated prompt will be displayed in the console and automatically copied t
  - Allows interactive selection of files to include
  - Supports various file types with syntax highlighting in the selection process
  - Automatically copies the generated prompt to the clipboard
+
+ ## Real life example
+
+ Context:
+ I had a bug that setup.py did not have all the dependencies. I wanted to make things easier:
+
+ 1. `kopipasta -t "setup.py should take requirements from requirements.txt" requirements.txt setup.py`
+ 2. Opened the service that provides the best LLM currently.
+ 3. Pasted the prompt to their chat.
+ 4. Reviewed the first message and typed "Proceed".
+ 5. Got back the code that fixed the issue.
+
+
@@ -827,37 +827,66 @@ def main():
  web_contents[input_path] = (file_tuple, content)
  current_char_count += len(content)
  print(f"Added {'snippet of ' if is_snippet else ''}web content from: {input_path}")
+ print_char_count(current_char_count)
  elif os.path.isfile(input_path):
+ # Handle files provided directly via command line
  if not is_ignored(input_path, ignore_patterns) and not is_binary(input_path):
- while True:
- file_choice = input(f"{input_path} (y)es include / (n)o skip / (p)atches / (q)uit? ").lower()
- if file_choice == 'y':
- use_snippet = is_large_file(input_path)
- files_to_include.append((input_path, use_snippet, None, get_language_for_file(input_path)))
- if use_snippet:
- snippet = get_file_snippet(input_path)
- current_char_count += len(snippet)
- print(get_colored_code(input_path, snippet))
+ file_size = os.path.getsize(input_path)
+ file_size_readable = get_human_readable_size(file_size)
+ file_char_estimate = file_size
+ language = get_language_for_file(input_path)
+
+ if is_large_file(input_path):
+ print(f"\nFile {input_path} ({file_size_readable}, ~{file_char_estimate} chars) is large.")
+ print("Preview (first ~50 lines or 4KB):")
+ print(get_colored_file_snippet(input_path))
+ print("-" * 40)
+ while True:
+ print_char_count(current_char_count)
+ choice = input(f"How to include large file {input_path}? (f)ull / (s)nippet / (p)atches / (n)o skip: ").lower()
+ if choice == 'f':
+ files_to_include.append((input_path, False, None, language))
+ current_char_count += file_char_estimate
+ print(f"Added full file: {input_path}")
+ break
+ elif choice == 's':
+ snippet_content = get_file_snippet(input_path)
+ files_to_include.append((input_path, True, None, language))
+ current_char_count += len(snippet_content)
+ print(f"Added snippet of file: {input_path}")
+ break
+ elif choice == 'p':
+ chunks, char_count = select_file_patches(input_path)
+ if chunks:
+ files_to_include.append((input_path, False, chunks, language))
+ current_char_count += char_count
+ print(f"Added selected patches from file: {input_path}")
+ else:
+ print(f"No patches selected for {input_path}. Skipping file.")
+ break
+ elif choice == 'n':
+ print(f"Skipped large file: {input_path}")
+ break
  else:
- current_char_count += os.path.getsize(input_path)
- print(f"Added file: {input_path}{' (snippet)' if use_snippet else ''}")
- break
- elif file_choice == 'n':
- break
- elif file_choice == 'p':
- chunks, char_count = select_file_patches(input_path)
- if chunks:
- files_to_include.append((input_path, False, chunks, get_language_for_file(input_path)))
- current_char_count += char_count
- break
- elif file_choice == 'q':
- print("Quitting.")
- return
- else:
- print("Invalid choice. Please enter 'y', 'n', 'p', or 'q'.")
+ print("Invalid choice. Please enter 'f', 's', 'p', or 'n'.")
+ else:
+ # Automatically include non-large files
+ files_to_include.append((input_path, False, None, language))
+ current_char_count += file_char_estimate
+ print(f"Added file: {input_path} ({file_size_readable})")
+
+ # Display current count after processing the file
+ print_char_count(current_char_count)
+
  else:
- print(f"Ignored file: {input_path}")
+ if is_ignored(input_path, ignore_patterns):
+ print(f"Ignoring file based on ignore patterns: {input_path}")
+ elif is_binary(input_path):
+ print(f"Ignoring binary file: {input_path}")
+ else:
+ print(f"Ignoring file: {input_path}") # Should not happen if logic is correct, but fallback.
  elif os.path.isdir(input_path):
+ print(f"\nProcessing directory specified directly: {input_path}")
  dir_files, dir_processed, current_char_count = process_directory(input_path, ignore_patterns, current_char_count)
  files_to_include.extend(dir_files)
  processed_dirs.update(dir_processed)
@@ -869,19 +898,29 @@ def main():
  return

  print("\nFile and web content selection complete.")
- print_char_count(current_char_count)
- print(f"Summary: Added {len(files_to_include)} files from {len(processed_dirs)} directories and {len(web_contents)} web sources.")
+ print_char_count(current_char_count) # Print final count before prompt generation
+ print(f"Summary: Added {len(files_to_include)} files and {len(web_contents)} web sources.")

  prompt_template, cursor_position = generate_prompt_template(files_to_include, ignore_patterns, web_contents, env_vars)

  if args.task:
  task_description = args.task
- final_prompt = prompt_template[:cursor_position] + task_description + prompt_template[cursor_position:]
+ # Insert task description before "## Task Instructions"
+ task_marker = "## Task Instructions\n\n"
+ insertion_point = prompt_template.find(task_marker)
+ if insertion_point != -1:
+ final_prompt = prompt_template[:insertion_point + len(task_marker)] + task_description + "\n\n" + prompt_template[insertion_point + len(task_marker):]
+ else: # Fallback if marker not found
+ final_prompt = prompt_template[:cursor_position] + task_description + prompt_template[cursor_position:]
+ print("\nUsing task description from -t argument.")
  else:
+ print("\nOpening editor for task instructions...")
  final_prompt = open_editor_for_input(prompt_template, cursor_position)

  print("\n\nGenerated prompt:")
+ print("-" * 80)
  print(final_prompt)
+ print("-" * 80)

  # Copy the prompt to clipboard
  try:
@@ -892,7 +931,8 @@ def main():
  final_token_estimate = final_char_count // 4
  print(f"Prompt has been copied to clipboard. Final size: {final_char_count} characters (~ {final_token_estimate} tokens)")
  except pyperclip.PyperclipException as e:
- print(f"Failed to copy to clipboard: {e}")
+ print(f"\nWarning: Failed to copy to clipboard: {e}")
+ print("You can manually copy the prompt above.")

  if __name__ == "__main__":
  main()
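Besides the new interactive menu for large files ((f)ull / (s)nippet / (p)atches / (n)o), the hunk above also changes how a task passed with `-t` is spliced into the generated prompt: it now lands right after the "## Task Instructions" heading when that marker is present, and only falls back to the old cursor-position splice when it is not. A minimal standalone sketch of that insertion logic, assuming nothing beyond what the hunk shows (the `insert_task` wrapper and the example strings are illustrative, not names from the package):

```python
def insert_task(prompt_template: str, task_description: str, cursor_position: int) -> str:
    # 0.21.0 behaviour: splice the -t task right after the "## Task Instructions" heading.
    task_marker = "## Task Instructions\n\n"
    insertion_point = prompt_template.find(task_marker)
    if insertion_point != -1:
        split_at = insertion_point + len(task_marker)
        return prompt_template[:split_at] + task_description + "\n\n" + prompt_template[split_at:]
    # Fallback (the pre-0.21.0 behaviour): splice at the precomputed cursor position.
    return prompt_template[:cursor_position] + task_description + prompt_template[cursor_position:]


if __name__ == "__main__":
    template = "# Project\n\n## Task Instructions\n\n## Files\n"
    print(insert_task(template, "setup.py should take requirements from requirements.txt", len(template)))
```

Keeping the cursor-position fallback means a template that lacks the heading still receives the task text.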
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: kopipasta
- Version: 0.19.0
+ Version: 0.21.0
  Summary: A CLI tool to generate prompts with project structure and file contents
  Home-page: https://github.com/mkorpela/kopipasta
  Author: Mikko Korpela
@@ -19,15 +19,15 @@ Classifier: Programming Language :: Python :: 3.12
  Requires-Python: >=3.8
  Description-Content-Type: text/markdown
  License-File: LICENSE
- Requires-Dist: pyperclip==1.9.0
- Requires-Dist: requests==2.32.3

  # kopipasta

  [![Version](https://img.shields.io/pypi/v/kopipasta.svg)](https://pypi.python.org/pypi/kopipasta)
  [![Downloads](http://pepy.tech/badge/kopipasta)](http://pepy.tech/project/kopipasta)

- A CLI tool for generating AI-assisted code prompts with project structure and file contents, using an interactive editor-based workflow.
+ Beyond TAB TAB TAB. Giving you full control of the context.
+
+ A CLI tool for generating code task prompts with project structure and file contents, using an interactive editor-based workflow. OR a very easy way to give a large context to an LLM.

  <img src="kopipasta.jpg" alt="kopipasta" width="300">

@@ -51,7 +51,7 @@ kopipasta [files_or_directories_or_urls]

  Replace `[files_or_directories_or_urls]` with the paths to the files or directories you want to include in the prompt, as well as any web URLs you want to fetch content from.

- Example:
+ Example input:
  ```bash
  kopipasta src/ config.json https://example.com/api-docs
  ```
@@ -76,3 +76,16 @@ The generated prompt will be displayed in the console and automatically copied t
  - Allows interactive selection of files to include
  - Supports various file types with syntax highlighting in the selection process
  - Automatically copies the generated prompt to the clipboard
+
+ ## Real life example
+
+ Context:
+ I had a bug that setup.py did not have all the dependencies. I wanted to make things easier:
+
+ 1. `kopipasta -t "setup.py should take requirements from requirements.txt" requirements.txt setup.py`
+ 2. Opened the service that provides the best LLM currently.
+ 3. Pasted the prompt to their chat.
+ 4. Reviewed the first message and typed "Proceed".
+ 5. Got back the code that fixed the issue.
+
+
@@ -1,2 +1,3 @@
  pyperclip==1.9.0
  requests==2.32.3
+ Pygments==2.18.0
@@ -1,11 +1,16 @@
  from setuptools import setup, find_packages

+ # Read the contents of README.md
  with open("README.md", "r", encoding="utf-8") as fh:
  long_description = fh.read()

+ # Read the contents of requirements.txt
+ with open("requirements.txt", "r", encoding="utf-8") as f:
+ requirements = f.read().splitlines()
+
  setup(
  name="kopipasta",
- version="0.19.0",
+ version="0.21.0",
  author="Mikko Korpela",
  author_email="mikko.korpela@gmail.com",
  description="A CLI tool to generate prompts with project structure and file contents",
@@ -13,10 +18,7 @@ setup(
  long_description_content_type="text/markdown",
  url="https://github.com/mkorpela/kopipasta",
  packages=find_packages(),
- install_requires=[
- "pyperclip==1.9.0",
- "requests==2.32.3",
- ],
+ install_requires=requirements,
  classifiers=[
  "Development Status :: 3 - Alpha",
  "Intended Audience :: Developers",
@@ -36,4 +38,4 @@ setup(
  "kopipasta=kopipasta.main:main",
  ],
  },
- )
+ )
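Read alongside the requirements.txt hunk above, this setup.py change replaces the hard-coded `install_requires` list with whatever requirements.txt declares, which is the fix described in the README's new "Real life example". A minimal sketch of the resulting pattern, trimmed to the dependency handling only (the other `setup()` arguments from the real file are omitted here):

```python
from setuptools import setup, find_packages

# Read the pinned dependencies, one per line, from requirements.txt.
with open("requirements.txt", "r", encoding="utf-8") as f:
    requirements = f.read().splitlines()

setup(
    name="kopipasta",
    version="0.21.0",
    packages=find_packages(),
    # Dependencies now have a single source of truth instead of a duplicated list.
    install_requires=requirements,
)
```

One side effect visible in the PKG-INFO hunks above: the old `Requires-Dist` lines are removed and no new ones appear in the changed lines, so the refactor also altered what the published metadata declares.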
4 files without changes.