contextforllm 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- contextforllm-0.1.0/LICENSE +20 -0
- contextforllm-0.1.0/PKG-INFO +145 -0
- contextforllm-0.1.0/README.md +113 -0
- contextforllm-0.1.0/contextforllm/__init__.py +0 -0
- contextforllm-0.1.0/contextforllm/__main__.py +34 -0
- contextforllm-0.1.0/contextforllm/app.py +225 -0
- contextforllm-0.1.0/contextforllm/context_builder.py +178 -0
- contextforllm-0.1.0/contextforllm/project_summary.py +87 -0
- contextforllm-0.1.0/contextforllm/ui/index.html +974 -0
- contextforllm-0.1.0/contextforllm.egg-info/PKG-INFO +145 -0
- contextforllm-0.1.0/contextforllm.egg-info/SOURCES.txt +15 -0
- contextforllm-0.1.0/contextforllm.egg-info/dependency_links.txt +1 -0
- contextforllm-0.1.0/contextforllm.egg-info/entry_points.txt +2 -0
- contextforllm-0.1.0/contextforllm.egg-info/requires.txt +3 -0
- contextforllm-0.1.0/contextforllm.egg-info/top_level.txt +1 -0
- contextforllm-0.1.0/pyproject.toml +26 -0
- contextforllm-0.1.0/setup.cfg +4 -0
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
|
|
2
|
+
Copyright (c) 2025 Desai
|
|
3
|
+
|
|
4
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
5
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
6
|
+
in the Software without restriction, including without limitation the rights
|
|
7
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
8
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
9
|
+
furnished to do so, subject to the following conditions:
|
|
10
|
+
|
|
11
|
+
The above copyright notice and this permission notice shall be included in all
|
|
12
|
+
copies or substantial portions of the Software.
|
|
13
|
+
|
|
14
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
15
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
16
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
17
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
18
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
19
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
20
|
+
SOFTWARE.
|
|
@@ -0,0 +1,145 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: contextforllm
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: Local web UI for generating LLM-ready context from any codebase
|
|
5
|
+
License:
|
|
6
|
+
Copyright (c) 2025 Desai
|
|
7
|
+
|
|
8
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
9
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
10
|
+
in the Software without restriction, including without limitation the rights
|
|
11
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
12
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
13
|
+
furnished to do so, subject to the following conditions:
|
|
14
|
+
|
|
15
|
+
The above copyright notice and this permission notice shall be included in all
|
|
16
|
+
copies or substantial portions of the Software.
|
|
17
|
+
|
|
18
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
19
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
20
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
21
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
22
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
23
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
24
|
+
SOFTWARE.
|
|
25
|
+
Requires-Python: >=3.8
|
|
26
|
+
Description-Content-Type: text/markdown
|
|
27
|
+
License-File: LICENSE
|
|
28
|
+
Requires-Dist: flask
|
|
29
|
+
Requires-Dist: tiktoken
|
|
30
|
+
Requires-Dist: groq
|
|
31
|
+
Dynamic: license-file
|
|
32
|
+
|
|
33
|
+
# ContextForLLM
|
|
34
|
+
|
|
35
|
+
A local web UI that scans any project folder on your machine, lets you select and annotate files, and generates a structured prompt you can paste directly into any LLM chat — Claude, ChatGPT, Gemini, or any other.
|
|
36
|
+
|
|
37
|
+
---
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
## Why this exists
|
|
41
|
+
|
|
42
|
+
Every other tool in this space is a CLI that dumps your entire repo into one file. ContextForLLM gives you a browser UI where you can:
|
|
43
|
+
|
|
44
|
+
- Toggle individual files in or out
|
|
45
|
+
- Add a note to any file that gets embedded into the prompt
|
|
46
|
+
- Generate an AI summary of your project using Groq
|
|
47
|
+
- Automatically split large projects into sequenced prompt parts
|
|
48
|
+
- Set your task so the LLM knows exactly what to do
|
|
49
|
+
|
|
50
|
+
---
|
|
51
|
+
|
|
52
|
+
## Demo
|
|
53
|
+
|
|
54
|
+
> Screenshot / GIF coming soon
|
|
55
|
+
|
|
56
|
+
---
|
|
57
|
+
|
|
58
|
+
## Installation
|
|
59
|
+
|
|
60
|
+
You need Python 3.8 or higher installed.
|
|
61
|
+
|
|
62
|
+
**Step 1 — Clone the repo**
|
|
63
|
+
```bash
|
|
64
|
+
git clone https://github.com/Desai-23/ContextForLLM.git
|
|
65
|
+
cd ContextForLLM
|
|
66
|
+
```
|
|
67
|
+
|
|
68
|
+
**Step 2 — Install dependencies**
|
|
69
|
+
```bash
|
|
70
|
+
pip install -r requirements.txt
|
|
71
|
+
```
|
|
72
|
+
|
|
73
|
+
**Step 3 — Run the app**
|
|
74
|
+
```bash
|
|
75
|
+
python app.py
|
|
76
|
+
```
|
|
77
|
+
|
|
78
|
+
Then open your browser at:
|
|
79
|
+
```
|
|
80
|
+
http://localhost:5000
|
|
81
|
+
```
|
|
82
|
+
|
|
83
|
+
---
|
|
84
|
+
|
|
85
|
+
## How to use it
|
|
86
|
+
|
|
87
|
+
1. Paste the path to any project folder on your machine
|
|
88
|
+
2. Click Scan
|
|
89
|
+
3. Review the files — toggle any file off to exclude it from the prompt
|
|
90
|
+
4. Add annotations to files if needed (the LLM will see these notes)
|
|
91
|
+
5. Optionally generate an AI summary of your project using Groq
|
|
92
|
+
6. Set your task — what you want the LLM to do
|
|
93
|
+
7. Click Generate Context Prompt
|
|
94
|
+
8. Copy the prompt and paste it into any LLM chat
|
|
95
|
+
|
|
96
|
+
---
|
|
97
|
+
|
|
98
|
+
## Features
|
|
99
|
+
|
|
100
|
+
- Local — your code never leaves your machine
|
|
101
|
+
- Browser UI — no terminal required after launch
|
|
102
|
+
- Per-file exclusion — toggle files in or out with a switch
|
|
103
|
+
- Per-file annotations — add notes that get embedded into the prompt
|
|
104
|
+
- Token counter — live token count with a visual usage bar
|
|
105
|
+
- Prompt splitting — large projects automatically split into sequenced parts with handoff instructions
|
|
106
|
+
- AI project summary — uses Groq to generate a project summary injected at the top of every prompt
|
|
107
|
+
- .contextignore support — create a .contextignore file in any project to permanently exclude files
|
|
108
|
+
|
|
109
|
+
---
|
|
110
|
+
|
|
111
|
+
## .contextignore
|
|
112
|
+
|
|
113
|
+
Create a `.contextignore` file in any project folder to exclude files automatically on scan. Uses the same pattern syntax as `.gitignore`.
|
|
114
|
+
|
|
115
|
+
Example:
|
|
116
|
+
```
|
|
117
|
+
*.test.js
|
|
118
|
+
migrations/
|
|
119
|
+
old_auth.py
|
|
120
|
+
```
|
|
121
|
+
|
|
122
|
+
---
|
|
123
|
+
|
|
124
|
+
## Groq API key
|
|
125
|
+
|
|
126
|
+
The AI summary feature requires a free Groq API key.
|
|
127
|
+
|
|
128
|
+
1. Get a free key at console.groq.com
|
|
129
|
+
2. Click "Add Groq Key" in the top right of the UI
|
|
130
|
+
3. Paste your key — it is held in memory only and never saved to disk
|
|
131
|
+
|
|
132
|
+
---
|
|
133
|
+
|
|
134
|
+
## Tech stack
|
|
135
|
+
|
|
136
|
+
- Python / Flask — backend server
|
|
137
|
+
- Vanilla HTML, CSS, JS — frontend UI
|
|
138
|
+
- tiktoken — token counting
|
|
139
|
+
- Groq — AI project summary (optional)
|
|
140
|
+
|
|
141
|
+
---
|
|
142
|
+
|
|
143
|
+
## License
|
|
144
|
+
|
|
145
|
+
MIT
|
|
@@ -0,0 +1,113 @@
|
|
|
1
|
+
# ContextForLLM
|
|
2
|
+
|
|
3
|
+
A local web UI that scans any project folder on your machine, lets you select and annotate files, and generates a structured prompt you can paste directly into any LLM chat — Claude, ChatGPT, Gemini, or any other.
|
|
4
|
+
|
|
5
|
+
---
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
## Why this exists
|
|
9
|
+
|
|
10
|
+
Every other tool in this space is a CLI that dumps your entire repo into one file. ContextForLLM gives you a browser UI where you can:
|
|
11
|
+
|
|
12
|
+
- Toggle individual files in or out
|
|
13
|
+
- Add a note to any file that gets embedded into the prompt
|
|
14
|
+
- Generate an AI summary of your project using Groq
|
|
15
|
+
- Automatically split large projects into sequenced prompt parts
|
|
16
|
+
- Set your task so the LLM knows exactly what to do
|
|
17
|
+
|
|
18
|
+
---
|
|
19
|
+
|
|
20
|
+
## Demo
|
|
21
|
+
|
|
22
|
+
> Screenshot / GIF coming soon
|
|
23
|
+
|
|
24
|
+
---
|
|
25
|
+
|
|
26
|
+
## Installation
|
|
27
|
+
|
|
28
|
+
You need Python 3.8 or higher installed.
|
|
29
|
+
|
|
30
|
+
**Step 1 — Clone the repo**
|
|
31
|
+
```bash
|
|
32
|
+
git clone https://github.com/Desai-23/ContextForLLM.git
|
|
33
|
+
cd ContextForLLM
|
|
34
|
+
```
|
|
35
|
+
|
|
36
|
+
**Step 2 — Install dependencies**
|
|
37
|
+
```bash
|
|
38
|
+
pip install -r requirements.txt
|
|
39
|
+
```
|
|
40
|
+
|
|
41
|
+
**Step 3 — Run the app**
|
|
42
|
+
```bash
|
|
43
|
+
python app.py
|
|
44
|
+
```
|
|
45
|
+
|
|
46
|
+
Then open your browser at:
|
|
47
|
+
```
|
|
48
|
+
http://localhost:5000
|
|
49
|
+
```
|
|
50
|
+
|
|
51
|
+
---
|
|
52
|
+
|
|
53
|
+
## How to use it
|
|
54
|
+
|
|
55
|
+
1. Paste the path to any project folder on your machine
|
|
56
|
+
2. Click Scan
|
|
57
|
+
3. Review the files — toggle any file off to exclude it from the prompt
|
|
58
|
+
4. Add annotations to files if needed (the LLM will see these notes)
|
|
59
|
+
5. Optionally generate an AI summary of your project using Groq
|
|
60
|
+
6. Set your task — what you want the LLM to do
|
|
61
|
+
7. Click Generate Context Prompt
|
|
62
|
+
8. Copy the prompt and paste it into any LLM chat
|
|
63
|
+
|
|
64
|
+
---
|
|
65
|
+
|
|
66
|
+
## Features
|
|
67
|
+
|
|
68
|
+
- Local — your code never leaves your machine
|
|
69
|
+
- Browser UI — no terminal required after launch
|
|
70
|
+
- Per-file exclusion — toggle files in or out with a switch
|
|
71
|
+
- Per-file annotations — add notes that get embedded into the prompt
|
|
72
|
+
- Token counter — live token count with a visual usage bar
|
|
73
|
+
- Prompt splitting — large projects automatically split into sequenced parts with handoff instructions
|
|
74
|
+
- AI project summary — uses Groq to generate a project summary injected at the top of every prompt
|
|
75
|
+
- .contextignore support — create a .contextignore file in any project to permanently exclude files
|
|
76
|
+
|
|
77
|
+
---
|
|
78
|
+
|
|
79
|
+
## .contextignore
|
|
80
|
+
|
|
81
|
+
Create a `.contextignore` file in any project folder to exclude files automatically on scan. Uses the same pattern syntax as `.gitignore`.
|
|
82
|
+
|
|
83
|
+
Example:
|
|
84
|
+
```
|
|
85
|
+
*.test.js
|
|
86
|
+
migrations/
|
|
87
|
+
old_auth.py
|
|
88
|
+
```
|
|
89
|
+
|
|
90
|
+
---
|
|
91
|
+
|
|
92
|
+
## Groq API key
|
|
93
|
+
|
|
94
|
+
The AI summary feature requires a free Groq API key.
|
|
95
|
+
|
|
96
|
+
1. Get a free key at console.groq.com
|
|
97
|
+
2. Click "Add Groq Key" in the top right of the UI
|
|
98
|
+
3. Paste your key — it is held in memory only and never saved to disk
|
|
99
|
+
|
|
100
|
+
---
|
|
101
|
+
|
|
102
|
+
## Tech stack
|
|
103
|
+
|
|
104
|
+
- Python / Flask — backend server
|
|
105
|
+
- Vanilla HTML, CSS, JS — frontend UI
|
|
106
|
+
- tiktoken — token counting
|
|
107
|
+
- Groq — AI project summary (optional)
|
|
108
|
+
|
|
109
|
+
---
|
|
110
|
+
|
|
111
|
+
## License
|
|
112
|
+
|
|
113
|
+
MIT
|
|
File without changes
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import sys
|
|
3
|
+
import subprocess
|
|
4
|
+
import webbrowser
|
|
5
|
+
import time
|
|
6
|
+
|
|
7
|
+
def main():
    """Launch the ContextForLLM Flask server and open the UI in a browser.

    Starts ``app.py`` in a child process (so Ctrl-C here can stop it),
    waits briefly for the server to bind its port, opens the local UI,
    then blocks until the server exits or the user interrupts.
    """
    app_dir = os.path.dirname(os.path.abspath(__file__))
    app_path = os.path.join(app_dir, "app.py")

    print("")
    print("Starting ContextForLLM...")
    print("")

    # Run the server as a child process with the package directory as cwd
    # so its relative paths (e.g. the "ui" folder) resolve correctly.
    process = subprocess.Popen(
        [sys.executable, app_path],
        cwd=app_dir
    )

    # Give the server a moment to start listening before opening the UI.
    time.sleep(2)
    # Only open the browser if the server is still alive; previously the
    # page was opened even when the server had already crashed at startup.
    if process.poll() is None:
        webbrowser.open("http://127.0.0.1:5000")

    try:
        process.wait()
    except KeyboardInterrupt:
        process.terminate()
        print("")
        print("ContextForLLM stopped.")
        print("")
|
|
30
|
+
|
|
31
|
+
# Allow `python -m contextforllm` / direct execution to start the launcher.
if __name__ == "__main__":
    main()
|
|
33
|
+
|
|
34
|
+
|
|
@@ -0,0 +1,225 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import json
|
|
3
|
+
from flask import Flask, request, jsonify, send_from_directory
|
|
4
|
+
from context_builder import (
|
|
5
|
+
build_folder_tree,
|
|
6
|
+
collect_files,
|
|
7
|
+
build_file_block,
|
|
8
|
+
build_header,
|
|
9
|
+
split_into_prompts,
|
|
10
|
+
save_prompts,
|
|
11
|
+
count_tokens,
|
|
12
|
+
load_contextignore,
|
|
13
|
+
)
|
|
14
|
+
from project_summary import (
|
|
15
|
+
generate_project_summary,
|
|
16
|
+
save_summary,
|
|
17
|
+
load_summary,
|
|
18
|
+
delete_summary,
|
|
19
|
+
)
|
|
20
|
+
|
|
21
|
+
# Flask app serving the bundled single-page UI from the "ui" folder.
app = Flask(__name__, static_folder=os.path.join(os.path.dirname(__file__), "ui"))

# Directory this tool lives in; anchors data files next to the code.
TOOL_DIR = os.path.dirname(os.path.abspath(__file__))
# JSON file persisting the list of recently scanned projects.
RECENT_FILE = os.path.join(TOOL_DIR, "recent_projects.json")
# Maximum number of entries kept in the recent-projects list.
MAX_RECENT = 8

# Session-only Groq key (not saved to disk)
session_groq_key = ""
|
|
29
|
+
|
|
30
|
+
# ── Recent projects ────────────────────────────────────────────
|
|
31
|
+
def load_recent():
    """Return the saved recent-projects list, or [] if missing/unreadable."""
    if not os.path.isfile(RECENT_FILE):
        return []
    try:
        with open(RECENT_FILE, "r") as handle:
            return json.load(handle)
    except Exception:
        # Treat a corrupt or unreadable file the same as an absent one.
        return []
|
|
39
|
+
|
|
40
|
+
def save_recent(project_path, project_name):
    """Record *project_path* at the top of the recent-projects file.

    Deduplicates by path, caps the list at MAX_RECENT entries, and writes
    the result to RECENT_FILE as pretty-printed JSON.
    """
    recent = load_recent()
    # Drop any prior entry for this path. Uses .get() so a malformed
    # entry lacking a "path" key is skipped instead of raising KeyError.
    recent = [r for r in recent if r.get("path") != project_path]
    recent.insert(0, {"path": project_path, "name": project_name})
    recent = recent[:MAX_RECENT]
    with open(RECENT_FILE, "w") as f:
        json.dump(recent, f, indent=2)
|
|
47
|
+
|
|
48
|
+
# ── Routes ─────────────────────────────────────────────────────
|
|
49
|
+
@app.route("/")
def index():
    """Serve the single-page UI."""
    # Use the configured absolute static folder rather than the relative
    # string "ui" so the route works regardless of the process cwd.
    return send_from_directory(app.static_folder, "index.html")
|
|
52
|
+
|
|
53
|
+
@app.route("/api/recent", methods=["GET"])
def get_recent():
    """Return recent projects whose folders still exist on disk."""
    still_present = [entry for entry in load_recent()
                     if os.path.isdir(entry["path"])]
    return jsonify({"recent": still_present})
|
|
58
|
+
|
|
59
|
+
@app.route("/api/recent/remove", methods=["POST"])
def remove_recent():
    """Drop a project from the recent list and discard its cached summary."""
    target = request.json.get("path", "")
    remaining = [entry for entry in load_recent() if entry["path"] != target]
    with open(RECENT_FILE, "w") as f:
        json.dump(remaining, f, indent=2)
    delete_summary(TOOL_DIR, target)
    return jsonify({"ok": True})
|
|
69
|
+
|
|
70
|
+
@app.route("/api/groq-key", methods=["POST"])
def set_groq_key():
    """Store a Groq API key for this server session (memory only)."""
    global session_groq_key
    key = request.json.get("key", "").strip()
    if not key:
        return jsonify({"error": "Key is empty"}), 400
    if not key.startswith("gsk_"):
        return jsonify({"error": "Invalid key — Groq keys start with gsk_"}), 400
    session_groq_key = key
    return jsonify({"ok": True})
|
|
81
|
+
|
|
82
|
+
@app.route("/api/groq-key/status", methods=["GET"])
def groq_key_status():
    """Report whether a Groq key has been set this session."""
    has_key = session_groq_key != ""
    return jsonify({"has_key": has_key})
|
|
85
|
+
|
|
86
|
+
@app.route("/api/scan", methods=["POST"])
def scan():
    """Scan a project folder and return its tree, files, and token counts.

    Expects JSON ``{"project_path": str}``. Records the project in the
    recent list and reports any previously cached AI summary.
    """
    data = request.json
    project_path = data.get("project_path", "").strip()
    # Reject an empty path early: os.path.abspath("") resolves to the
    # server's cwd, which would silently scan the wrong directory.
    if not project_path:
        return jsonify({"error": "No folder path provided"}), 400
    project_path = os.path.abspath(os.path.expanduser(project_path))

    if not os.path.isdir(project_path):
        return jsonify({"error": f"Folder not found: {project_path}"}), 400

    project_name = os.path.basename(project_path)
    tree_lines = build_folder_tree(project_path)
    files = collect_files(project_path)
    patterns = load_contextignore(project_path)
    save_recent(project_path, project_name)

    # Build the full prompt text once to report a realistic total token count.
    file_blocks = [build_file_block(path, content) for path, content in files]
    header = build_header(project_name, project_path, "\n".join(tree_lines), 1)
    full_text = header + "\n".join(file_blocks)
    total_tokens = count_tokens(full_text)

    existing_summary = load_summary(TOOL_DIR, project_path)

    return jsonify({
        "project_name": project_name,
        "project_path": project_path,
        "tree": tree_lines,
        "files": [{"path": p, "tokens": count_tokens(c)} for p, c in files],
        "total_tokens": total_tokens,
        "file_count": len(files),
        "contextignore_rules": patterns,
        "existing_summary": existing_summary,
        "groq_key_set": bool(session_groq_key)
    })
|
|
120
|
+
|
|
121
|
+
@app.route("/api/summary/generate", methods=["POST"])
def generate_summary():
    """Generate an AI project summary via Groq and cache it on disk.

    Requires a session Groq key; returns 400 when the key or folder is
    missing, 500 when the Groq call itself fails.
    """
    data = request.json
    raw_path = data.get("project_path", "").strip()
    # Guard against an empty path: abspath("") would resolve to the
    # server's cwd and summarize the wrong directory.
    if not raw_path:
        return jsonify({"error": "No folder path provided"}), 400
    project_path = os.path.abspath(os.path.expanduser(raw_path))

    # session_groq_key is only read here, so no `global` statement needed.
    if not session_groq_key:
        return jsonify({"error": "No Groq API key set for this session"}), 400

    if not os.path.isdir(project_path):
        return jsonify({"error": "Folder not found"}), 400

    project_name = os.path.basename(project_path)
    files = collect_files(project_path)

    try:
        summary = generate_project_summary(project_name, files, session_groq_key)
        save_summary(TOOL_DIR, project_path, summary)
        return jsonify({"summary": summary})
    except Exception as e:
        # Surface the Groq/helper failure to the UI rather than a bare 500.
        return jsonify({"error": str(e)}), 500
|
|
143
|
+
|
|
144
|
+
@app.route("/api/summary/save", methods=["POST"])
def save_summary_route():
    """Persist a user-edited project summary for later prompt generation."""
    payload = request.json
    project_path = os.path.abspath(
        os.path.expanduser(payload.get("project_path", "")))
    summary = payload.get("summary", "").strip()
    if summary == "":
        return jsonify({"error": "Summary is empty"}), 400
    save_summary(TOOL_DIR, project_path, summary)
    return jsonify({"ok": True})
|
|
154
|
+
|
|
155
|
+
@app.route("/api/summary/delete", methods=["POST"])
def delete_summary_route():
    """Remove the cached AI summary for a project."""
    payload = request.json
    raw = payload.get("project_path", "")
    project_path = os.path.abspath(os.path.expanduser(raw))
    delete_summary(TOOL_DIR, project_path)
    return jsonify({"ok": True})
|
|
162
|
+
|
|
163
|
+
@app.route("/api/generate", methods=["POST"])
def generate():
    """Build the final LLM prompt(s) for a project and return them.

    Expects JSON with ``project_path``, optional ``task`` text, a list of
    ``excluded`` file paths, per-file ``annotations``, and an
    ``include_summary`` flag. Prompts are split into parts as needed,
    written under ``output/``, and returned with their token counts.
    """
    data = request.json
    raw_path = data.get("project_path", "").strip()
    # Reject an empty path: abspath("") resolves to the server's cwd and
    # would generate a prompt for the wrong directory.
    if not raw_path:
        return jsonify({"error": "No folder path provided"}), 400
    project_path = os.path.abspath(os.path.expanduser(raw_path))
    task = data.get("task", "").strip()
    excluded = set(data.get("excluded", []))
    annotations = data.get("annotations", {})
    include_summary = data.get("include_summary", True)

    if not os.path.isdir(project_path):
        return jsonify({"error": "Folder not found"}), 400

    project_name = os.path.basename(project_path)
    tree_lines = build_folder_tree(project_path)
    all_files = collect_files(project_path)
    # Honor the per-file toggles set in the UI.
    files = [(p, c) for p, c in all_files if p not in excluded]

    summary_text = ""
    if include_summary:
        summary_text = load_summary(TOOL_DIR, project_path) or ""

    output_dir = os.path.join(TOOL_DIR, "output")
    os.makedirs(output_dir, exist_ok=True)

    file_blocks = [
        build_file_block(path, content, annotations.get(path, ""))
        for path, content in files
    ]
    header = build_header(project_name, project_path, "\n".join(tree_lines), 1)

    # Inject the cached AI summary at the very top of every prompt part.
    if summary_text:
        header = f"## PROJECT SUMMARY\n\n{summary_text}\n\n{header}"

    prompts = split_into_prompts(header, file_blocks, task, project_name)
    saved_files = save_prompts(prompts, output_dir)

    parts = []
    for i, filepath in enumerate(saved_files):
        with open(filepath, "r") as f:
            content = f.read()
        parts.append({
            "part": i + 1,
            "filename": os.path.basename(filepath),
            "tokens": count_tokens(content),
            "content": content
        })

    return jsonify({
        "total_parts": len(parts),
        "output_dir": output_dir,
        "parts": parts
    })
|
|
216
|
+
|
|
217
|
+
# Direct execution: start the Flask dev server on port 5000 (localhost only).
if __name__ == "__main__":
    print("\nContextForLLM is running.")
    print("Open this in your browser: http://localhost:5000\n")
    app.run(debug=False, port=5000)
|
|
221
|
+
|
|
222
|
+
|
|
223
|
+
|
|
224
|
+
|
|
225
|
+
|