query3ai 0.1.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
query3ai-0.1.1/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 Vivek V Pai
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
@@ -0,0 +1,255 @@
1
+ Metadata-Version: 2.4
2
+ Name: query3ai
3
+ Version: 0.1.1
4
+ Summary: Query3AI: A multi-agent system combining document structure extraction, relevance filtering, and reasoning with Neo4j.
5
+ Author-email: Query3AI Author <author@example.com>
6
+ Project-URL: Homepage, https://github.com/vivekvpai/Query3AI
7
+ Classifier: Programming Language :: Python :: 3
8
+ Classifier: Operating System :: OS Independent
9
+ Requires-Python: >=3.8
10
+ Description-Content-Type: text/markdown
11
+ License-File: LICENSE
12
+ Requires-Dist: typer
13
+ Requires-Dist: rich
14
+ Requires-Dist: pymupdf
15
+ Requires-Dist: python-docx
16
+ Requires-Dist: python-dotenv>=1.0.0
17
+ Requires-Dist: prompt_toolkit>=3.0.0
18
+ Requires-Dist: readchar>=4.0.0
19
+ Requires-Dist: ollama
20
+ Requires-Dist: neo4j
21
+ Requires-Dist: groq
22
+ Dynamic: license-file
23
+
24
+ # Query3AI
25
+
26
+ > An intelligent, local-first document query system powered by a 3-Agent AI pipeline and Neo4j graph storage.
27
+
28
+ ---
29
+
30
+ ## What Is Query3AI?
31
+
32
+ Query3AI lets you ingest documents (PDF, DOCX, TXT, MD) and query them in natural language. It does not flatten your documents into a pile of text chunks like standard AI tools. It reads the structure, builds a knowledge graph, and reasons with three specialised AI agents — one to organise, one to filter, one to answer.
33
+
34
+ ```bash
35
+ # Install once
36
+ pip install query3ai
37
+
38
+ # Initialize workspace (creates ~/.query3ai with config)
39
+ query3ai init
40
+
41
+ # Start Neo4j
42
+ query3ai start-db
43
+
44
+ # Ingest documents
45
+ query3ai ingest report.pdf
46
+
47
+ # Query with interactive chat
48
+ query3ai chat
49
+ ```
50
+
51
+ No cloud required. No API keys needed for local models. Runs on a standard laptop.
52
+
53
+ ---
54
+
55
+ ## Installation
56
+
57
+ ### From PyPI (Recommended)
58
+
59
+ ```bash
60
+ pip install query3ai
61
+ ```
62
+
63
+ ### From Source
64
+
65
+ ```bash
66
+ git clone https://github.com/vivekvpai/Query3AI.git
67
+ cd Query3AI
68
+ pip install -e .
69
+ ```
70
+
71
+ ---
72
+
73
+ ## Quick Start
74
+
75
+ ### 1. Initialize Query3AI
76
+
77
+ ```bash
78
+ # Global workspace (default - stores config in ~/.query3ai)
79
+ query3ai init
80
+
81
+ # Or local workspace (creates config in current directory)
82
+ query3ai init --local
83
+ ```
84
+
85
+ This creates:
86
+ - `~/.query3ai/docker-compose.yml` - Neo4j configuration
87
+ - `~/.query3ai/.env` - Environment variables
88
+ - `~/.query3ai/config.json` - Model configuration
89
+
90
+ ### 2. Start Neo4j
91
+
92
+ ```bash
93
+ query3ai start-db
94
+ ```
95
+
96
+ Or manually with Docker:
97
+ ```bash
98
+ docker run -p 7687:7687 -p 7474:7474 \
99
+ -e NEO4J_AUTH=neo4j/query3ai \
100
+ neo4j:latest
101
+ ```
102
+
103
+ Default connection:
104
+ - URI: `bolt://localhost:7687`
105
+ - User: `neo4j`
106
+ - Password: `query3ai`
107
+
108
+ ### 3. Start Ollama (Optional - for local models)
109
+
110
+ ```bash
111
+ # Make sure Ollama is running
112
+ ollama serve
113
+
114
+ # Pull the three agent models
115
+ ollama pull phi3.5 # Tree Agent
116
+ ollama pull gemma2:2b # Decision Agent
117
+ ollama pull deepseek-r1:7b # Reasoning Agent
118
+ ```
119
+
120
+ ### 4. Ingest and Query
121
+
122
+ ```bash
123
+ # Ingest a document
124
+ query3ai ingest path/to/document.pdf
125
+
126
+ # Start interactive chat
127
+ query3ai chat
128
+ ```
129
+
130
+ ---
131
+
132
+ ## CLI Commands
133
+
134
+ | Command | Description |
135
+ |---|---|
136
+ | `query3ai init` | Initialize workspace in ~/.query3ai |
137
+ | `query3ai init --local` | Initialize workspace in current directory |
138
+ | `query3ai start-db` | Start Neo4j via docker-compose |
139
+ | `query3ai stop-db` | Stop Neo4j |
140
+ | `query3ai ingest <file>` | Ingest a PDF, DOCX, TXT, or MD file |
141
+ | `query3ai ask "<question>"` | Query all ingested documents |
142
+ | `query3ai ask "<question>" --cloud` | Query using cloud models |
143
+ | `query3ai list` | List all ingested documents |
144
+ | `query3ai inspect <doc_id>` | Inspect a document's tree structure |
145
+ | `query3ai delete <doc_id>` | Delete a document and all its nodes |
146
+ | `query3ai chat` | Start interactive TUI chat |
147
+
148
+ ---
149
+
150
+ ## The 3-Agent Pipeline
151
+
152
+ ```
153
+ Document
154
+        ↓
155
+
156
+ [Agent 1 — Tree AI] phi3.5 / qwen3.5:cloud
157
+ Builds hierarchical tree: Document → Sections → Chunks
158
+        ↓
159
+
160
+ Neo4j Graph Database
161
+        ↓
162
+
163
+ [Agent 2 — Decision AI] gemma2:2b / kimi-k2.5:cloud
164
+ Filters sections by relevance to the query (YES/NO)
165
+        ↓
166
+
167
+ [Agent 3 — Reasoning AI] deepseek-r1:7b / glm-5:cloud
168
+ Generates final answer from filtered context only
169
+        ↓
170
+
171
+ Answer + Source Sections
172
+ ```
173
+
174
+ ---
175
+
176
+ ## Model Configuration
177
+
178
+ Edit `~/.query3ai/config.json` to customize models:
179
+
180
+ ```json
181
+ {
182
+ "MODEL_PROVIDER": "groq",
183
+ "TREE_MODEL": "phi3.5:3.8b",
184
+ "DECISION_MODEL": "gemma2:2b",
185
+ "REASONING_MODEL": "deepseek-r1:7b"
186
+ }
187
+ ```
188
+
189
+ | Provider | Description | Privacy | Speed | Cost |
190
+ |---|---|---|---|---|
191
+ | `ollama_local` | Local Ollama models | ✅ Fully private | ❌ Slow (CPU) | ✅ Free |
192
+ | `ollama_cloud` | Cloud Ollama models | ⚠️ External | ✅ Fast | Varies |
193
+ | `groq` | Groq API (recommended) | ⚠️ External | ✅ Fastest | Free tier |
194
+
195
+ For Groq, add your API key to `~/.query3ai/.env`:
196
+
197
+ ```bash
198
+ GROQ_API_KEY=your_groq_api_key_here
199
+ ```
200
+
201
+ ---
202
+
203
+ ## Interactive Chat
204
+
205
+ Start the TUI chat interface:
206
+
207
+ ```bash
208
+ query3ai chat
209
+ ```
210
+
211
+ ### Slash Commands
212
+
213
+ | Command | Action |
214
+ |---|---|
215
+ | `/about` | Learn about Query3AI |
216
+ | `/help` | Display all available commands |
217
+ | `/ingest <path>` | Ingest a new document |
218
+ | `/listdocs` | List all indexed documents |
219
+ | `/list` | Show total sections and chunks |
220
+ | `/deletedoc` | Remove a document from database |
221
+ | `/cleanupdocs` | Delete all documents |
222
+ | `/cleanupresorce` | Clean up temporary files |
223
+ | `/clear` | Clear terminal |
224
+ | `/exit` | Exit chat |
225
+
226
+ ---
227
+
228
+ ## Requirements
229
+
230
+ | Component | Minimum | Recommended |
231
+ |---|---|---|
232
+ | RAM | 8 GB | 16 GB |
233
+ | CPU | 4 cores | 8 cores |
234
+ | GPU | Not required | Optional |
235
+ | Python | 3.8+ | 3.10+ |
236
+ | Storage | 10 GB free | 20 GB free |
237
+
238
+ ---
239
+
240
+ ## Tech Stack
241
+
242
+ | Layer | Technology |
243
+ |---|---|
244
+ | CLI | Typer + Rich |
245
+ | AI Inference | Ollama, Groq |
246
+ | Local Models | phi3.5, gemma2:2b, deepseek-r1:7b |
247
+ | Cloud Models | qwen3.5:cloud, kimi-k2.5:cloud, glm-5:cloud |
248
+ | Graph Database | Neo4j |
249
+ | Document Parsing | PyMuPDF, python-docx |
250
+
251
+ ---
252
+
253
+ ## License
254
+
255
+ MIT
@@ -0,0 +1,232 @@
1
+ # Query3AI
2
+
3
+ > An intelligent, local-first document query system powered by a 3-Agent AI pipeline and Neo4j graph storage.
4
+
5
+ ---
6
+
7
+ ## What Is Query3AI?
8
+
9
+ Query3AI lets you ingest documents (PDF, DOCX, TXT, MD) and query them in natural language. It does not flatten your documents into a pile of text chunks like standard AI tools. It reads the structure, builds a knowledge graph, and reasons with three specialised AI agents — one to organise, one to filter, one to answer.
10
+
11
+ ```bash
12
+ # Install once
13
+ pip install query3ai
14
+
15
+ # Initialize workspace (creates ~/.query3ai with config)
16
+ query3ai init
17
+
18
+ # Start Neo4j
19
+ query3ai start-db
20
+
21
+ # Ingest documents
22
+ query3ai ingest report.pdf
23
+
24
+ # Query with interactive chat
25
+ query3ai chat
26
+ ```
27
+
28
+ No cloud required. No API keys needed for local models. Runs on a standard laptop.
29
+
30
+ ---
31
+
32
+ ## Installation
33
+
34
+ ### From PyPI (Recommended)
35
+
36
+ ```bash
37
+ pip install query3ai
38
+ ```
39
+
40
+ ### From Source
41
+
42
+ ```bash
43
+ git clone https://github.com/vivekvpai/Query3AI.git
44
+ cd Query3AI
45
+ pip install -e .
46
+ ```
47
+
48
+ ---
49
+
50
+ ## Quick Start
51
+
52
+ ### 1. Initialize Query3AI
53
+
54
+ ```bash
55
+ # Global workspace (default - stores config in ~/.query3ai)
56
+ query3ai init
57
+
58
+ # Or local workspace (creates config in current directory)
59
+ query3ai init --local
60
+ ```
61
+
62
+ This creates:
63
+ - `~/.query3ai/docker-compose.yml` - Neo4j configuration
64
+ - `~/.query3ai/.env` - Environment variables
65
+ - `~/.query3ai/config.json` - Model configuration
66
+
67
+ ### 2. Start Neo4j
68
+
69
+ ```bash
70
+ query3ai start-db
71
+ ```
72
+
73
+ Or manually with Docker:
74
+ ```bash
75
+ docker run -p 7687:7687 -p 7474:7474 \
76
+ -e NEO4J_AUTH=neo4j/query3ai \
77
+ neo4j:latest
78
+ ```
79
+
80
+ Default connection:
81
+ - URI: `bolt://localhost:7687`
82
+ - User: `neo4j`
83
+ - Password: `query3ai`
84
+
85
+ ### 3. Start Ollama (Optional - for local models)
86
+
87
+ ```bash
88
+ # Make sure Ollama is running
89
+ ollama serve
90
+
91
+ # Pull the three agent models
92
+ ollama pull phi3.5 # Tree Agent
93
+ ollama pull gemma2:2b # Decision Agent
94
+ ollama pull deepseek-r1:7b # Reasoning Agent
95
+ ```
96
+
97
+ ### 4. Ingest and Query
98
+
99
+ ```bash
100
+ # Ingest a document
101
+ query3ai ingest path/to/document.pdf
102
+
103
+ # Start interactive chat
104
+ query3ai chat
105
+ ```
106
+
107
+ ---
108
+
109
+ ## CLI Commands
110
+
111
+ | Command | Description |
112
+ |---|---|
113
+ | `query3ai init` | Initialize workspace in ~/.query3ai |
114
+ | `query3ai init --local` | Initialize workspace in current directory |
115
+ | `query3ai start-db` | Start Neo4j via docker-compose |
116
+ | `query3ai stop-db` | Stop Neo4j |
117
+ | `query3ai ingest <file>` | Ingest a PDF, DOCX, TXT, or MD file |
118
+ | `query3ai ask "<question>"` | Query all ingested documents |
119
+ | `query3ai ask "<question>" --cloud` | Query using cloud models |
120
+ | `query3ai list` | List all ingested documents |
121
+ | `query3ai inspect <doc_id>` | Inspect a document's tree structure |
122
+ | `query3ai delete <doc_id>` | Delete a document and all its nodes |
123
+ | `query3ai chat` | Start interactive TUI chat |
124
+
125
+ ---
126
+
127
+ ## The 3-Agent Pipeline
128
+
129
+ ```
130
+ Document
131
+        ↓
132
+
133
+ [Agent 1 — Tree AI] phi3.5 / qwen3.5:cloud
134
+ Builds hierarchical tree: Document → Sections → Chunks
135
+        ↓
136
+
137
+ Neo4j Graph Database
138
+        ↓
139
+
140
+ [Agent 2 — Decision AI] gemma2:2b / kimi-k2.5:cloud
141
+ Filters sections by relevance to the query (YES/NO)
142
+        ↓
143
+
144
+ [Agent 3 — Reasoning AI] deepseek-r1:7b / glm-5:cloud
145
+ Generates final answer from filtered context only
146
+        ↓
147
+
148
+ Answer + Source Sections
149
+ ```
150
+
151
+ ---
152
+
153
+ ## Model Configuration
154
+
155
+ Edit `~/.query3ai/config.json` to customize models:
156
+
157
+ ```json
158
+ {
159
+ "MODEL_PROVIDER": "groq",
160
+ "TREE_MODEL": "phi3.5:3.8b",
161
+ "DECISION_MODEL": "gemma2:2b",
162
+ "REASONING_MODEL": "deepseek-r1:7b"
163
+ }
164
+ ```
165
+
166
+ | Provider | Description | Privacy | Speed | Cost |
167
+ |---|---|---|---|---|
168
+ | `ollama_local` | Local Ollama models | ✅ Fully private | ❌ Slow (CPU) | ✅ Free |
169
+ | `ollama_cloud` | Cloud Ollama models | ⚠️ External | ✅ Fast | Varies |
170
+ | `groq` | Groq API (recommended) | ⚠️ External | ✅ Fastest | Free tier |
171
+
172
+ For Groq, add your API key to `~/.query3ai/.env`:
173
+
174
+ ```bash
175
+ GROQ_API_KEY=your_groq_api_key_here
176
+ ```
177
+
178
+ ---
179
+
180
+ ## Interactive Chat
181
+
182
+ Start the TUI chat interface:
183
+
184
+ ```bash
185
+ query3ai chat
186
+ ```
187
+
188
+ ### Slash Commands
189
+
190
+ | Command | Action |
191
+ |---|---|
192
+ | `/about` | Learn about Query3AI |
193
+ | `/help` | Display all available commands |
194
+ | `/ingest <path>` | Ingest a new document |
195
+ | `/listdocs` | List all indexed documents |
196
+ | `/list` | Show total sections and chunks |
197
+ | `/deletedoc` | Remove a document from database |
198
+ | `/cleanupdocs` | Delete all documents |
199
+ | `/cleanupresorce` | Clean up temporary files |
200
+ | `/clear` | Clear terminal |
201
+ | `/exit` | Exit chat |
202
+
203
+ ---
204
+
205
+ ## Requirements
206
+
207
+ | Component | Minimum | Recommended |
208
+ |---|---|---|
209
+ | RAM | 8 GB | 16 GB |
210
+ | CPU | 4 cores | 8 cores |
211
+ | GPU | Not required | Optional |
212
+ | Python | 3.8+ | 3.10+ |
213
+ | Storage | 10 GB free | 20 GB free |
214
+
215
+ ---
216
+
217
+ ## Tech Stack
218
+
219
+ | Layer | Technology |
220
+ |---|---|
221
+ | CLI | Typer + Rich |
222
+ | AI Inference | Ollama, Groq |
223
+ | Local Models | phi3.5, gemma2:2b, deepseek-r1:7b |
224
+ | Cloud Models | qwen3.5:cloud, kimi-k2.5:cloud, glm-5:cloud |
225
+ | Graph Database | Neo4j |
226
+ | Document Parsing | PyMuPDF, python-docx |
227
+
228
+ ---
229
+
230
+ ## License
231
+
232
+ MIT
@@ -0,0 +1,35 @@
1
+ [build-system]
2
+ requires = ["setuptools>=61.0"]
3
+ build-backend = "setuptools.build_meta"
4
+
5
+ [project]
6
+ name = "query3ai"
7
+ version = "0.1.1"
8
+ authors = [
9
+ { name="Query3AI Author", email="author@example.com" },
10
+ ]
11
+ description = "Query3AI: A multi-agent system combining document structure extraction, relevance filtering, and reasoning with Neo4j."
12
+ readme = "README.md"
13
+ requires-python = ">=3.8"
14
+ classifiers = [
15
+ "Programming Language :: Python :: 3",
16
+ "Operating System :: OS Independent",
17
+ ]
18
+ dependencies = [
19
+ "typer",
20
+ "rich",
21
+ "pymupdf",
22
+ "python-docx",
23
+ "python-dotenv>=1.0.0",
24
+ "prompt_toolkit>=3.0.0",
25
+ "readchar>=4.0.0",
26
+ "ollama",
27
+ "neo4j",
28
+ "groq"
29
+ ]
30
+
31
+ [project.urls]
32
+ "Homepage" = "https://github.com/vivekvpai/Query3AI"
33
+
34
+ [project.scripts]
35
+ query3ai = "query3ai.cli.commands:app"
@@ -0,0 +1,4 @@
1
+ [egg_info]
2
+ tag_build =
3
+ tag_date = 0
4
+
File without changes
File without changes