crystalwindow 4.2-py3-none-any.whl → 4.3-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
crystalwindow/ai.py CHANGED
@@ -1,19 +1,20 @@
  # ==========================================================
- # CrystalAI v0.6 - Unified Engine
+ # CrystalAI v0.7 - Self-Contained Symbolic Engine
  # ----------------------------------------------------------
- # Combines:
- # - v0.4: auto-fix, AST parser, docs index, diff tools
- # - v0.5: personality, library ingestion, safe key check
- # - Groq fallback, local fallback, file analysis
+ # Focus: Pure Python 'thinking' (Knowledge Graph/Rule-Based)
+ # Changes:
+ # - Refactored _local_ai to _symbolic_engine
+ # - Added a simple knowledge base (K-Graph)
+ # - Added context/intent matching for 'thinking'
+ # - Removed Groq/External API integration
  # ==========================================================

  import os
  import ast
  import difflib
- from typing import Optional, Dict, Any
- import requests
- import groq
-
+ from typing import Optional, Dict, Any, List
+ # Removed requests and groq imports as they are no longer needed
+ # Removed import groq (if it was present)

  # ==========================================================
  # Response Wrapper
@@ -31,42 +32,34 @@ class CrystalAIResponse:
  # MAIN ENGINE
  # ==========================================================
  class AI:
-     DEFAULT_MODEL = "llama-3.1-8b"
+     DEFAULT_MODEL = "PURE_SYMBOLIC"
      DEFAULT_PERSONALITY = (
-         "You are CrystalWindow AI. You help users with Python code, "
-         "debugging, errors, docs, and file analysis. "
-         "Be friendly, technical, clear, and precise."
+         "You are CrystalMind AI, a completely self-coded, symbolic engine. "
+         "I process information using an internal rule-base and knowledge graph. "
+         "I can reason about simple code and defined concepts."
      )
-     PLACEHOLDER_KEY = "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"
+     # Key is ignored in this self-contained mode, but kept for signature consistency
+     PLACEHOLDER_KEY = "PURE_SYMBOLIC_KEY_IGNORED"

      # ------------------------------------------------------
      def __init__(self, key=None, model=None):
-         # --- KEY VALIDATION ---
-         if not key or len(key) < 20 or " " in key:
-             print("[CrystalAI] Warning: Invalid or missing key → using placeholder. To get a Fixed Key go to 'console.groq.com/keys'")
-             self.key = self.PLACEHOLDER_KEY
-         else:
-             self.key = key
-
-         # --- MODEL VALIDATION ---
-         if not model or not isinstance(model, str) or len(model) < 3:
-             print("[CrystalAI] Unknown model → using default.")
-             self.model = self.DEFAULT_MODEL
-         else:
-             self.model = model
-
-         # Persona
+         # --- KEY/MODEL SETUP (for consistency, though ignored) ---
+         self.key = self.PLACEHOLDER_KEY
+         self.model = self.DEFAULT_MODEL
          self.personality = self.DEFAULT_PERSONALITY

-         # Library knowledge (loaded .py files)
-         self.library_context = ""
+         # --- PURE AI KNOWLEDGE BASE (The 'Thinking' Core) ---
+         self.knowledge_graph: Dict[str, Any] = self._build_knowledge_graph()

          # v0.4 memory system (optional)
-         self.memory = []
+         self.memory: List[Dict[str, str]] = []
          self.use_memory = True

          # v0.4 toggle for forcing local engine
-         self.force_local = False
+         self.force_local = True # Always True for a self-contained AI
+
+         # Library knowledge (loaded .py files)
+         self.library_context = ""

      # ==========================================================
      # PERSONALITY
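
One practical effect of the __init__ change above: the key and model parameters are still accepted but are no longer validated or used. A minimal sketch of the resulting behaviour (my illustration, not taken from the package's docs or tests; the import path is inferred from the crystalwindow/ai.py layout):

    from crystalwindow.ai import AI

    # In 4.3 both arguments are ignored; the class constants are always used.
    bot = AI(key="gsk_anything", model="llama-3.1-8b")
    print(bot.key)          # PURE_SYMBOLIC_KEY_IGNORED
    print(bot.model)        # PURE_SYMBOLIC
    print(bot.force_local)  # True (the engine never calls an external API)
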
@@ -90,6 +83,7 @@ class AI:
      def index_library(self, folder):
          """
          Load all Python files as context for smarter answers.
+         (Context is used in prompt but processed by local engine's rules)
          """
          out = []
          if not os.path.exists(folder):
@@ -151,102 +145,102 @@ class AI:
              self.memory.pop(0)

      # ==========================================================
-     # LOCAL FALLBACK AI (modern, useful, not dumb)
+     # PURE AI KNOWLEDGE BASE (Self-Contained 'Knowledge')
+     # ==========================================================
+     def _build_knowledge_graph(self) -> Dict[str, Any]:
+         """
+         Defines the internal knowledge the pure AI can reason with.
+         """
+         return {
+             "python": {
+                 "desc": "A high-level, interpreted programming language.",
+                 "keywords": ["language", "interpreted", "high-level"],
+                 "syntax": {
+                     "if_statement": "if condition: ... else: ...",
+                     "loop": "for item in iterable: ..."
+                 }
+             },
+             "ast": {
+                 "desc": "Abstract Syntax Tree. Used for parsing code structure.",
+                 "keywords": ["parsing", "code", "structure", "tree"]
+             },
+             "fix_code": {
+                 "rule": "look for SyntaxError, especially missing colons or mismatched brackets",
+                 "keywords": ["fix", "error", "bug", "syntax"]
+             }
+         }
+
+     # ==========================================================
+     # PURE AI 'THINKING' ENGINE (Replaces _local_ai)
      # ==========================================================
-     def _local_ai(self, prompt, file_data):
+     def _symbolic_engine(self, prompt: str, file_data: Optional[str]) -> str:
          """
-         Improved fallback mode:
-         - If file provided → real AST analysis
-         - If general question → helpful offline response
-         - No more random jokes or irrelevant "forgot a colon"
+         Simulates 'thinking' using only internal rules and the knowledge graph.
          """
+         output = ["[SymbolicEngine] Processing request..."]
+         lower_prompt = prompt.lower()

-         # --- If file provided, try real Python AST parsing ---
+         # --- Stage 1: File Analysis (Real Python AST) ---
          if file_data and not file_data.startswith("[CrystalAI]"):
+             output.append("\n[Stage 1: Code Parsing]")
              try:
                  ast.parse(file_data)
-                 return (
-                     "[LocalAI] I was able to parse the file successfully.\n"
-                     "There are no syntax errors.\n"
-                     "Ask me to explain, summarize, refactor, or improve something."
-                 )
+                 output.append("✅ **No Syntax Errors Detected** (via AST).")
+                 output.append("The code is structurally sound. Ask for refactoring or explanation.")
+                 return "\n".join(output)
              except SyntaxError as se:
+                 # Use the built-in fix rule from the knowledge graph
+                 fix_rule = self.knowledge_graph["fix_code"]["rule"]
                  lineno = se.lineno or 0
                  offset = se.offset or 0
                  msg = (
-                     f"[LocalAI] SyntaxError detected:\n"
+                     f" **SyntaxError Detected** (via AST):\n"
                      f"• Message: {se.msg}\n"
                      f"• Line: {lineno}\n"
-                     f"• Column: {offset}\n\n"
+                     f"• Column: {offset}\n"
+                     f"• Rule suggestion: {fix_rule}"
                  )
-                 snippet = self._snippet(file_data, lineno)
-                 return msg + snippet
-
-         # --- General offline fallback (safe + useful) ---
-         lower = prompt.lower()
-
-         # Code-related queries
-         if "fix" in lower or "error" in lower or "bug" in lower:
-             return (
-                 "[LocalAI] I can't reach Groq right now,\n"
-                 "but here's what I can do offline:\n"
-                 "• Check for syntax problems if you provide a file\n"
-                 "• Suggest common Python mistakes\n\n"
-                 "Tip: try again once Groq is reachable for full debugging."
-             )
-
-         # Regular questions (time, math, writing, etc.)
-         if any(x in lower for x in ["time", "story", "game", "explain", "python"]):
-             return (
-                 "[LocalAI] I'm offline, but I can still give general help:\n"
-                 "- Ask me Python questions\n"
-                 "- Ask for concepts, writing tips, structure examples\n"
-                 "- Provide a file and I can analyze it with AST\n"
-             )
-
-         # Catch-all fallback
-         return (
-             "[LocalAI] Offline mode enabled.\n"
-             "I can still analyze Python code and help with general knowledge.\n"
-             "Once online, Groq will give full intelligent responses."
-         )
+                 output.append(msg)
+                 output.append(self._snippet(file_data, lineno))
+                 return "\n".join(output)
+
+         # --- Stage 2: Knowledge Graph Lookup (Rule-Based Reasoning) ---
+         output.append("\n[Stage 2: Symbolic Lookup]")
+
+         # Check for concepts the AI 'knows'
+         found_concept = False
+         for key, knowledge in self.knowledge_graph.items():
+             if key in lower_prompt or any(k in lower_prompt for k in knowledge.get("keywords", [])):
+                 if key == "fix_code": continue # Already handled in Stage 1
+
+                 output.append(f"🧠 Found Concept: **{key.upper()}**")
+                 output.append(f"Description: {knowledge.get('desc', 'No detailed description.')}")
+
+                 # Simple reasoning about related syntax
+                 if 'syntax' in knowledge:
+                     output.append("Related Syntax:")
+                     for syn, code in knowledge['syntax'].items():
+                         output.append(f" - {syn.replace('_', ' ')}: `{code}`")
+
+                 found_concept = True
+                 break
+
+         if not found_concept:
+             output.append("❓ Concept Unknown: I am limited to my internal knowledge base (Python, AST, Fix Code).")
+             output.append("Please ask about a defined concept or provide a file for AST analysis.")
+
+
+         return "\n".join(output)

      # ==========================================================
-     # ASK (Groq fallback)
+     # ASK (Pure Symbolic Engine)
      # ==========================================================
      def ask(self, text, file=None):
          file_data = self._read_file(file)
          prompt = self._build_prompt(text, file_data)
-
-         # Try to call Groq normally
-         try:
-             url = "https://api.groq.com/openai/v1/chat/completions"
-             headers = {
-                 "Authorization": f"Bearer {self.key}",
-                 "Content-Type": "application/json"
-             }
-
-             payload = {
-                 "model": "llama-3.3-70b-versatile",
-                 "messages": [
-                     {"role": "system", "content": self.personality},
-                     {"role": "user", "content": prompt}
-                 ],
-                 "temperature": 0.3
-             }
-
-             import requests
-             r = requests.post(url, json=payload, headers=headers, timeout=8)
-             data = r.json()
-
-             if "error" in data:
-                 raise RuntimeError(data["error"])
-
-             resp = data["choices"][0]["message"]["content"]
-
-         except Exception:
-             # Groq unreachable → fallback
-             resp = self._local_ai(prompt, file_data)
+
+         # We skip the external API call entirely, relying on the symbolic engine
+         resp = self._symbolic_engine(prompt, file_data)

          self._save_memory(text, resp)
          return CrystalAIResponse(resp)
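
For readers skimming the hunk above: Stage 2 'reasoning' is a plain substring match of the lower-cased prompt against each concept key and its keyword list, with fix_code reserved for Stage 1. A condensed, standalone restatement of that rule (illustrative only, not part of the package):

    from typing import Optional

    # Keyword data mirrored from _build_knowledge_graph above (descriptions omitted).
    knowledge_graph = {
        "python": {"keywords": ["language", "interpreted", "high-level"]},
        "ast": {"keywords": ["parsing", "code", "structure", "tree"]},
        "fix_code": {"keywords": ["fix", "error", "bug", "syntax"]},
    }

    def match_concept(prompt: str) -> Optional[str]:
        lower_prompt = prompt.lower()
        for key, knowledge in knowledge_graph.items():
            if key == "fix_code":
                continue  # the real engine only applies fix_code in Stage 1
            if key in lower_prompt or any(k in lower_prompt for k in knowledge["keywords"]):
                return key
        return None

    print(match_concept("How does an interpreted language work?"))  # -> python
    print(match_concept("What is a quaternion?"))                   # -> None ('Concept Unknown' branch)

Because the test is substring-based, a prompt such as "I love trees" also resolves to ast via the "tree" keyword; the first matching concept in dictionary order wins.
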
@@ -326,9 +320,10 @@ class AI:
          out = []
          for i in range(start, end):
              mark = "->" if (i + 1) == lineno else " "
-             out.append(f"{mark} {i+1:4}: {lines[i]}")
+             # Adjusted line formatting for clarity
+             out.append(f"{mark} {i+1:<4}: {lines[i]}")
          return "\n".join(out)

  # ==========================================================
- # END OF ENGINE
- # ==========================================================
+ # END OF PURE SYMBOLIC ENGINE
+ # ==========================================================
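
Taken together, the ai.py changes make 4.3 fully offline: ask() always routes through _symbolic_engine and never contacts Groq. A hypothetical end-to-end sketch of the new flow; it assumes ask() accepts a path through its file argument (via _read_file, which is not shown in this diff) and that printing a CrystalAIResponse displays its text:

    from crystalwindow.ai import AI

    bot = AI()  # key/model are optional and ignored in 4.3

    # Concept question: answered by the Stage 2 knowledge-graph lookup
    print(bot.ask("Can you explain Python syntax?"))

    # Code question: a file with a missing colon takes the Stage 1 AST path
    with open("broken.py", "w") as f:
        f.write("def hello()\n    print('hi')\n")
    print(bot.ask("please fix this error", file="broken.py"))
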
crystalwindow-4.3.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: crystalwindow
- Version: 4.2
+ Version: 4.3
  Summary: A Tkinter powered window + GUI toolkit made by Crystal (MEEEEEE)! Easier apps, smoother UI and all-in-one helpers!
  Home-page: https://pypi.org/project/crystalwindow/
  Author: CrystalBallyHereXD
crystalwindow-4.3.dist-info/RECORD CHANGED
@@ -1,6 +1,6 @@
  crystalwindow/FileHelper.py,sha256=aUnnRG7UwvzJt-idjWjmpwy3RM6nqLlC3-7Bae6Yb94,5471
  crystalwindow/__init__.py,sha256=a2kdMZ29QZ4kSQ3M8jLvCR6g3OUQxNhdaT3ycxoames,2264
- crystalwindow/ai.py,sha256=jQ-O3h0HvDlmopWSZSTP0oEwnxZzrDCTCPNjjslB0IE,12504
+ crystalwindow/ai.py,sha256=k-3MVOKZwAIMLB6ifyR0Gcuo4Fh0-JvoDVA9_j-PJSQ,13293
  crystalwindow/animation.py,sha256=zHjrdBXQeyNaLAuaGPldJueX05OZ5j31YR8NizmR0uQ,427
  crystalwindow/assets.py,sha256=2Cj0zdhMWo3mWjdr9KU5n-9_8iKj_fJ9uShMFA-27HU,5193
  crystalwindow/camera.py,sha256=tbn4X-jxMIszAUg3Iu-89gJN5nij0mjPMEzGotcLbJI,712
@@ -33,8 +33,8 @@ crystalwindow/gametests/guitesting.py,sha256=SrOssY5peCQEV6TQ1AiOWtjb9phVGdRzW-Q
  crystalwindow/gametests/sandbox.py,sha256=Oo2tU2N0y3BPVa6T5vs_h9N6islhQrjSrr_78XLut5I,1007
  crystalwindow/gametests/squaremove.py,sha256=poP2Zjl2oc2HVvIAgIK34H2jVj6otL4jEdvAOR6L9sI,572
  crystalwindow/gametests/windowtesting.py,sha256=_9X6wnV1-_X_PtNS-0zu-k209NtFIwAc4vpxLPp7V2o,97
- crystalwindow-4.2.dist-info/licenses/LICENSE,sha256=Gt5cJRchdNt0guxyQMHKsATN5PM5mjuDhdO6Gzs9qQc,1096
- crystalwindow-4.2.dist-info/METADATA,sha256=_R-c3vsY6VjcDO_LozKHcvZWXHFU0vD80ArnEw-biRg,7338
- crystalwindow-4.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- crystalwindow-4.2.dist-info/top_level.txt,sha256=PeQSld4b19XWT-zvbYkvE2Xg8sakIMbDzSzSdOSRN8o,14
- crystalwindow-4.2.dist-info/RECORD,,
+ crystalwindow-4.3.dist-info/licenses/LICENSE,sha256=Gt5cJRchdNt0guxyQMHKsATN5PM5mjuDhdO6Gzs9qQc,1096
+ crystalwindow-4.3.dist-info/METADATA,sha256=gYbWholGWviaRGXpVc1zaOZ0wQ6KWB9lvw2ZdWbBDrY,7338
+ crystalwindow-4.3.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ crystalwindow-4.3.dist-info/top_level.txt,sha256=PeQSld4b19XWT-zvbYkvE2Xg8sakIMbDzSzSdOSRN8o,14
+ crystalwindow-4.3.dist-info/RECORD,,