cnhkmcp-2.1.0-py3-none-any.whl → cnhkmcp-2.1.2-py3-none-any.whl

This diff shows the contents of two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
Files changed (24)
  1. cnhkmcp/untracked/AI桌面插件/ace.log +0 -0
  2. cnhkmcp/untracked/AI桌面插件/get_knowledgeBase_tool/helpful_functions.py +1 -1
  3. cnhkmcp/untracked/AI桌面插件/main.py +0 -5
  4. cnhkmcp/untracked/AI桌面插件/process_knowledge_base.py +73 -72
  5. cnhkmcp/untracked/AI桌面插件/rag_engine.py +199 -56
  6. cnhkmcp/untracked/AI桌面插件/requirements.txt +2 -7
  7. cnhkmcp/untracked/APP/Tranformer/Transformer.py +1 -1
  8. cnhkmcp/untracked/APP/Tranformer/helpful_functions.py +1 -1
  9. cnhkmcp/untracked/APP/give_me_idea/helpful_functions.py +1 -1
  10. cnhkmcp/untracked/APP/helpful_functions.py +1 -1
  11. cnhkmcp/untracked/APP/hkSimulator/ace_lib.py +2 -2
  12. cnhkmcp/untracked/APP/hkSimulator/autosimulator.py +4 -4
  13. cnhkmcp/untracked/APP/hkSimulator/helpful_functions.py +1 -1
  14. cnhkmcp/untracked/APP/simulator/simulator_wqb.py +1 -1
  15. cnhkmcp/untracked/APP/缘分一道桥/helpful_functions.py +1 -1
  16. cnhkmcp/untracked/mcp文件论坛版2_如果原版启动不了浏览器就试这个/platform_functions.py +2 -2
  17. cnhkmcp/untracked/platform_functions.py +1 -1
  18. {cnhkmcp-2.1.0.dist-info → cnhkmcp-2.1.2.dist-info}/METADATA +1 -1
  19. {cnhkmcp-2.1.0.dist-info → cnhkmcp-2.1.2.dist-info}/RECORD +23 -23
  20. cnhkmcp/untracked/AI桌面插件/vector_db/chroma.sqlite3 +0 -0
  21. {cnhkmcp-2.1.0.dist-info → cnhkmcp-2.1.2.dist-info}/WHEEL +0 -0
  22. {cnhkmcp-2.1.0.dist-info → cnhkmcp-2.1.2.dist-info}/entry_points.txt +0 -0
  23. {cnhkmcp-2.1.0.dist-info → cnhkmcp-2.1.2.dist-info}/licenses/LICENSE +0 -0
  24. {cnhkmcp-2.1.0.dist-info → cnhkmcp-2.1.2.dist-info}/top_level.txt +0 -0
@@ -122,7 +122,7 @@ def save_simulation_result(result: dict) -> None:
 
     os.makedirs(folder_path, exist_ok=True)
 
-    with open(file_path, "w") as file:
+    with open(file_path, "w", encoding="utf-8") as file:
         json.dump(result, file)
 
 
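This hunk's change — passing an explicit encoding="utf-8" to open() — repeats across many of the hunks below. Without it, text-mode open() uses locale.getpreferredencoding(False), which on Windows is a legacy code page (for example cp936 on a Chinese-locale system), so UTF-8 JSON files can fail to round-trip. A minimal sketch, not part of the package, of the safe pattern:

# Sketch only: why the diff pins encoding="utf-8" on every open().
import json

data = {"note": "配置"}  # hypothetical payload containing non-ASCII text
with open("result.json", "w", encoding="utf-8") as f:
    json.dump(data, f, ensure_ascii=False)  # bytes on disk are UTF-8

with open("result.json", encoding="utf-8") as f:  # same encoding on read
    assert json.load(f) == data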
@@ -4,7 +4,6 @@ import base64
 import tkinter as tk
 from tkinter import scrolledtext, messagebox, Toplevel
 from PIL import Image, ImageTk, ImageGrab
-import pyautogui
 from openai import OpenAI
 import threading
 import io
@@ -21,14 +20,10 @@ def install_dependencies():
     # Mapping of package names to their import names (if different)
     packages = {
         "openai": "openai",
-        "pyautogui": "pyautogui",
         "Pillow": "PIL",
-        "pyperclip": "pyperclip",
-        "keyboard": "keyboard",
         "fastembed": "fastembed",
         "chromadb": "chromadb",
         "watchdog": "watchdog",
-        "urllib3": "urllib3",
         "pypdf": "pypdf",
         "python-docx": "docx"
     }
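The packages dict maps pip distribution names to import names, which differ for entries such as python-docx/docx. A sketch of how such a mapping is typically consumed — ensure() is a hypothetical helper, not the package's actual function:

import importlib
import subprocess
import sys

def ensure(pip_name: str, import_name: str) -> None:
    # Install the distribution only if its import name is missing.
    try:
        importlib.import_module(import_name)
    except ImportError:
        subprocess.check_call([sys.executable, "-m", "pip", "install", pip_name])

ensure("python-docx", "docx")  # pip name and import name differ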
@@ -16,17 +16,18 @@ if TOOL_DIR not in sys.path:
     sys.path.insert(0, TOOL_DIR)
 import ace_lib
 from fetch_all_operators import fetch_operators, prompt_credentials
-from fetch_all_datasets import (
-    fetch_all_combinations,
-    fetch_datasets_for_combo,
-    merge_and_deduplicate,
-)
 from fetch_all_documentation import (
     fetch_tutorials,
     fetch_tutorial_pages,
     fetch_page,
     _extract_page_id,
 )
+# Dataset fetching currently disabled per request
+# from fetch_all_datasets import (
+#     fetch_all_combinations,
+#     fetch_datasets_for_combo,
+#     merge_and_deduplicate,
+# )
 
 
 def ensure_knowledge_dir():
@@ -107,62 +108,63 @@ def process_operators(session: ace_lib.SingleSession, knowledge_dir: str):
         print(f"✓ Created {filename} with {len(category_list)} operators")
 
 
-def process_datasets(session: ace_lib.SingleSession, dataset_dir: str):
-    """Fetch datasets and save one JSON per region."""
-    print("=== Processing Datasets ===")
-
-    print("Fetching valid instrument/region/delay/universe combinations...")
-    options_df = fetch_all_combinations(session)
-    if options_df is None or options_df.empty:
-        print("No simulation options fetched; aborting dataset fetch.")
-        return
-
-    all_datasets: list[pd.DataFrame] = []
-    combo_idx = 0
-
-    for _, row in options_df.iterrows():
-        instrument_type = row.get("InstrumentType")
-        region = row.get("Region")
-        delay = row.get("Delay")
-        universes = row.get("Universe") or []
-
-        for universe in universes:
-            combo_idx += 1
-            print(f"[{combo_idx}] {instrument_type} / {region} / D{delay} / {universe}")
-            try:
-                df = fetch_datasets_for_combo(session, instrument_type, region, delay, universe)
-                print(f"  -> {len(df)} rows")
-                all_datasets.append(df)
-            except Exception as exc:
-                print(f"  -> Failed: {exc}")
-
-    if not all_datasets:
-        print("No datasets fetched; nothing to save.")
-        return
-
-    combined_df = pd.concat([df for df in all_datasets if not df.empty], ignore_index=True)
-    if combined_df.empty:
-        print("No datasets fetched; nothing to save.")
-        return
-
-    regions = sorted(combined_df["param_region"].dropna().unique())
-    print(f"Found regions: {', '.join(regions)}")
-
-    for region in regions:
-        region_df = combined_df[combined_df["param_region"] == region]
-        region_unique = merge_and_deduplicate([region_df])
-
-        region_list = []
-        for _, row in region_unique.iterrows():
-            record = {col: to_jsonable(row[col]) for col in row.index}
-            region_list.append(record)
-
-        filename = f"{region.replace(' ', '_').lower()}_datasets.json"
-        filepath = os.path.join(dataset_dir, filename)
-        with open(filepath, "w", encoding="utf-8") as f:
-            json.dump(region_list, f, ensure_ascii=False, indent=2)
-
-        print(f"✓ Created {filename} with {len(region_list)} datasets")
+# Dataset fetching intentionally disabled; keep for potential re-enable.
+# def process_datasets(session: ace_lib.SingleSession, dataset_dir: str):
+#     """Fetch datasets and save one JSON per region."""
+#     print("=== Processing Datasets ===")
+#
+#     print("Fetching valid instrument/region/delay/universe combinations...")
+#     options_df = fetch_all_combinations(session)
+#     if options_df is None or options_df.empty:
+#         print("No simulation options fetched; aborting dataset fetch.")
+#         return
+#
+#     all_datasets: list[pd.DataFrame] = []
+#     combo_idx = 0
+#
+#     for _, row in options_df.iterrows():
+#         instrument_type = row.get("InstrumentType")
+#         region = row.get("Region")
+#         delay = row.get("Delay")
+#         universes = row.get("Universe") or []
+#
+#         for universe in universes:
+#             combo_idx += 1
+#             print(f"[{combo_idx}] {instrument_type} / {region} / D{delay} / {universe}")
+#             try:
+#                 df = fetch_datasets_for_combo(session, instrument_type, region, delay, universe)
+#                 print(f"  -> {len(df)} rows")
+#                 all_datasets.append(df)
+#             except Exception as exc:
+#                 print(f"  -> Failed: {exc}")
+#
+#     if not all_datasets:
+#         print("No datasets fetched; nothing to save.")
+#         return
+#
+#     combined_df = pd.concat([df for df in all_datasets if not df.empty], ignore_index=True)
+#     if combined_df.empty:
+#         print("No datasets fetched; nothing to save.")
+#         return
+#
+#     regions = sorted(combined_df["param_region"].dropna().unique())
+#     print(f"Found regions: {', '.join(regions)}")
+#
+#     for region in regions:
+#         region_df = combined_df[combined_df["param_region"] == region]
+#         region_unique = merge_and_deduplicate([region_df])
+#
+#         region_list = []
+#         for _, row in region_unique.iterrows():
+#             record = {col: to_jsonable(row[col]) for col in row.index}
+#             region_list.append(record)
+#
+#         filename = f"{region.replace(' ', '_').lower()}_datasets.json"
+#         filepath = os.path.join(dataset_dir, filename)
+#         with open(filepath, "w", encoding="utf-8") as f:
+#             json.dump(region_list, f, ensure_ascii=False, indent=2)
+#
+#         print(f"✓ Created {filename} with {len(region_list)} datasets")
 
 
 def process_documentation(session: ace_lib.SingleSession, knowledge_dir: str):
@@ -240,7 +242,7 @@ def main():
 
     # Ensure knowledge directory exists
     knowledge_dir = ensure_knowledge_dir()
-    dataset_dir = knowledge_dir  # Save datasets directly under knowledge
+    # dataset_dir = knowledge_dir  # Save datasets directly under knowledge (disabled)
     print(f"Knowledge directory: {knowledge_dir}\n")
 
     # Process documentation (tutorials/pages)
@@ -262,16 +264,15 @@ def main():
         traceback.print_exc()
         return
 
-    # Process datasets by region
-    print("\nStarting dataset processing...\n")
-    try:
-        process_datasets(session, dataset_dir)
-    except Exception as exc:
-        print(f"✗ Failed to process datasets: {exc}")
-        import traceback
-        traceback.print_exc()
-        return
-
+    # Dataset processing disabled; re-enable by uncommenting the block below.
+    # print("\nStarting dataset processing...\n")
+    # try:
+    #     process_datasets(session, dataset_dir)
+    # except Exception as exc:
+    #     print(f"✗ Failed to process datasets: {exc}")
+    #     import traceback
+    #     traceback.print_exc()
+    #     return
 
     print("\n=== Processing Complete ===")
 
@@ -18,6 +18,7 @@ PREFERRED_MODELS = [
 # Final chosen model will be detected at runtime from supported list
 MODEL_NAME = None
 COLLECTION_NAME = "brain_kb_v5"
+BATCH_SIZE = 128  # batch upserts to avoid huge single writes
 
 # Optional imports for different file types
 try:
@@ -34,7 +35,11 @@ class KnowledgeBase:
     def __init__(self, kb_path="knowledge", db_path="vector_db"):
         self.kb_path = os.path.abspath(kb_path)
         self.db_path = os.path.abspath(db_path)
+        self.meta_path = os.path.join(self.db_path, "_meta.json")
+        self.manifest_path = os.path.join(self.db_path, "_manifest.json")
         self._collection_reset_guard = False
+        self._query_reset_guard = False
+        self._sync_lock = threading.Lock()
 
         if not os.path.exists(self.kb_path):
             os.makedirs(self.kb_path)
@@ -71,9 +76,17 @@ class KnowledgeBase:
         # Store chosen model name for reference
         global MODEL_NAME
         MODEL_NAME = chosen
+
+        # Cache embedding dimension (detects library/model changes that corrupt existing indexes)
+        self.embed_dim = self._get_embedding_dim()
+        self.chroma_version = getattr(chromadb, "__version__", "unknown")
+
+        # If the stored index was built with a different model/dimension/chromadb version, wipe it
+        self._maybe_reset_for_incompatibility(chosen, self.embed_dim, self.chroma_version)
 
         # Initialize Vector DB
         self._init_collection()
+        self._healthcheck()
 
         # Initial sync
         self.sync_knowledge()
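The new startup path records which embedding model built the on-disk index and wipes the index when the environment no longer matches. Per the _init_collection hunk below, the record is a small JSON sidecar; an illustrative shape with hypothetical values:

# Illustrative contents of vector_db/_meta.json (values are hypothetical):
meta = {
    "model": "chosen-embedding-model-id",
    "embed_dim": 384,
    "chroma_version": "0.5.11",
}
# If any field differs from the live model/dimension/chromadb version,
# the index is deleted and rebuilt, since vectors of another dimension
# would make the stored HNSW index unusable.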
@@ -85,65 +98,153 @@ class KnowledgeBase:
         """(Re)initialize Chroma client/collection. If recreate=True, wipe on-disk index."""
         if recreate and os.path.exists(self.db_path):
             shutil.rmtree(self.db_path, ignore_errors=True)
-        self.client = chromadb.PersistentClient(path=self.db_path)
-        self.collection = self.client.get_or_create_collection(
-            name=COLLECTION_NAME,
-            metadata={"hnsw:space": "cosine"}
-        )
+        try:
+            self.client = chromadb.PersistentClient(path=self.db_path)
+            self.collection = self.client.get_or_create_collection(
+                name=COLLECTION_NAME,
+                metadata={"hnsw:space": "cosine"}
+            )
+        except Exception as exc:
+            # If collection load itself fails, wipe and retry once to clear corrupted segments
+            if not recreate:
+                shutil.rmtree(self.db_path, ignore_errors=True)
+                return self._init_collection(recreate=True)
+            raise
+
+        # Persist metadata about the embedding model used to build this index
+        try:
+            os.makedirs(self.db_path, exist_ok=True)
+            with open(self.meta_path, "w", encoding="utf-8") as f:
+                json.dump({
+                    "model": MODEL_NAME,
+                    "embed_dim": self.embed_dim,
+                    "chroma_version": self.chroma_version,
+                }, f)
+        except Exception:
+            pass  # Metadata failure should not block runtime
+
+    def _healthcheck(self):
+        """Validate index readability right after startup; rebuild if corrupted."""
+        try:
+            _ = self.collection.count()
+        except Exception as e:
+            msg = str(e).lower()
+            if any(x in msg for x in ["hnsw", "segment", "compaction", "backfill"]):
+                print("Detected index corruption on startup. Rebuilding vector_db...")
+                shutil.rmtree(self.db_path, ignore_errors=True)
+                self._init_collection(recreate=True)
+                self.sync_knowledge(allow_reset=False)
+            else:
+                print(f"Index healthcheck encountered an unexpected error: {e}")
+
+    def _maybe_reset_for_incompatibility(self, chosen_model: str, embed_dim: int, chroma_version: str):
+        """If existing index meta differs (model/dimension/chromadb), wipe it."""
+        if not os.path.exists(self.db_path):
+            return
+        try:
+            with open(self.meta_path, "r", encoding="utf-8") as f:
+                meta = json.load(f)
+            prev_model = meta.get("model")
+            prev_dim = meta.get("embed_dim")
+            prev_chroma = meta.get("chroma_version")
+            if prev_model != chosen_model or prev_dim != embed_dim or prev_chroma != chroma_version:
+                shutil.rmtree(self.db_path, ignore_errors=True)
+        except Exception:
+            # If meta cannot be read, assume stale/corrupted and rebuild
+            shutil.rmtree(self.db_path, ignore_errors=True)
+
+    def _get_embedding_dim(self) -> int:
+        for vec in self.model.embed(["dimension_probe"]):
+            try:
+                return len(vec)
+            except Exception:
+                return len(list(vec))
+        raise RuntimeError("Failed to determine embedding dimension")
 
     def sync_knowledge(self, allow_reset: bool = True):
         """Scans the knowledge folder and updates the vector database."""
+        if not self._sync_lock.acquire(blocking=False):
+            print("Sync already running, skip this trigger.")
+            return
+
         print("Syncing knowledge base...")
+        manifest = self._load_manifest()
+        updated_manifest = {}
         supported_extensions = (".txt", ".md", ".pdf", ".docx", ".json")
-        for filename in os.listdir(self.kb_path):
-            file_path = os.path.join(self.kb_path, filename)
-            if os.path.isfile(file_path) and filename.lower().endswith(supported_extensions):
-                try:
-                    content = self._extract_text(file_path)
-                    if content:
-                        # Sliding window chunking on original text
-                        chunk_size = 400
-                        overlap = 80
-                        original_chunks = []
-                        for i in range(0, len(content), chunk_size - overlap):
-                            chunk = content[i:i + chunk_size].strip()
-                            if chunk:
-                                original_chunks.append(chunk)
-
-                        if original_chunks:
-                            # Normalize for embedding generation only (not for storage)
-                            normalized_chunks = [c.lower().replace('_', ' ') for c in original_chunks]
-
-                            ids = [f"{filename}_{i}" for i in range(len(original_chunks))]
-                            metadatas = [{"source": filename, "chunk": i} for i in range(len(original_chunks))]
-
-                            # Compute embeddings from normalized text
-                            embeddings = []
-                            for v in self.model.embed(normalized_chunks):
-                                try:
-                                    embeddings.append(v.tolist())
-                                except Exception:
-                                    embeddings.append(list(v))
+        current_files = []
+        try:
+            for filename in os.listdir(self.kb_path):
+                file_path = os.path.join(self.kb_path, filename)
+                if os.path.isfile(file_path) and filename.lower().endswith(supported_extensions):
+                    current_files.append(filename)
+                    mtime = os.path.getmtime(file_path)
+                    size = os.path.getsize(file_path)
+                    prev_meta = manifest.get(filename)
+                    # Skip unchanged files
+                    if prev_meta and prev_meta.get("mtime") == mtime and prev_meta.get("size") == size:
+                        updated_manifest[filename] = prev_meta
+                        continue
+                    try:
+                        content = self._extract_text(file_path)
+                        if content:
+                            # Sliding window chunking on original text
+                            chunk_size = 800
+                            overlap = 80
+                            original_chunks = []
+                            for i in range(0, len(content), chunk_size - overlap):
+                                chunk = content[i:i + chunk_size].strip()
+                                if chunk:
+                                    original_chunks.append(chunk)
 
-                            # Store ORIGINAL text (not normalized) so users see the real content
-                            self.collection.upsert(
-                                documents=original_chunks,
-                                ids=ids,
-                                metadatas=metadatas,
-                                embeddings=embeddings
-                            )
-                            print(f"  ✓ Indexed {filename}: {len(original_chunks)} chunks")
+                            if original_chunks:
+                                # Normalize for embedding generation only (not for storage)
+                                normalized_chunks = [c.lower().replace('_', ' ') for c in original_chunks]
+
+                                ids = [f"{filename}_{i}" for i in range(len(original_chunks))]
+                                metadatas = [{"source": filename, "chunk": i} for i in range(len(original_chunks))]
+
+                                # Compute embeddings from normalized text
+                                embeddings = []
+                                for v in self.model.embed(normalized_chunks):
+                                    try:
+                                        embeddings.append(v.tolist())
+                                    except Exception:
+                                        embeddings.append(list(v))
+
+                                # Store ORIGINAL text (not normalized) so users see the real content
+                                for start in range(0, len(original_chunks), BATCH_SIZE):
+                                    end = start + BATCH_SIZE
+                                    self.collection.upsert(
+                                        documents=original_chunks[start:end],
+                                        ids=ids[start:end],
+                                        metadatas=metadatas[start:end],
+                                        embeddings=embeddings[start:end]
+                                    )
+                                print(f"  ✓ Indexed {filename}: {len(original_chunks)} chunks (batched)")
+                        updated_manifest[filename] = {"mtime": mtime, "size": size}
+                    except Exception as e:
+                        err_msg = str(e)
+                        print(f"Error processing {filename}: {err_msg}")
+                        # Auto-recover if HNSW/compaction/index errors occur
+                        if allow_reset and any(x in err_msg.lower() for x in ["hnsw", "compaction", "segment reader"]):
+                            if not self._collection_reset_guard:
+                                print("Detected index corruption. Rebuilding vector_db and retrying sync once...")
+                                self._collection_reset_guard = True
+                                self._init_collection(recreate=True)
+                                return self.sync_knowledge(allow_reset=False)
+            # Remove deleted files from the index
+            deleted_files = set(manifest.keys()) - set(current_files)
+            for filename in deleted_files:
+                try:
+                    self.collection.delete(where={"source": filename})
+                    print(f"  ✓ Removed deleted file from index: {filename}")
                 except Exception as e:
-                    err_msg = str(e)
-                    print(f"Error processing {filename}: {err_msg}")
-                    # Auto-recover if HNSW/compaction/index errors occur
-                    if allow_reset and any(x in err_msg.lower() for x in ["hnsw", "compaction", "segment reader"]):
-                        if not self._collection_reset_guard:
-                            print("Detected index corruption. Rebuilding vector_db and retrying sync once...")
-                            self._collection_reset_guard = True
-                            self._init_collection(recreate=True)
-                            return self.sync_knowledge(allow_reset=False)
-        print("Knowledge base sync complete.")
+                    print(f"  ! Failed to remove {filename}: {e}")
+            # Persist manifest
+            self._save_manifest(updated_manifest)
+            print("Knowledge base sync complete.")
+        finally:
+            self._sync_lock.release()
 
     def _extract_text(self, file_path):
         ext = os.path.splitext(file_path)[1].lower()
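Two constants in the rewritten sync loop can be sanity-checked by hand: the chunk step is chunk_size - overlap = 800 - 80 = 720 characters, and upserts are sliced into groups of BATCH_SIZE = 128. An illustrative check:

# Sliding-window starts for a 2000-character document (step = 800 - 80):
content = "x" * 2000
chunk_size, overlap = 800, 80
print(list(range(0, len(content), chunk_size - overlap)))  # [0, 720, 1440]

# Batch slicing for 300 chunks with BATCH_SIZE = 128:
BATCH_SIZE = 128
ids = [f"doc_{i}" for i in range(300)]
sizes = [len(ids[s:s + BATCH_SIZE]) for s in range(0, len(ids), BATCH_SIZE)]
print(sizes)  # [128, 128, 44] -> three upsert calls instead of one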
@@ -175,7 +276,7 @@ class KnowledgeBase:
             return json.dumps(data, ensure_ascii=False, indent=2)
         return None
 
-    def query(self, text, top_k=5, distance_threshold=0.8):
+    def query(self, text, top_k=5, distance_threshold=0.8, allow_reset: bool = True):
         """Retrieves relevant snippets from the knowledge base.
 
         Uses cosine distance (lower is better). A result is treated as a hit only
@@ -237,9 +338,25 @@ class KnowledgeBase:
             context = "\n---\n".join([h["text"] for h in hits]) if is_hit else ""
             return {"hit": is_hit, "context": context, "hits": hits}
         except Exception as e:
-            print(f"Query error: {e}")
+            err_msg = str(e)
+            print(f"Query error: {err_msg}")
             import traceback
             traceback.print_exc()
+
+            # Auto-recover if HNSW/compaction/backfill errors surface during query
+            if allow_reset and any(x in err_msg.lower() for x in ["hnsw", "compaction", "segment reader", "backfill"]):
+                if not self._query_reset_guard:
+                    print("Detected index corruption during query. Rebuilding vector_db and retrying once...")
+                    self._query_reset_guard = True
+                    try:
+                        self._init_collection(recreate=True)
+                        self.sync_knowledge(allow_reset=False)
+                        # Retry query once with guard disabled to avoid loops
+                        self._query_reset_guard = False
+                        return self.query(text, top_k=top_k, distance_threshold=distance_threshold, allow_reset=False)
+                    except Exception as inner_e:
+                        print(f"Auto-rebuild after query failure also failed: {inner_e}")
+                        self._query_reset_guard = False
             return {"hit": False, "context": "", "hits": []}
 
     def start_watcher(self):
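The query-side recovery mirrors the sync-side guard: an error message that looks like index corruption triggers one rebuild and one retry, with a flag preventing a loop. A compact, self-contained model of that control flow (stub names, not the package's code):

calls = {"n": 0}

def run():
    calls["n"] += 1
    if calls["n"] == 1:
        raise RuntimeError("hnsw segment reader error")  # simulated corruption
    return "ok"

def rebuild():
    print("rebuilding index...")

def do_query(allow_reset=True):
    try:
        return run()
    except Exception as e:
        if allow_reset and "hnsw" in str(e).lower():
            rebuild()
            return do_query(allow_reset=False)  # retry exactly once
        return None

print(do_query())  # prints "rebuilding index..." then "ok"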
@@ -248,18 +365,44 @@ class KnowledgeBase:
         self.observer.schedule(event_handler, self.kb_path, recursive=False)
         self.observer.start()
 
+    def _load_manifest(self):
+        if not os.path.exists(self.manifest_path):
+            return {}
+        try:
+            with open(self.manifest_path, "r", encoding="utf-8") as f:
+                return json.load(f)
+        except Exception:
+            return {}
+
+    def _save_manifest(self, data):
+        try:
+            os.makedirs(self.db_path, exist_ok=True)
+            with open(self.manifest_path, "w", encoding="utf-8") as f:
+                json.dump(data, f, ensure_ascii=False, indent=2)
+        except Exception as e:
+            print(f"  ! Failed to save manifest: {e}")
+
 class KBHandler(FileSystemEventHandler):
     def __init__(self, kb_instance):
         self.kb = kb_instance
         self.supported_extensions = (".txt", ".md", ".pdf", ".docx", ".json")
+        self._debounce_timer = None
+
+    def _trigger_sync(self):
+        def run():
+            self.kb.sync_knowledge()
+        if self._debounce_timer and self._debounce_timer.is_alive():
+            return
+        self._debounce_timer = threading.Timer(0.5, run)
+        self._debounce_timer.start()
 
     def on_modified(self, event):
         if not event.is_directory and event.src_path.lower().endswith(self.supported_extensions):
             print(f"File modified: {event.src_path}. Re-syncing...")
-            threading.Thread(target=self.kb.sync_knowledge).start()
+            self._trigger_sync()
 
     def on_created(self, event):
         if not event.is_directory and event.src_path.lower().endswith(self.supported_extensions):
             print(f"File created: {event.src_path}. Syncing...")
-            threading.Thread(target=self.kb.sync_knowledge).start()
+            self._trigger_sync()
 
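The watcher hunk above replaces one sync thread per filesystem event with a coalescing timer: the first event arms a 0.5 s Timer and events arriving while it is alive are dropped, so a burst of saves produces a single sync. A standalone sketch of the same pattern:

import threading
import time

timer = None

def trigger(action, delay=0.5):
    global timer
    if timer and timer.is_alive():
        return  # a sync is already scheduled; fold this event into it
    timer = threading.Timer(delay, action)
    timer.start()

for _ in range(10):  # simulate a burst of file events
    trigger(lambda: print("sync"))
    time.sleep(0.01)
time.sleep(1)  # "sync" prints exactly once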
@@ -1,12 +1,7 @@
 openai
-pyautogui
 Pillow
-pyperclip
-keyboard
-fastembed>=0.3.4
-chromadb>=0.5.0
+fastembed>=0.3.6
+chromadb>=0.5.11
 watchdog
-urllib3>=2.5.0
 pypdf
 python-docx
-pywin32
@@ -4729,7 +4729,7 @@ async def main():
         input_str = input()
         if input_str == "":
             config_path = os.path.join(os.path.dirname(__file__), 'transformer_config.json')
-            with open(config_path, 'r') as f:
+            with open(config_path, 'r', encoding='utf-8') as f:
                 config = json.load(f)
             print("\n" + "="*60)
             print("✓ 已从 transformer_config.json 加载账号配置")
@@ -122,7 +122,7 @@ def save_simulation_result(result: dict) -> None:
 
     os.makedirs(folder_path, exist_ok=True)
 
-    with open(file_path, "w") as file:
+    with open(file_path, "w", encoding="utf-8") as file:
         json.dump(result, file)
 
 
@@ -122,7 +122,7 @@ def save_simulation_result(result: dict) -> None:
 
     os.makedirs(folder_path, exist_ok=True)
 
-    with open(file_path, "w") as file:
+    with open(file_path, "w", encoding="utf-8") as file:
         json.dump(result, file)
 
 
@@ -122,7 +122,7 @@ def save_simulation_result(result: dict) -> None:
 
     os.makedirs(folder_path, exist_ok=True)
 
-    with open(file_path, "w") as file:
+    with open(file_path, "w", encoding="utf-8") as file:
         json.dump(result, file)
 
 
@@ -117,7 +117,7 @@ def get_credentials() -> tuple[str, str]:
     credentials_file_path = os.path.join(credentials_folder_path, "platform-brain.json")
 
     if Path(credentials_file_path).exists() and os.path.getsize(credentials_file_path) > 2:
-        with open(credentials_file_path) as file:
+        with open(credentials_file_path, encoding="utf-8") as file:
             data = json.loads(file.read())
     else:
         os.makedirs(credentials_folder_path, exist_ok=True)
@@ -128,7 +128,7 @@ def get_credentials() -> tuple[str, str]:
         email = input("Email:\n")
         password = getpass.getpass(prompt="Password:")
         data = {"email": email, "password": password}
-        with open(credentials_file_path, "w") as file:
+        with open(credentials_file_path, "w", encoding="utf-8") as file:
             json.dump(data, file)
     return (data["email"], data["password"])
 
@@ -241,7 +241,7 @@ def get_user_json():
     json_path = re.sub(r'^["\']+|["\']+$', '', raw_path.strip())
     if os.path.exists(json_path):
         try:
-            with open(json_path, 'r') as f:
+            with open(json_path, 'r', encoding='utf-8') as f:
                 alpha_list = json.load(f)
             # Check master log for previous progress
             file_name = os.path.basename(json_path)
@@ -349,7 +349,7 @@ def simulation_worker(session, alpha_list, mode, json_path, location_path, retry
             sent_count += len(batch)
             update_master_log(json_path, sent_count - 1)
             locations[str(time.time())] = location
-            with open(location_path, 'w') as f:
+            with open(location_path, 'w', encoding='utf-8') as f:
                 json.dump(locations, f, indent=2)
             # Do NOT overwrite the input JSON file
             logger.info(f'Simulation sent, location(s) saved: {location}')
@@ -370,7 +370,7 @@ def result_worker(session, location_path, result_path, poll_interval=30):
         if not os.path.exists(location_path):
             time.sleep(poll_interval)
             continue
-        with open(location_path, 'r') as f:
+        with open(location_path, 'r', encoding='utf-8') as f:
             locations = json.load(f)
         for loc_key, loc_val in locations.items():
             if loc_key in results:
@@ -418,7 +418,7 @@ def result_worker(session, location_path, result_path, poll_interval=30):
                 alpha = session.get(f"https://api.worldquantbrain.com/alphas/{alpha_id}")
                 results[loc_key] = alpha.json()
                 logger.info(f"Result fetched for location {loc_val}")
-                with open(result_path, 'w') as f:
+                with open(result_path, 'w', encoding='utf-8') as f:
                     json.dump(results, f, indent=2)
             except Exception as e:
                 logger.error(f'Error fetching result for {loc_val}: {e}')
@@ -122,7 +122,7 @@ def save_simulation_result(result: dict) -> None:
 
     os.makedirs(folder_path, exist_ok=True)
 
-    with open(file_path, "w") as file:
+    with open(file_path, "w", encoding="utf-8") as file:
         json.dump(result, file)
 
 
@@ -94,7 +94,7 @@ def get_json_filepath():
     # Verify file exists
     if os.path.exists(json_path):
         try:
-            with open(json_path, 'r') as f:
+            with open(json_path, 'r', encoding='utf-8') as f:
                 data = json.load(f)
             print(f"✓ 成功加载 JSON 文件: {json_path}")
             return json_path, data
@@ -122,7 +122,7 @@ def save_simulation_result(result: dict) -> None:
 
     os.makedirs(folder_path, exist_ok=True)
 
-    with open(file_path, "w") as file:
+    with open(file_path, "w", encoding="utf-8") as file:
         json.dump(result, file)
 
 
@@ -1376,7 +1376,7 @@ def load_config() -> Dict[str, Any]:
     config_file = _resolve_config_path()
     if os.path.exists(config_file):
         try:
-            with open(config_file, 'r') as f:
+            with open(config_file, 'r', encoding='utf-8') as f:
                 return json.load(f)
         except (IOError, json.JSONDecodeError) as e:
             logger.error(f"Error loading config file {config_file}: {e}")
@@ -1390,7 +1390,7 @@ def save_config(config: Dict[str, Any]):
     """
     config_file = _resolve_config_path(for_write=True)
     try:
-        with open(config_file, 'w') as f:
+        with open(config_file, 'w', encoding='utf-8') as f:
             json.dump(config, f, indent=2)
     except IOError as e:
         logger.error(f"Error saving config file to {config_file}: {e}")
@@ -2093,7 +2093,7 @@ async def save_simulation_data(simulation_id: str, filename: str) -> Dict[str, A
         simulation_data = await brain_client.get_simulation_status(simulation_id)
 
         # Save to file
-        with open(filename, 'w') as f:
+        with open(filename, 'w', encoding='utf-8') as f:
             json.dump(simulation_data, f, indent=2)
 
         return {"success": True, "filename": filename, "simulation_id": simulation_id}
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: cnhkmcp
-Version: 2.1.0
+Version: 2.1.2
 Summary: A comprehensive Model Context Protocol (MCP) server for quantitative trading platform integration
 Home-page: https://github.com/cnhk/cnhkmcp
 Author: CNHK
@@ -2,7 +2,7 @@ cnhkmcp/__init__.py,sha256=6HAUbLyRJK-E8TxYkbuGHZ57oAxeH8LgrUVK19ttNhY,2758
 cnhkmcp/untracked/arXiv_API_Tool_Manual.md,sha256=I3hvI5mpmIjBuWptufoVSWFnuhyUc67oCGHEuR0p_xs,13552
 cnhkmcp/untracked/arxiv_api.py,sha256=-E-Ub9K-DXAYaCjrbobyfQ9H97gaZBc7pL6xPEyVHec,9020
 cnhkmcp/untracked/forum_functions.py,sha256=QW-CplAsqDkw-Wcwq-1tuZBq48dEO-vXZ8xw7X65EuE,42303
-cnhkmcp/untracked/platform_functions.py,sha256=3tXQnlYlXJyrYdEvys6q-4L-PMAk2sOVkBlntsbB4Ig,123322
+cnhkmcp/untracked/platform_functions.py,sha256=M2U2BWXZsqLBCFlZo_wv-UARDn4bNEM598db7WeBL8I,123340
 cnhkmcp/untracked/sample_mcp_config.json,sha256=QSFvZ086bxUQsvmLjcE6pL9ObzKn4FGnt9npWPo7Eps,1044
 cnhkmcp/untracked/user_config.json,sha256=_INn1X1qIsITrmEno-BRlQOAGm9wnNCw-6B333DEvnk,695
 cnhkmcp/untracked/示例参考文档_BRAIN_Alpha_Test_Requirements_and_Tips.md,sha256=W4dtQrqoTN72UyvIsvkGRF0HFOJLHSDeeSlbR3gqQg0,17133
@@ -13,28 +13,28 @@ cnhkmcp/untracked/示例工作流_Dataset_Exploration_Expert_Manual.md,sha256=-C
 cnhkmcp/untracked/示例工作流_daily_report_workflow.md,sha256=6aNmPqWRn09XdQMRxoVTka9IYVOUv5LcWparHu16EfQ,9460
 cnhkmcp/untracked/配置前运行我_安装必要依赖包.py,sha256=BnUyL5g6PaC62yEuS-8vcDSJ0oKu3k6jqQxi2jginuQ,6612
 cnhkmcp/untracked/AI桌面插件/README.md,sha256=HMeOogrMdlPCcbWoeCwJaWgnkmsEOO4bGyfzqpLP4h4,1246
+cnhkmcp/untracked/AI桌面插件/ace.log,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cnhkmcp/untracked/AI桌面插件/config.json,sha256=flNYMbGeZ074viP30_wq3QieW143SGfhTg89hCs1EGM,1935
 cnhkmcp/untracked/AI桌面插件/icon.ico,sha256=oPkiKlN_f8zXTZhBmr6BVGjxfWr_wdIe5Flgq02wp-s,3284
 cnhkmcp/untracked/AI桌面插件/icon.png,sha256=xg3EfFWiXzibPH38JCC0xpQMfGH6asOZbT0lWFVzl00,1628
-cnhkmcp/untracked/AI桌面插件/main.py,sha256=7ILjHQ3xb6wK5hKxjnt9n2fXoQuYZe0Se4glNUKBf58,23167
-cnhkmcp/untracked/AI桌面插件/process_knowledge_base.py,sha256=kCjmB8Ib5JOawha-r7dDrR-SeBc21YeLz9coP_KQK-M,9537
-cnhkmcp/untracked/AI桌面插件/rag_engine.py,sha256=0FPvDn39AB5FnJlSgfQhTZgfEmHuaJywjM6o7m1uSsw,11659
-cnhkmcp/untracked/AI桌面插件/requirements.txt,sha256=y94u587oPtCvSgIDCte1BndmzoRXaq8WXOTPQlO_qxE,138
+cnhkmcp/untracked/AI桌面插件/main.py,sha256=Jyj1fwwhPRkVKgQakRV3qY1ggzjy8eZX3VNuNp6QCG4,23015
+cnhkmcp/untracked/AI桌面插件/process_knowledge_base.py,sha256=hKY3Uus81cfhBIxuCPiOV-5RDNAM3NxQ_VD0T3BezwQ,10073
+cnhkmcp/untracked/AI桌面插件/rag_engine.py,sha256=fQiA817OOv9dNkNfR6M7oiXz2WaNZ78mEgwcnrF946E,18865
+cnhkmcp/untracked/AI桌面插件/requirements.txt,sha256=jLAbyxGnvi_f05YakzmC_vfVUbfaBUmHBULAmKl0N2o,82
 cnhkmcp/untracked/AI桌面插件/run.bat,sha256=6Zj0DvBzq86mcptoxWPjmIQms8hYboMWDECM3xA8cr4,32
 cnhkmcp/untracked/AI桌面插件/首次运行打开我.py,sha256=TA5lFBAAKNvGmKwomzW6N8zkVj2QC5SR0CLXUrRVDuw,9570
 cnhkmcp/untracked/AI桌面插件/get_knowledgeBase_tool/ace_lib.py,sha256=z-6PCt3z3gJ_2pO9-17dtKkiSfrXPLv-WE-Ff85fqiw,53749
 cnhkmcp/untracked/AI桌面插件/get_knowledgeBase_tool/fetch_all_datasets.py,sha256=NoRN5NoouW62Us1RvsbyTkgGu0w9ZWkARA1j4gDO5EY,5366
 cnhkmcp/untracked/AI桌面插件/get_knowledgeBase_tool/fetch_all_documentation.py,sha256=PUviCKtUkGlgyVxznZq5y_wEnn32S9-dzlN91tbbWYs,5260
 cnhkmcp/untracked/AI桌面插件/get_knowledgeBase_tool/fetch_all_operators.py,sha256=CYvLDqSsKv9xcsm2TSbQoDlKr6-g1ixlXkB6qAIxtJg,2934
-cnhkmcp/untracked/AI桌面插件/get_knowledgeBase_tool/helpful_functions.py,sha256=VS-rh4T2CMWLFK4clI7-DYsnqwO5dx5L-rgL8U8BmT8,6622
+cnhkmcp/untracked/AI桌面插件/get_knowledgeBase_tool/helpful_functions.py,sha256=Jc8gclRkvGx7el0XgTmZ67qwMSHrSluYheCUvzd6mbg,6640
 cnhkmcp/untracked/AI桌面插件/knowledge/test.txt,sha256=ZnOpCeIdB6pBvv-0-nq6EjJtwrVsBnj9nDuxwFVYC-E,82
-cnhkmcp/untracked/AI桌面插件/vector_db/chroma.sqlite3,sha256=wMXuSaCP2rp2C3mNzXctbw8igQckJQoFpHCEA8Jew4I,180224
 cnhkmcp/untracked/APP/.gitignore,sha256=oPCoVTNo82bhkN0c671LdjCpOTVpVhZI5NR75ztcg48,317
 cnhkmcp/untracked/APP/MODULAR_STRUCTURE.md,sha256=b5xV74-_RtXq2K1EsYDwMukO6lxjJ4-lnOAEnTHpFS0,4706
 cnhkmcp/untracked/APP/README.md,sha256=vb7hmQX0sH5aFNBmDCN5szMSDHm1_h2VKY4UKCt0aMk,11676
 cnhkmcp/untracked/APP/ace.log,sha256=HXNY3zIjxRA2b4xcRkQbX5qGdrb0Jk3lI-YnkMSVTow,11623
 cnhkmcp/untracked/APP/ace_lib.py,sha256=z-6PCt3z3gJ_2pO9-17dtKkiSfrXPLv-WE-Ff85fqiw,53749
-cnhkmcp/untracked/APP/helpful_functions.py,sha256=VS-rh4T2CMWLFK4clI7-DYsnqwO5dx5L-rgL8U8BmT8,6622
+cnhkmcp/untracked/APP/helpful_functions.py,sha256=Jc8gclRkvGx7el0XgTmZ67qwMSHrSluYheCUvzd6mbg,6640
 cnhkmcp/untracked/APP/mirror_config.txt,sha256=RL1jFYwcvDPkLd6tc_lqVcwjTLORWt5Qu0Ym_BTPaao,504
 cnhkmcp/untracked/APP/operaters.csv,sha256=g8m6z-u0x-CoqGFSp_g3UMyLJkdFLE5UwsNkueMH1yw,13610
 cnhkmcp/untracked/APP/requirements.txt,sha256=T7qwWI9C916AVkKpYWxbcCCCX8iPL7vu3Ha_exnltRY,859
@@ -45,10 +45,10 @@ cnhkmcp/untracked/APP/setup_tsinghua.sh,sha256=mMDXTqCRIXtSHa_1pU0jCnNF-xajqfZDl
 cnhkmcp/untracked/APP/ssrn-3332513.pdf,sha256=GEwf1Srtk-fTvF03dhTEjXJstHBARIUg31k7s5kxS98,2082078
 cnhkmcp/untracked/APP/usage.md,sha256=lPpA6qqAMvVsm41ikbRR1ZWFcuPSgqhMXOUig52eZCI,16164
 cnhkmcp/untracked/APP/运行打开我.py,sha256=BMCOXXpywp0uUwSsUAL-UVXV3MLKnXy34sgAywfjfBk,99937
-cnhkmcp/untracked/APP/Tranformer/Transformer.py,sha256=Rc13OG8nVx22sfr7UpbwxLxU5b-9xhIVyKDqx20W33U,188808
+cnhkmcp/untracked/APP/Tranformer/Transformer.py,sha256=cq9JYZv-s2kN3PbNj0Fk7L89ig74swhFMxgWEwQRZwA,188826
 cnhkmcp/untracked/APP/Tranformer/ace.log,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cnhkmcp/untracked/APP/Tranformer/ace_lib.py,sha256=z-6PCt3z3gJ_2pO9-17dtKkiSfrXPLv-WE-Ff85fqiw,53749
-cnhkmcp/untracked/APP/Tranformer/helpful_functions.py,sha256=VS-rh4T2CMWLFK4clI7-DYsnqwO5dx5L-rgL8U8BmT8,6622
+cnhkmcp/untracked/APP/Tranformer/helpful_functions.py,sha256=Jc8gclRkvGx7el0XgTmZ67qwMSHrSluYheCUvzd6mbg,6640
 cnhkmcp/untracked/APP/Tranformer/parsetab.py,sha256=29clH5xFEmKpqzRvrLN89QE8JFJNYFhH-gEFR4y7448,7650
 cnhkmcp/untracked/APP/Tranformer/template_summary.txt,sha256=gWz5LFlPtOmw0JnLj68hgWsoSFNyA18uCJWIYWHLbe8,111054
 cnhkmcp/untracked/APP/Tranformer/transformer_config.json,sha256=q3Io_1o_BAt-2GNAUKgCs_YR3exkiGFnUqykiPUC1jw,191
@@ -69,13 +69,13 @@ cnhkmcp/untracked/APP/give_me_idea/ace_lib.py,sha256=z-6PCt3z3gJ_2pO9-17dtKkiSfr
 cnhkmcp/untracked/APP/give_me_idea/alpha_data_specific_template_master.py,sha256=3RXg5vcwl-YpuMwRuv4TLr2DCOmxxA0Z3n2DgRAlf9s,9006
 cnhkmcp/untracked/APP/give_me_idea/fetch_all_datasets.py,sha256=NoRN5NoouW62Us1RvsbyTkgGu0w9ZWkARA1j4gDO5EY,5366
 cnhkmcp/untracked/APP/give_me_idea/fetch_all_operators.py,sha256=CYvLDqSsKv9xcsm2TSbQoDlKr6-g1ixlXkB6qAIxtJg,2934
-cnhkmcp/untracked/APP/give_me_idea/helpful_functions.py,sha256=VS-rh4T2CMWLFK4clI7-DYsnqwO5dx5L-rgL8U8BmT8,6622
+cnhkmcp/untracked/APP/give_me_idea/helpful_functions.py,sha256=Jc8gclRkvGx7el0XgTmZ67qwMSHrSluYheCUvzd6mbg,6640
 cnhkmcp/untracked/APP/give_me_idea/what_is_Alpha_template.md,sha256=QjwX0_b0DhhiNlo3ZwkIfXXSsJnk_FyjkZftyVwnCZ8,2317
-cnhkmcp/untracked/APP/hkSimulator/ace_lib.py,sha256=2AM67BKth4foMWkCdsz1CkOrSophtEFs7iatWqP8CVo,53481
-cnhkmcp/untracked/APP/hkSimulator/autosimulator.py,sha256=JmhmboakiDDQetWmn8t0-k6dgbbm0T3i-W6imVwuuGA,18641
-cnhkmcp/untracked/APP/hkSimulator/helpful_functions.py,sha256=VS-rh4T2CMWLFK4clI7-DYsnqwO5dx5L-rgL8U8BmT8,6622
+cnhkmcp/untracked/APP/hkSimulator/ace_lib.py,sha256=46Mi2zMEmLXw53ixRjGiyXyBNYe5QAxzZlqYJ84Vsb4,53517
+cnhkmcp/untracked/APP/hkSimulator/autosimulator.py,sha256=KHiwi4tCoLeOKg-14ZW06g2Z4mfG6kj1vWzFWH2VUrM,18713
+cnhkmcp/untracked/APP/hkSimulator/helpful_functions.py,sha256=Jc8gclRkvGx7el0XgTmZ67qwMSHrSluYheCUvzd6mbg,6640
 cnhkmcp/untracked/APP/simulator/alpha_submitter.py,sha256=Wxx7DyFr0VwH709JhxXPj1Jiz8dShaVaYar3HTVMgv4,15652
-cnhkmcp/untracked/APP/simulator/simulator_wqb.py,sha256=NBvQKf4cYSQgknF0mrrYjt_x9E8w69lavSdxPqq2sCA,23756
+cnhkmcp/untracked/APP/simulator/simulator_wqb.py,sha256=NsPSTs9jmVRguN5_y3qJRNr3hSFUs9JFQgh6y_mxyHw,23774
 cnhkmcp/untracked/APP/static/brain.js,sha256=pIRQdvyx6_LFzgRLiYfRhInr2WHd5dEjfM57-9VzUjg,20533
 cnhkmcp/untracked/APP/static/decoder.js,sha256=sKyfgsleHvokCMw6Zp-XfcjixkT8cyMvw6ASDhN6b9I,61987
 cnhkmcp/untracked/APP/static/feature_engineering.js,sha256=X6nis3FmytrNAE8mM-U6-VqmqZMAQ2X59Iy9RmOF5_8,73895
@@ -97,15 +97,15 @@ cnhkmcp/untracked/APP/templates/simulator.html,sha256=dPAY-fT7oEDCW945ZLLPSUPoaw
 cnhkmcp/untracked/APP/templates/transformer_web.html,sha256=de5fNhtbdsMMyW5cygvvvW9vSJL81O2A_QPqTmr0usc,27513
 cnhkmcp/untracked/APP/缘分一道桥/ace_lib.py,sha256=z-6PCt3z3gJ_2pO9-17dtKkiSfrXPLv-WE-Ff85fqiw,53749
 cnhkmcp/untracked/APP/缘分一道桥/brain_alpha_inspector.py,sha256=KjU6PNyD1Xfqqa0IGKM-VFH_p_4RKBzhmGQczM14KiM,24703
-cnhkmcp/untracked/APP/缘分一道桥/helpful_functions.py,sha256=VS-rh4T2CMWLFK4clI7-DYsnqwO5dx5L-rgL8U8BmT8,6622
+cnhkmcp/untracked/APP/缘分一道桥/helpful_functions.py,sha256=Jc8gclRkvGx7el0XgTmZ67qwMSHrSluYheCUvzd6mbg,6640
 cnhkmcp/untracked/mcp文件论坛版2_如果原版启动不了浏览器就试这个/forum_functions.py,sha256=VuyUaguA0OjJbVRN5Vy8UEFXSAviS3jhDSRWyyPOtfo,18975
-cnhkmcp/untracked/mcp文件论坛版2_如果原版启动不了浏览器就试这个/platform_functions.py,sha256=aa5jCRjCORFars8CVahqwbk5ni0mLYWu9yND7Z4TGUM,99159
+cnhkmcp/untracked/mcp文件论坛版2_如果原版启动不了浏览器就试这个/platform_functions.py,sha256=pkf6vXIztnPg7uJYgxPir_dqHaR9JEbqu-Qw1YJm6oU,99195
 cnhkmcp/untracked/mcp文件论坛版2_如果原版启动不了浏览器就试这个/user_config.json,sha256=_INn1X1qIsITrmEno-BRlQOAGm9wnNCw-6B333DEvnk,695
 cnhkmcp/untracked/mcp文件论坛版2_如果原版启动不了浏览器就试这个/让AI读这个文档来学会下载浏览器.md,sha256=v5QPSMjRDh52ZjgC4h8QjImnaqlVRLjTHGxmGjTo36g,3396
 cnhkmcp/untracked/mcp文件论坛版2_如果原版启动不了浏览器就试这个/配置前运行我_安装必要依赖包.py,sha256=BnUyL5g6PaC62yEuS-8vcDSJ0oKu3k6jqQxi2jginuQ,6612
-cnhkmcp-2.1.0.dist-info/licenses/LICENSE,sha256=QLxO2eNMnJQEdI_R1UV2AOD-IvuA8zVrkHWA4D9gtoc,1081
-cnhkmcp-2.1.0.dist-info/METADATA,sha256=udLw-mGyJptnSdeUJ-HketojYt_Vz8_HCurCXRf9OOI,5171
-cnhkmcp-2.1.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-cnhkmcp-2.1.0.dist-info/entry_points.txt,sha256=lTQieVyIvjhSMK4fT-XwnccY-JBC1H4vVQ3V9dDM-Pc,70
-cnhkmcp-2.1.0.dist-info/top_level.txt,sha256=x--ibUcSgOS9Z_RWK2Qc-vfs7DaXQN-WMaaxEETJ1Bw,8
-cnhkmcp-2.1.0.dist-info/RECORD,,
+cnhkmcp-2.1.2.dist-info/licenses/LICENSE,sha256=QLxO2eNMnJQEdI_R1UV2AOD-IvuA8zVrkHWA4D9gtoc,1081
+cnhkmcp-2.1.2.dist-info/METADATA,sha256=RwgIoT95SQ_WKAusgghseGmAn4b1XKByDlVqjojNyyg,5171
+cnhkmcp-2.1.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+cnhkmcp-2.1.2.dist-info/entry_points.txt,sha256=lTQieVyIvjhSMK4fT-XwnccY-JBC1H4vVQ3V9dDM-Pc,70
+cnhkmcp-2.1.2.dist-info/top_level.txt,sha256=x--ibUcSgOS9Z_RWK2Qc-vfs7DaXQN-WMaaxEETJ1Bw,8
+cnhkmcp-2.1.2.dist-info/RECORD,,