cnhkmcp-2.1.1-py3-none-any.whl → cnhkmcp-2.1.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (25)
  1. cnhkmcp/untracked/AI桌面插件/get_knowledgeBase_tool/helpful_functions.py +1 -1
  2. cnhkmcp/untracked/AI桌面插件/process_knowledge_base.py +73 -72
  3. cnhkmcp/untracked/AI桌面插件/rag_engine.py +122 -70
  4. cnhkmcp/untracked/AI桌面插件/requirements.txt +2 -2
  5. cnhkmcp/untracked/APP/Tranformer/Transformer.py +1 -1
  6. cnhkmcp/untracked/APP/Tranformer/helpful_functions.py +1 -1
  7. cnhkmcp/untracked/APP/give_me_idea/helpful_functions.py +1 -1
  8. cnhkmcp/untracked/APP/helpful_functions.py +1 -1
  9. cnhkmcp/untracked/APP/hkSimulator/ace_lib.py +2 -2
  10. cnhkmcp/untracked/APP/hkSimulator/autosimulator.py +4 -4
  11. cnhkmcp/untracked/APP/hkSimulator/helpful_functions.py +1 -1
  12. cnhkmcp/untracked/APP/simulator/simulator_wqb.py +1 -1
  13. cnhkmcp/untracked/APP/缘分一道桥/helpful_functions.py +1 -1
  14. cnhkmcp/untracked/mcp文件论坛版2_如果原版启动不了浏览器就试这个/platform_functions.py +2 -2
  15. cnhkmcp/untracked/platform_functions.py +1 -1
  16. {cnhkmcp-2.1.1.dist-info → cnhkmcp-2.1.2.dist-info}/METADATA +1 -1
  17. {cnhkmcp-2.1.1.dist-info → cnhkmcp-2.1.2.dist-info}/RECORD +21 -25
  18. cnhkmcp/untracked/AI桌面插件/vector_db/_manifest.json +0 -326
  19. cnhkmcp/untracked/AI桌面插件/vector_db/_meta.json +0 -1
  20. cnhkmcp/untracked/AI桌面插件/vector_db/be5d957c-b724-46e3-91d1-999e9f5f7d28/index_metadata.pickle +0 -0
  21. cnhkmcp/untracked/AI桌面插件/vector_db/chroma.sqlite3 +0 -0
  22. {cnhkmcp-2.1.1.dist-info → cnhkmcp-2.1.2.dist-info}/WHEEL +0 -0
  23. {cnhkmcp-2.1.1.dist-info → cnhkmcp-2.1.2.dist-info}/entry_points.txt +0 -0
  24. {cnhkmcp-2.1.1.dist-info → cnhkmcp-2.1.2.dist-info}/licenses/LICENSE +0 -0
  25. {cnhkmcp-2.1.1.dist-info → cnhkmcp-2.1.2.dist-info}/top_level.txt +0 -0
@@ -122,7 +122,7 @@ def save_simulation_result(result: dict) -> None:
 
     os.makedirs(folder_path, exist_ok=True)
 
-    with open(file_path, "w") as file:
+    with open(file_path, "w", encoding="utf-8") as file:
         json.dump(result, file)
 
 
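Note: the recurring change across this release is adding an explicit encoding="utf-8" to open() calls; the same one-line fix appears in several files below. Without it, Python falls back to the locale code page (e.g. cp936 or cp1252 on Windows), so JSON files containing Chinese text may not round-trip. A minimal standalone sketch of the behaviour the fix targets (the file name and payload here are illustrative, not from the package):

    import json
    import os
    import tempfile

    payload = {"note": "含中文的模拟结果"}  # hypothetical result containing Chinese text
    path = os.path.join(tempfile.gettempdir(), "demo_result.json")

    # Pinning the encoding makes the write independent of the OS locale.
    with open(path, "w", encoding="utf-8") as f:
        json.dump(payload, f, ensure_ascii=False)

    # Symmetric read: decode with the same codec that wrote the file.
    with open(path, "r", encoding="utf-8") as f:
        assert json.load(f)["note"] == payload["note"]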
@@ -16,17 +16,18 @@ if TOOL_DIR not in sys.path:
     sys.path.insert(0, TOOL_DIR)
 import ace_lib
 from fetch_all_operators import fetch_operators, prompt_credentials
-from fetch_all_datasets import (
-    fetch_all_combinations,
-    fetch_datasets_for_combo,
-    merge_and_deduplicate,
-)
 from fetch_all_documentation import (
     fetch_tutorials,
     fetch_tutorial_pages,
     fetch_page,
     _extract_page_id,
 )
+# Dataset fetching currently disabled per request
+# from fetch_all_datasets import (
+#     fetch_all_combinations,
+#     fetch_datasets_for_combo,
+#     merge_and_deduplicate,
+# )
 
 
 def ensure_knowledge_dir():
@@ -107,62 +108,63 @@ def process_operators(session: ace_lib.SingleSession, knowledge_dir: str):
         print(f"✓ Created {filename} with {len(category_list)} operators")
 
 
-def process_datasets(session: ace_lib.SingleSession, dataset_dir: str):
-    """Fetch datasets and save one JSON per region."""
-    print("=== Processing Datasets ===")
-
-    print("Fetching valid instrument/region/delay/universe combinations...")
-    options_df = fetch_all_combinations(session)
-    if options_df is None or options_df.empty:
-        print("No simulation options fetched; aborting dataset fetch.")
-        return
-
-    all_datasets: list[pd.DataFrame] = []
-    combo_idx = 0
-
-    for _, row in options_df.iterrows():
-        instrument_type = row.get("InstrumentType")
-        region = row.get("Region")
-        delay = row.get("Delay")
-        universes = row.get("Universe") or []
-
-        for universe in universes:
-            combo_idx += 1
-            print(f"[{combo_idx}] {instrument_type} / {region} / D{delay} / {universe}")
-            try:
-                df = fetch_datasets_for_combo(session, instrument_type, region, delay, universe)
-                print(f" -> {len(df)} rows")
-                all_datasets.append(df)
-            except Exception as exc:
-                print(f" -> Failed: {exc}")
-
-    if not all_datasets:
-        print("No datasets fetched; nothing to save.")
-        return
-
-    combined_df = pd.concat([df for df in all_datasets if not df.empty], ignore_index=True)
-    if combined_df.empty:
-        print("No datasets fetched; nothing to save.")
-        return
-
-    regions = sorted(combined_df["param_region"].dropna().unique())
-    print(f"Found regions: {', '.join(regions)}")
-
-    for region in regions:
-        region_df = combined_df[combined_df["param_region"] == region]
-        region_unique = merge_and_deduplicate([region_df])
-
-        region_list = []
-        for _, row in region_unique.iterrows():
-            record = {col: to_jsonable(row[col]) for col in row.index}
-            region_list.append(record)
-
-        filename = f"{region.replace(' ', '_').lower()}_datasets.json"
-        filepath = os.path.join(dataset_dir, filename)
-        with open(filepath, "w", encoding="utf-8") as f:
-            json.dump(region_list, f, ensure_ascii=False, indent=2)
-
-        print(f"✓ Created {filename} with {len(region_list)} datasets")
+# Dataset fetching intentionally disabled; keep for potential re-enable.
+# def process_datasets(session: ace_lib.SingleSession, dataset_dir: str):
+#     """Fetch datasets and save one JSON per region."""
+#     print("=== Processing Datasets ===")
+#
+#     print("Fetching valid instrument/region/delay/universe combinations...")
+#     options_df = fetch_all_combinations(session)
+#     if options_df is None or options_df.empty:
+#         print("No simulation options fetched; aborting dataset fetch.")
+#         return
+#
+#     all_datasets: list[pd.DataFrame] = []
+#     combo_idx = 0
+#
+#     for _, row in options_df.iterrows():
+#         instrument_type = row.get("InstrumentType")
+#         region = row.get("Region")
+#         delay = row.get("Delay")
+#         universes = row.get("Universe") or []
+#
+#         for universe in universes:
+#             combo_idx += 1
+#             print(f"[{combo_idx}] {instrument_type} / {region} / D{delay} / {universe}")
+#             try:
+#                 df = fetch_datasets_for_combo(session, instrument_type, region, delay, universe)
+#                 print(f" -> {len(df)} rows")
+#                 all_datasets.append(df)
+#             except Exception as exc:
+#                 print(f" -> Failed: {exc}")
+#
+#     if not all_datasets:
+#         print("No datasets fetched; nothing to save.")
+#         return
+#
+#     combined_df = pd.concat([df for df in all_datasets if not df.empty], ignore_index=True)
+#     if combined_df.empty:
+#         print("No datasets fetched; nothing to save.")
+#         return
+#
+#     regions = sorted(combined_df["param_region"].dropna().unique())
+#     print(f"Found regions: {', '.join(regions)}")
+#
+#     for region in regions:
+#         region_df = combined_df[combined_df["param_region"] == region]
+#         region_unique = merge_and_deduplicate([region_df])
+#
+#         region_list = []
+#         for _, row in region_unique.iterrows():
+#             record = {col: to_jsonable(row[col]) for col in row.index}
+#             region_list.append(record)
+#
+#         filename = f"{region.replace(' ', '_').lower()}_datasets.json"
+#         filepath = os.path.join(dataset_dir, filename)
+#         with open(filepath, "w", encoding="utf-8") as f:
+#             json.dump(region_list, f, ensure_ascii=False, indent=2)
+#
+#         print(f"✓ Created {filename} with {len(region_list)} datasets")
 
 
 def process_documentation(session: ace_lib.SingleSession, knowledge_dir: str):
@@ -240,7 +242,7 @@ def main():
 
     # Ensure knowledge directory exists
     knowledge_dir = ensure_knowledge_dir()
-    dataset_dir = knowledge_dir  # Save datasets directly under knowledge
+    # dataset_dir = knowledge_dir  # Save datasets directly under knowledge (disabled)
     print(f"Knowledge directory: {knowledge_dir}\n")
 
     # Process documentation (tutorials/pages)
@@ -262,16 +264,15 @@ def main():
         traceback.print_exc()
         return
 
-    # Process datasets by region
-    print("\nStarting dataset processing...\n")
-    try:
-        process_datasets(session, dataset_dir)
-    except Exception as exc:
-        print(f"✗ Failed to process datasets: {exc}")
-        import traceback
-        traceback.print_exc()
-        return
-
+    # Dataset processing disabled; re-enable by uncommenting the block below.
+    # print("\nStarting dataset processing...\n")
+    # try:
+    #     process_datasets(session, dataset_dir)
+    # except Exception as exc:
+    #     print(f"✗ Failed to process datasets: {exc}")
+    #     import traceback
+    #     traceback.print_exc()
+    #     return
 
     print("\n=== Processing Complete ===")
 
@@ -18,6 +18,7 @@ PREFERRED_MODELS = [
 # Final chosen model will be detected at runtime from supported list
 MODEL_NAME = None
 COLLECTION_NAME = "brain_kb_v5"
+BATCH_SIZE = 128  # batch upserts to avoid huge single writes
 
 # Optional imports for different file types
 try:
@@ -37,6 +38,8 @@ class KnowledgeBase:
         self.meta_path = os.path.join(self.db_path, "_meta.json")
         self.manifest_path = os.path.join(self.db_path, "_manifest.json")
         self._collection_reset_guard = False
+        self._query_reset_guard = False
+        self._sync_lock = threading.Lock()
 
         if not os.path.exists(self.kb_path):
             os.makedirs(self.kb_path)
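The new _sync_lock is used later in sync_knowledge() with a non-blocking acquire, so overlapping watcher events cannot run two syncs at once. The pattern in isolation (a simplified, standalone sketch; the KnowledgeBase internals are omitted):

    import threading
    import time

    _sync_lock = threading.Lock()

    def sync_knowledge():
        # Non-blocking acquire: a second caller returns immediately.
        if not _sync_lock.acquire(blocking=False):
            print("Sync already running, skip this trigger.")
            return
        try:
            time.sleep(0.1)  # stand-in for the real indexing work
            print("sync done")
        finally:
            _sync_lock.release()

    # Two concurrent triggers: only one performs the work.
    workers = [threading.Thread(target=sync_knowledge) for _ in range(2)]
    for w in workers:
        w.start()
    for w in workers:
        w.join()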
@@ -83,6 +86,7 @@ class KnowledgeBase:
 
         # Initialize Vector DB
         self._init_collection()
+        self._healthcheck()
 
         # Initial sync
         self.sync_knowledge()
@@ -119,6 +123,20 @@ class KnowledgeBase:
         except Exception:
             pass  # Metadata failure should not block runtime
 
+    def _healthcheck(self):
+        """Validate index readability right after startup; rebuild if corrupted."""
+        try:
+            _ = self.collection.count()
+        except Exception as e:
+            msg = str(e).lower()
+            if any(x in msg for x in ["hnsw", "segment", "compaction", "backfill"]):
+                print("Detected index corruption on startup. Rebuilding vector_db...")
+                shutil.rmtree(self.db_path, ignore_errors=True)
+                self._init_collection(recreate=True)
+                self.sync_knowledge(allow_reset=False)
+            else:
+                print(f"Index healthcheck encountered an unexpected error: {e}")
+
     def _maybe_reset_for_incompatibility(self, chosen_model: str, embed_dim: int, chroma_version: str):
         """If existing index meta differs (model/dimension/chromadb), wipe it."""
         if not os.path.exists(self.db_path):
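The startup healthcheck boils down to: probe the collection with a cheap read, and if the error message carries one of the keywords the diff treats as corruption signatures, wipe and rebuild. The same decision logic as a standalone sketch (count_fn and rebuild_fn are stand-ins, not the package's API):

    CORRUPTION_MARKERS = ("hnsw", "segment", "compaction", "backfill")

    def looks_corrupted(exc: Exception) -> bool:
        # Keywords the diff uses to recognise a corrupted index.
        return any(marker in str(exc).lower() for marker in CORRUPTION_MARKERS)

    def healthcheck(count_fn, rebuild_fn):
        """count_fn: cheap read probe; rebuild_fn: wipe the index and re-sync."""
        try:
            count_fn()
        except Exception as e:
            if looks_corrupted(e):
                print("Detected index corruption on startup. Rebuilding...")
                rebuild_fn()
            else:
                print(f"Index healthcheck encountered an unexpected error: {e}")

    def failing_probe():
        raise RuntimeError("hnsw segment reader error")  # simulated corruption

    healthcheck(failing_probe, lambda: print("rebuild triggered"))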
@@ -145,79 +163,88 @@ class KnowledgeBase:
 
     def sync_knowledge(self, allow_reset: bool = True):
         """Scans the knowledge folder and updates the vector database."""
+        if not self._sync_lock.acquire(blocking=False):
+            print("Sync already running, skip this trigger.")
+            return
+
         print("Syncing knowledge base...")
         manifest = self._load_manifest()
         updated_manifest = {}
         supported_extensions = (".txt", ".md", ".pdf", ".docx", ".json")
         current_files = []
-        for filename in os.listdir(self.kb_path):
-            file_path = os.path.join(self.kb_path, filename)
-            if os.path.isfile(file_path) and filename.lower().endswith(supported_extensions):
-                current_files.append(filename)
-                mtime = os.path.getmtime(file_path)
-                size = os.path.getsize(file_path)
-                prev_meta = manifest.get(filename)
-                # Skip unchanged files
-                if prev_meta and prev_meta.get("mtime") == mtime and prev_meta.get("size") == size:
-                    updated_manifest[filename] = prev_meta
-                    continue
-                try:
-                    content = self._extract_text(file_path)
-                    if content:
-                        # Sliding window chunking on original text
-                        chunk_size = 400
-                        overlap = 80
-                        original_chunks = []
-                        for i in range(0, len(content), chunk_size - overlap):
-                            chunk = content[i:i + chunk_size].strip()
-                            if chunk:
-                                original_chunks.append(chunk)
-
-                        if original_chunks:
-                            # Normalize for embedding generation only (not for storage)
-                            normalized_chunks = [c.lower().replace('_', ' ') for c in original_chunks]
-
-                            ids = [f"{filename}_{i}" for i in range(len(original_chunks))]
-                            metadatas = [{"source": filename, "chunk": i} for i in range(len(original_chunks))]
-
-                            # Compute embeddings from normalized text
-                            embeddings = []
-                            for v in self.model.embed(normalized_chunks):
-                                try:
-                                    embeddings.append(v.tolist())
-                                except Exception:
-                                    embeddings.append(list(v))
+        try:
+            for filename in os.listdir(self.kb_path):
+                file_path = os.path.join(self.kb_path, filename)
+                if os.path.isfile(file_path) and filename.lower().endswith(supported_extensions):
+                    current_files.append(filename)
+                    mtime = os.path.getmtime(file_path)
+                    size = os.path.getsize(file_path)
+                    prev_meta = manifest.get(filename)
+                    # Skip unchanged files
+                    if prev_meta and prev_meta.get("mtime") == mtime and prev_meta.get("size") == size:
+                        updated_manifest[filename] = prev_meta
+                        continue
+                    try:
+                        content = self._extract_text(file_path)
+                        if content:
+                            # Sliding window chunking on original text
+                            chunk_size = 800
+                            overlap = 80
+                            original_chunks = []
+                            for i in range(0, len(content), chunk_size - overlap):
+                                chunk = content[i:i + chunk_size].strip()
+                                if chunk:
+                                    original_chunks.append(chunk)
 
-                            # Store ORIGINAL text (not normalized) so users see the real content
-                            self.collection.upsert(
-                                documents=original_chunks,
-                                ids=ids,
-                                metadatas=metadatas,
-                                embeddings=embeddings
-                            )
-                            print(f" ✓ Indexed {filename}: {len(original_chunks)} chunks")
-                            updated_manifest[filename] = {"mtime": mtime, "size": size}
+                            if original_chunks:
+                                # Normalize for embedding generation only (not for storage)
+                                normalized_chunks = [c.lower().replace('_', ' ') for c in original_chunks]
+
+                                ids = [f"{filename}_{i}" for i in range(len(original_chunks))]
+                                metadatas = [{"source": filename, "chunk": i} for i in range(len(original_chunks))]
+
+                                # Compute embeddings from normalized text
+                                embeddings = []
+                                for v in self.model.embed(normalized_chunks):
+                                    try:
+                                        embeddings.append(v.tolist())
+                                    except Exception:
+                                        embeddings.append(list(v))
+
+                                # Store ORIGINAL text (not normalized) so users see the real content
+                                for start in range(0, len(original_chunks), BATCH_SIZE):
+                                    end = start + BATCH_SIZE
+                                    self.collection.upsert(
+                                        documents=original_chunks[start:end],
+                                        ids=ids[start:end],
+                                        metadatas=metadatas[start:end],
+                                        embeddings=embeddings[start:end]
+                                    )
+                                print(f" ✓ Indexed {filename}: {len(original_chunks)} chunks (batched)")
+                                updated_manifest[filename] = {"mtime": mtime, "size": size}
+                    except Exception as e:
+                        err_msg = str(e)
+                        print(f"Error processing {filename}: {err_msg}")
+                        # Auto-recover if HNSW/compaction/index errors occur
+                        if allow_reset and any(x in err_msg.lower() for x in ["hnsw", "compaction", "segment reader"]):
+                            if not self._collection_reset_guard:
+                                print("Detected index corruption. Rebuilding vector_db and retrying sync once...")
+                                self._collection_reset_guard = True
+                                self._init_collection(recreate=True)
+                                return self.sync_knowledge(allow_reset=False)
+            # Remove deleted files from the index
+            deleted_files = set(manifest.keys()) - set(current_files)
+            for filename in deleted_files:
+                try:
+                    self.collection.delete(where={"source": filename})
+                    print(f" ✓ Removed deleted file from index: {filename}")
                 except Exception as e:
-                    err_msg = str(e)
-                    print(f"Error processing {filename}: {err_msg}")
-                    # Auto-recover if HNSW/compaction/index errors occur
-                    if allow_reset and any(x in err_msg.lower() for x in ["hnsw", "compaction", "segment reader"]):
-                        if not self._collection_reset_guard:
-                            print("Detected index corruption. Rebuilding vector_db and retrying sync once...")
-                            self._collection_reset_guard = True
-                            self._init_collection(recreate=True)
-                            return self.sync_knowledge(allow_reset=False)
-        # Remove deleted files from the index
-        deleted_files = set(manifest.keys()) - set(current_files)
-        for filename in deleted_files:
-            try:
-                self.collection.delete(where={"source": filename})
-                print(f" ✓ Removed deleted file from index: {filename}")
-            except Exception as e:
-                print(f" ! Failed to remove {filename}: {e}")
-        # Persist manifest
-        self._save_manifest(updated_manifest)
-        print("Knowledge base sync complete.")
+                    print(f" ! Failed to remove {filename}: {e}")
+            # Persist manifest
+            self._save_manifest(updated_manifest)
+            print("Knowledge base sync complete.")
+        finally:
+            self._sync_lock.release()
 
     def _extract_text(self, file_path):
         ext = os.path.splitext(file_path)[1].lower()
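Besides wrapping the scan in try/finally for the lock, the indexing path now doubles the chunk size (400 → 800 characters) and splits each upsert into BATCH_SIZE slices instead of one large write. The batching pattern on its own (a sketch; collection here is any object exposing an upsert method, as the Chroma collection does in the diff):

    BATCH_SIZE = 128  # mirrors the constant added at the top of rag_engine.py

    def upsert_in_batches(collection, documents, ids, metadatas, embeddings):
        # Slice every parallel list identically so each batch stays aligned.
        for start in range(0, len(documents), BATCH_SIZE):
            end = start + BATCH_SIZE
            collection.upsert(
                documents=documents[start:end],
                ids=ids[start:end],
                metadatas=metadatas[start:end],
                embeddings=embeddings[start:end],
            )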
@@ -249,7 +276,7 @@ class KnowledgeBase:
             return json.dumps(data, ensure_ascii=False, indent=2)
         return None
 
-    def query(self, text, top_k=5, distance_threshold=0.8):
+    def query(self, text, top_k=5, distance_threshold=0.8, allow_reset: bool = True):
         """Retrieves relevant snippets from the knowledge base.
 
         Uses cosine distance (lower is better). A result is treated as a hit only
@@ -311,9 +338,25 @@ class KnowledgeBase:
             context = "\n---\n".join([h["text"] for h in hits]) if is_hit else ""
             return {"hit": is_hit, "context": context, "hits": hits}
         except Exception as e:
-            print(f"Query error: {e}")
+            err_msg = str(e)
+            print(f"Query error: {err_msg}")
             import traceback
             traceback.print_exc()
+
+            # Auto-recover if HNSW/compaction/backfill errors surface during query
+            if allow_reset and any(x in err_msg.lower() for x in ["hnsw", "compaction", "segment reader", "backfill"]):
+                if not self._query_reset_guard:
+                    print("Detected index corruption during query. Rebuilding vector_db and retrying once...")
+                    self._query_reset_guard = True
+                    try:
+                        self._init_collection(recreate=True)
+                        self.sync_knowledge(allow_reset=False)
+                        # Retry query once with guard disabled to avoid loops
+                        self._query_reset_guard = False
+                        return self.query(text, top_k=top_k, distance_threshold=distance_threshold, allow_reset=False)
+                    except Exception as inner_e:
+                        print(f"Auto-rebuild after query failure also failed: {inner_e}")
+                        self._query_reset_guard = False
             return {"hit": False, "context": "", "hits": []}
 
     def start_watcher(self):
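query() now mirrors the sync path: on a corruption-looking failure it rebuilds once, re-runs once, and uses _query_reset_guard so the recovery cannot loop. The guard idea reduced to its core (a sketch; action, recover, and is_recoverable are placeholders, not package APIs):

    def run_with_single_recovery(action, recover, is_recoverable):
        """Attempt an operation, recover at most once, never loop."""
        try:
            return action()
        except Exception as e:
            if not is_recoverable(e):
                raise
            recover()
            return action()  # a second failure propagates unchanged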
@@ -343,14 +386,23 @@ class KBHandler(FileSystemEventHandler):
     def __init__(self, kb_instance):
         self.kb = kb_instance
         self.supported_extensions = (".txt", ".md", ".pdf", ".docx", ".json")
+        self._debounce_timer = None
+
+    def _trigger_sync(self):
+        def run():
+            self.kb.sync_knowledge()
+        if self._debounce_timer and self._debounce_timer.is_alive():
+            return
+        self._debounce_timer = threading.Timer(0.5, run)
+        self._debounce_timer.start()
 
     def on_modified(self, event):
         if not event.is_directory and event.src_path.lower().endswith(self.supported_extensions):
             print(f"File modified: {event.src_path}. Re-syncing...")
-            threading.Thread(target=self.kb.sync_knowledge).start()
+            self._trigger_sync()
 
     def on_created(self, event):
         if not event.is_directory and event.src_path.lower().endswith(self.supported_extensions):
             print(f"File created: {event.src_path}. Syncing...")
-            threading.Thread(target=self.kb.sync_knowledge).start()
+            self._trigger_sync()
 
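The watcher previously spawned a new thread per filesystem event, so one save in an editor (often several modify events in a row) could trigger several overlapping syncs. _trigger_sync coalesces a burst into a single delayed run via threading.Timer. A self-contained version of the same debounce (the 0.5 s delay matches the diff; everything else is illustrative):

    import threading
    import time

    class Debouncer:
        def __init__(self, fn, delay=0.5):
            self.fn = fn
            self.delay = delay
            self._timer = None

        def trigger(self):
            # If a run is already scheduled, let it absorb this event.
            if self._timer and self._timer.is_alive():
                return
            self._timer = threading.Timer(self.delay, self.fn)
            self._timer.start()

    debounce = Debouncer(lambda: print("sync"))
    for _ in range(5):  # a burst of events results in one sync
        debounce.trigger()
    time.sleep(1)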
@@ -1,7 +1,7 @@
 openai
 Pillow
-fastembed>=0.3.4
-chromadb>=0.5.0
+fastembed>=0.3.6
+chromadb>=0.5.11
 watchdog
 pypdf
 python-docx
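The requirements floors move to fastembed>=0.3.6 and chromadb>=0.5.11. A quick way to confirm an existing environment meets the new minimums (assumes the packaging distribution is installed, which pip environments normally have):

    from importlib.metadata import version
    from packaging.version import Version

    # Compare installed versions against the floors pinned in requirements.txt.
    for name, floor in (("fastembed", "0.3.6"), ("chromadb", "0.5.11")):
        assert Version(version(name)) >= Version(floor), f"{name} < {floor}"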
@@ -4729,7 +4729,7 @@ async def main():
         input_str = input()
         if input_str == "":
             config_path = os.path.join(os.path.dirname(__file__), 'transformer_config.json')
-            with open(config_path, 'r') as f:
+            with open(config_path, 'r', encoding='utf-8') as f:
                 config = json.load(f)
             print("\n" + "="*60)
             print("✓ 已从 transformer_config.json 加载账号配置")
@@ -122,7 +122,7 @@ def save_simulation_result(result: dict) -> None:
 
     os.makedirs(folder_path, exist_ok=True)
 
-    with open(file_path, "w") as file:
+    with open(file_path, "w", encoding="utf-8") as file:
         json.dump(result, file)
 
 
@@ -122,7 +122,7 @@ def save_simulation_result(result: dict) -> None:
 
     os.makedirs(folder_path, exist_ok=True)
 
-    with open(file_path, "w") as file:
+    with open(file_path, "w", encoding="utf-8") as file:
         json.dump(result, file)
 
 
@@ -122,7 +122,7 @@ def save_simulation_result(result: dict) -> None:
 
     os.makedirs(folder_path, exist_ok=True)
 
-    with open(file_path, "w") as file:
+    with open(file_path, "w", encoding="utf-8") as file:
         json.dump(result, file)
 
 
@@ -117,7 +117,7 @@ def get_credentials() -> tuple[str, str]:
     credentials_file_path = os.path.join(credentials_folder_path, "platform-brain.json")
 
     if Path(credentials_file_path).exists() and os.path.getsize(credentials_file_path) > 2:
-        with open(credentials_file_path) as file:
+        with open(credentials_file_path, encoding="utf-8") as file:
             data = json.loads(file.read())
     else:
         os.makedirs(credentials_folder_path, exist_ok=True)
@@ -128,7 +128,7 @@ def get_credentials() -> tuple[str, str]:
         email = input("Email:\n")
         password = getpass.getpass(prompt="Password:")
         data = {"email": email, "password": password}
-        with open(credentials_file_path, "w") as file:
+        with open(credentials_file_path, "w", encoding="utf-8") as file:
             json.dump(data, file)
         return (data["email"], data["password"])
 
@@ -241,7 +241,7 @@ def get_user_json():
     json_path = re.sub(r'^["\']+|["\']+$', '', raw_path.strip())
     if os.path.exists(json_path):
         try:
-            with open(json_path, 'r') as f:
+            with open(json_path, 'r', encoding='utf-8') as f:
                 alpha_list = json.load(f)
             # Check master log for previous progress
             file_name = os.path.basename(json_path)
@@ -349,7 +349,7 @@ def simulation_worker(session, alpha_list, mode, json_path, location_path, retry
             sent_count += len(batch)
             update_master_log(json_path, sent_count - 1)
             locations[str(time.time())] = location
-            with open(location_path, 'w') as f:
+            with open(location_path, 'w', encoding='utf-8') as f:
                 json.dump(locations, f, indent=2)
             # Do NOT overwrite the input JSON file
             logger.info(f'Simulation sent, location(s) saved: {location}')
@@ -370,7 +370,7 @@ def result_worker(session, location_path, result_path, poll_interval=30):
         if not os.path.exists(location_path):
             time.sleep(poll_interval)
             continue
-        with open(location_path, 'r') as f:
+        with open(location_path, 'r', encoding='utf-8') as f:
             locations = json.load(f)
         for loc_key, loc_val in locations.items():
             if loc_key in results:
@@ -418,7 +418,7 @@ def result_worker(session, location_path, result_path, poll_interval=30):
                 alpha = session.get(f"https://api.worldquantbrain.com/alphas/{alpha_id}")
                 results[loc_key] = alpha.json()
                 logger.info(f"Result fetched for location {loc_val}")
-                with open(result_path, 'w') as f:
+                with open(result_path, 'w', encoding='utf-8') as f:
                     json.dump(results, f, indent=2)
             except Exception as e:
                 logger.error(f'Error fetching result for {loc_val}: {e}')
@@ -122,7 +122,7 @@ def save_simulation_result(result: dict) -> None:
 
     os.makedirs(folder_path, exist_ok=True)
 
-    with open(file_path, "w") as file:
+    with open(file_path, "w", encoding="utf-8") as file:
         json.dump(result, file)
 
 
@@ -94,7 +94,7 @@ def get_json_filepath():
     # Verify file exists
     if os.path.exists(json_path):
         try:
-            with open(json_path, 'r') as f:
+            with open(json_path, 'r', encoding='utf-8') as f:
                 data = json.load(f)
             print(f"✓ 成功加载 JSON 文件: {json_path}")
             return json_path, data
@@ -122,7 +122,7 @@ def save_simulation_result(result: dict) -> None:
 
     os.makedirs(folder_path, exist_ok=True)
 
-    with open(file_path, "w") as file:
+    with open(file_path, "w", encoding="utf-8") as file:
         json.dump(result, file)
 
 
@@ -1376,7 +1376,7 @@ def load_config() -> Dict[str, Any]:
     config_file = _resolve_config_path()
     if os.path.exists(config_file):
         try:
-            with open(config_file, 'r') as f:
+            with open(config_file, 'r', encoding='utf-8') as f:
                 return json.load(f)
         except (IOError, json.JSONDecodeError) as e:
             logger.error(f"Error loading config file {config_file}: {e}")
@@ -1390,7 +1390,7 @@ def save_config(config: Dict[str, Any]):
     """
     config_file = _resolve_config_path(for_write=True)
     try:
-        with open(config_file, 'w') as f:
+        with open(config_file, 'w', encoding='utf-8') as f:
             json.dump(config, f, indent=2)
     except IOError as e:
         logger.error(f"Error saving config file to {config_file}: {e}")
@@ -2093,7 +2093,7 @@ async def save_simulation_data(simulation_id: str, filename: str) -> Dict[str, A
     simulation_data = await brain_client.get_simulation_status(simulation_id)
 
     # Save to file
-    with open(filename, 'w') as f:
+    with open(filename, 'w', encoding='utf-8') as f:
         json.dump(simulation_data, f, indent=2)
 
     return {"success": True, "filename": filename, "simulation_id": simulation_id}
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: cnhkmcp
-Version: 2.1.1
+Version: 2.1.2
 Summary: A comprehensive Model Context Protocol (MCP) server for quantitative trading platform integration
 Home-page: https://github.com/cnhk/cnhkmcp
 Author: CNHK
@@ -2,7 +2,7 @@ cnhkmcp/__init__.py,sha256=6HAUbLyRJK-E8TxYkbuGHZ57oAxeH8LgrUVK19ttNhY,2758
2
2
  cnhkmcp/untracked/arXiv_API_Tool_Manual.md,sha256=I3hvI5mpmIjBuWptufoVSWFnuhyUc67oCGHEuR0p_xs,13552
3
3
  cnhkmcp/untracked/arxiv_api.py,sha256=-E-Ub9K-DXAYaCjrbobyfQ9H97gaZBc7pL6xPEyVHec,9020
4
4
  cnhkmcp/untracked/forum_functions.py,sha256=QW-CplAsqDkw-Wcwq-1tuZBq48dEO-vXZ8xw7X65EuE,42303
5
- cnhkmcp/untracked/platform_functions.py,sha256=3tXQnlYlXJyrYdEvys6q-4L-PMAk2sOVkBlntsbB4Ig,123322
5
+ cnhkmcp/untracked/platform_functions.py,sha256=M2U2BWXZsqLBCFlZo_wv-UARDn4bNEM598db7WeBL8I,123340
6
6
  cnhkmcp/untracked/sample_mcp_config.json,sha256=QSFvZ086bxUQsvmLjcE6pL9ObzKn4FGnt9npWPo7Eps,1044
7
7
  cnhkmcp/untracked/user_config.json,sha256=_INn1X1qIsITrmEno-BRlQOAGm9wnNCw-6B333DEvnk,695
8
8
  cnhkmcp/untracked/示例参考文档_BRAIN_Alpha_Test_Requirements_and_Tips.md,sha256=W4dtQrqoTN72UyvIsvkGRF0HFOJLHSDeeSlbR3gqQg0,17133
@@ -18,27 +18,23 @@ cnhkmcp/untracked/AI桌面插件/config.json,sha256=flNYMbGeZ074viP30_wq3QieW143
18
18
  cnhkmcp/untracked/AI桌面插件/icon.ico,sha256=oPkiKlN_f8zXTZhBmr6BVGjxfWr_wdIe5Flgq02wp-s,3284
19
19
  cnhkmcp/untracked/AI桌面插件/icon.png,sha256=xg3EfFWiXzibPH38JCC0xpQMfGH6asOZbT0lWFVzl00,1628
20
20
  cnhkmcp/untracked/AI桌面插件/main.py,sha256=Jyj1fwwhPRkVKgQakRV3qY1ggzjy8eZX3VNuNp6QCG4,23015
21
- cnhkmcp/untracked/AI桌面插件/process_knowledge_base.py,sha256=kCjmB8Ib5JOawha-r7dDrR-SeBc21YeLz9coP_KQK-M,9537
22
- cnhkmcp/untracked/AI桌面插件/rag_engine.py,sha256=zg4HL_lA4FuZ36yDoOzXuPQ6Elhoa4DSdc00ieF-jPM,15947
23
- cnhkmcp/untracked/AI桌面插件/requirements.txt,sha256=aa9upfFEGzSJ2dxGGLgv_w9nO1BU0YcqJ_3CrhiKP_8,81
21
+ cnhkmcp/untracked/AI桌面插件/process_knowledge_base.py,sha256=hKY3Uus81cfhBIxuCPiOV-5RDNAM3NxQ_VD0T3BezwQ,10073
22
+ cnhkmcp/untracked/AI桌面插件/rag_engine.py,sha256=fQiA817OOv9dNkNfR6M7oiXz2WaNZ78mEgwcnrF946E,18865
23
+ cnhkmcp/untracked/AI桌面插件/requirements.txt,sha256=jLAbyxGnvi_f05YakzmC_vfVUbfaBUmHBULAmKl0N2o,82
24
24
  cnhkmcp/untracked/AI桌面插件/run.bat,sha256=6Zj0DvBzq86mcptoxWPjmIQms8hYboMWDECM3xA8cr4,32
25
25
  cnhkmcp/untracked/AI桌面插件/首次运行打开我.py,sha256=TA5lFBAAKNvGmKwomzW6N8zkVj2QC5SR0CLXUrRVDuw,9570
26
26
  cnhkmcp/untracked/AI桌面插件/get_knowledgeBase_tool/ace_lib.py,sha256=z-6PCt3z3gJ_2pO9-17dtKkiSfrXPLv-WE-Ff85fqiw,53749
27
27
  cnhkmcp/untracked/AI桌面插件/get_knowledgeBase_tool/fetch_all_datasets.py,sha256=NoRN5NoouW62Us1RvsbyTkgGu0w9ZWkARA1j4gDO5EY,5366
28
28
  cnhkmcp/untracked/AI桌面插件/get_knowledgeBase_tool/fetch_all_documentation.py,sha256=PUviCKtUkGlgyVxznZq5y_wEnn32S9-dzlN91tbbWYs,5260
29
29
  cnhkmcp/untracked/AI桌面插件/get_knowledgeBase_tool/fetch_all_operators.py,sha256=CYvLDqSsKv9xcsm2TSbQoDlKr6-g1ixlXkB6qAIxtJg,2934
30
- cnhkmcp/untracked/AI桌面插件/get_knowledgeBase_tool/helpful_functions.py,sha256=VS-rh4T2CMWLFK4clI7-DYsnqwO5dx5L-rgL8U8BmT8,6622
30
+ cnhkmcp/untracked/AI桌面插件/get_knowledgeBase_tool/helpful_functions.py,sha256=Jc8gclRkvGx7el0XgTmZ67qwMSHrSluYheCUvzd6mbg,6640
31
31
  cnhkmcp/untracked/AI桌面插件/knowledge/test.txt,sha256=ZnOpCeIdB6pBvv-0-nq6EjJtwrVsBnj9nDuxwFVYC-E,82
32
- cnhkmcp/untracked/AI桌面插件/vector_db/_manifest.json,sha256=Vd4s9aqs9zBGLtnKZslDYK_6oKBp5mdfS-SD2_6QP-g,8944
33
- cnhkmcp/untracked/AI桌面插件/vector_db/_meta.json,sha256=xQwhRA0prLtwFAMAGR1ZgCR76jVEFIEfF-1SNPpTMgI,91
34
- cnhkmcp/untracked/AI桌面插件/vector_db/chroma.sqlite3,sha256=TbpSe6HP7_otdpBvTGLeA-H1jMXHFqDsBPOR1iTrAAY,33038336
35
- cnhkmcp/untracked/AI桌面插件/vector_db/be5d957c-b724-46e3-91d1-999e9f5f7d28/index_metadata.pickle,sha256=osKuppyoFnSH_ZHoftXx1h3DIu8NhiOON3YDtM2Kamk,515210
36
32
  cnhkmcp/untracked/APP/.gitignore,sha256=oPCoVTNo82bhkN0c671LdjCpOTVpVhZI5NR75ztcg48,317
37
33
  cnhkmcp/untracked/APP/MODULAR_STRUCTURE.md,sha256=b5xV74-_RtXq2K1EsYDwMukO6lxjJ4-lnOAEnTHpFS0,4706
38
34
  cnhkmcp/untracked/APP/README.md,sha256=vb7hmQX0sH5aFNBmDCN5szMSDHm1_h2VKY4UKCt0aMk,11676
39
35
  cnhkmcp/untracked/APP/ace.log,sha256=HXNY3zIjxRA2b4xcRkQbX5qGdrb0Jk3lI-YnkMSVTow,11623
40
36
  cnhkmcp/untracked/APP/ace_lib.py,sha256=z-6PCt3z3gJ_2pO9-17dtKkiSfrXPLv-WE-Ff85fqiw,53749
41
- cnhkmcp/untracked/APP/helpful_functions.py,sha256=VS-rh4T2CMWLFK4clI7-DYsnqwO5dx5L-rgL8U8BmT8,6622
37
+ cnhkmcp/untracked/APP/helpful_functions.py,sha256=Jc8gclRkvGx7el0XgTmZ67qwMSHrSluYheCUvzd6mbg,6640
42
38
  cnhkmcp/untracked/APP/mirror_config.txt,sha256=RL1jFYwcvDPkLd6tc_lqVcwjTLORWt5Qu0Ym_BTPaao,504
43
39
  cnhkmcp/untracked/APP/operaters.csv,sha256=g8m6z-u0x-CoqGFSp_g3UMyLJkdFLE5UwsNkueMH1yw,13610
44
40
  cnhkmcp/untracked/APP/requirements.txt,sha256=T7qwWI9C916AVkKpYWxbcCCCX8iPL7vu3Ha_exnltRY,859
@@ -49,10 +45,10 @@ cnhkmcp/untracked/APP/setup_tsinghua.sh,sha256=mMDXTqCRIXtSHa_1pU0jCnNF-xajqfZDl
49
45
  cnhkmcp/untracked/APP/ssrn-3332513.pdf,sha256=GEwf1Srtk-fTvF03dhTEjXJstHBARIUg31k7s5kxS98,2082078
50
46
  cnhkmcp/untracked/APP/usage.md,sha256=lPpA6qqAMvVsm41ikbRR1ZWFcuPSgqhMXOUig52eZCI,16164
51
47
  cnhkmcp/untracked/APP/运行打开我.py,sha256=BMCOXXpywp0uUwSsUAL-UVXV3MLKnXy34sgAywfjfBk,99937
52
- cnhkmcp/untracked/APP/Tranformer/Transformer.py,sha256=Rc13OG8nVx22sfr7UpbwxLxU5b-9xhIVyKDqx20W33U,188808
48
+ cnhkmcp/untracked/APP/Tranformer/Transformer.py,sha256=cq9JYZv-s2kN3PbNj0Fk7L89ig74swhFMxgWEwQRZwA,188826
53
49
  cnhkmcp/untracked/APP/Tranformer/ace.log,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
54
50
  cnhkmcp/untracked/APP/Tranformer/ace_lib.py,sha256=z-6PCt3z3gJ_2pO9-17dtKkiSfrXPLv-WE-Ff85fqiw,53749
55
- cnhkmcp/untracked/APP/Tranformer/helpful_functions.py,sha256=VS-rh4T2CMWLFK4clI7-DYsnqwO5dx5L-rgL8U8BmT8,6622
51
+ cnhkmcp/untracked/APP/Tranformer/helpful_functions.py,sha256=Jc8gclRkvGx7el0XgTmZ67qwMSHrSluYheCUvzd6mbg,6640
56
52
  cnhkmcp/untracked/APP/Tranformer/parsetab.py,sha256=29clH5xFEmKpqzRvrLN89QE8JFJNYFhH-gEFR4y7448,7650
57
53
  cnhkmcp/untracked/APP/Tranformer/template_summary.txt,sha256=gWz5LFlPtOmw0JnLj68hgWsoSFNyA18uCJWIYWHLbe8,111054
58
54
  cnhkmcp/untracked/APP/Tranformer/transformer_config.json,sha256=q3Io_1o_BAt-2GNAUKgCs_YR3exkiGFnUqykiPUC1jw,191
@@ -73,13 +69,13 @@ cnhkmcp/untracked/APP/give_me_idea/ace_lib.py,sha256=z-6PCt3z3gJ_2pO9-17dtKkiSfr
73
69
  cnhkmcp/untracked/APP/give_me_idea/alpha_data_specific_template_master.py,sha256=3RXg5vcwl-YpuMwRuv4TLr2DCOmxxA0Z3n2DgRAlf9s,9006
74
70
  cnhkmcp/untracked/APP/give_me_idea/fetch_all_datasets.py,sha256=NoRN5NoouW62Us1RvsbyTkgGu0w9ZWkARA1j4gDO5EY,5366
75
71
  cnhkmcp/untracked/APP/give_me_idea/fetch_all_operators.py,sha256=CYvLDqSsKv9xcsm2TSbQoDlKr6-g1ixlXkB6qAIxtJg,2934
76
- cnhkmcp/untracked/APP/give_me_idea/helpful_functions.py,sha256=VS-rh4T2CMWLFK4clI7-DYsnqwO5dx5L-rgL8U8BmT8,6622
72
+ cnhkmcp/untracked/APP/give_me_idea/helpful_functions.py,sha256=Jc8gclRkvGx7el0XgTmZ67qwMSHrSluYheCUvzd6mbg,6640
77
73
  cnhkmcp/untracked/APP/give_me_idea/what_is_Alpha_template.md,sha256=QjwX0_b0DhhiNlo3ZwkIfXXSsJnk_FyjkZftyVwnCZ8,2317
78
- cnhkmcp/untracked/APP/hkSimulator/ace_lib.py,sha256=2AM67BKth4foMWkCdsz1CkOrSophtEFs7iatWqP8CVo,53481
79
- cnhkmcp/untracked/APP/hkSimulator/autosimulator.py,sha256=JmhmboakiDDQetWmn8t0-k6dgbbm0T3i-W6imVwuuGA,18641
80
- cnhkmcp/untracked/APP/hkSimulator/helpful_functions.py,sha256=VS-rh4T2CMWLFK4clI7-DYsnqwO5dx5L-rgL8U8BmT8,6622
74
+ cnhkmcp/untracked/APP/hkSimulator/ace_lib.py,sha256=46Mi2zMEmLXw53ixRjGiyXyBNYe5QAxzZlqYJ84Vsb4,53517
75
+ cnhkmcp/untracked/APP/hkSimulator/autosimulator.py,sha256=KHiwi4tCoLeOKg-14ZW06g2Z4mfG6kj1vWzFWH2VUrM,18713
76
+ cnhkmcp/untracked/APP/hkSimulator/helpful_functions.py,sha256=Jc8gclRkvGx7el0XgTmZ67qwMSHrSluYheCUvzd6mbg,6640
81
77
  cnhkmcp/untracked/APP/simulator/alpha_submitter.py,sha256=Wxx7DyFr0VwH709JhxXPj1Jiz8dShaVaYar3HTVMgv4,15652
82
- cnhkmcp/untracked/APP/simulator/simulator_wqb.py,sha256=NBvQKf4cYSQgknF0mrrYjt_x9E8w69lavSdxPqq2sCA,23756
78
+ cnhkmcp/untracked/APP/simulator/simulator_wqb.py,sha256=NsPSTs9jmVRguN5_y3qJRNr3hSFUs9JFQgh6y_mxyHw,23774
83
79
  cnhkmcp/untracked/APP/static/brain.js,sha256=pIRQdvyx6_LFzgRLiYfRhInr2WHd5dEjfM57-9VzUjg,20533
84
80
  cnhkmcp/untracked/APP/static/decoder.js,sha256=sKyfgsleHvokCMw6Zp-XfcjixkT8cyMvw6ASDhN6b9I,61987
85
81
  cnhkmcp/untracked/APP/static/feature_engineering.js,sha256=X6nis3FmytrNAE8mM-U6-VqmqZMAQ2X59Iy9RmOF5_8,73895
@@ -101,15 +97,15 @@ cnhkmcp/untracked/APP/templates/simulator.html,sha256=dPAY-fT7oEDCW945ZLLPSUPoaw
101
97
  cnhkmcp/untracked/APP/templates/transformer_web.html,sha256=de5fNhtbdsMMyW5cygvvvW9vSJL81O2A_QPqTmr0usc,27513
102
98
  cnhkmcp/untracked/APP/缘分一道桥/ace_lib.py,sha256=z-6PCt3z3gJ_2pO9-17dtKkiSfrXPLv-WE-Ff85fqiw,53749
103
99
  cnhkmcp/untracked/APP/缘分一道桥/brain_alpha_inspector.py,sha256=KjU6PNyD1Xfqqa0IGKM-VFH_p_4RKBzhmGQczM14KiM,24703
104
- cnhkmcp/untracked/APP/缘分一道桥/helpful_functions.py,sha256=VS-rh4T2CMWLFK4clI7-DYsnqwO5dx5L-rgL8U8BmT8,6622
100
+ cnhkmcp/untracked/APP/缘分一道桥/helpful_functions.py,sha256=Jc8gclRkvGx7el0XgTmZ67qwMSHrSluYheCUvzd6mbg,6640
105
101
  cnhkmcp/untracked/mcp文件论坛版2_如果原版启动不了浏览器就试这个/forum_functions.py,sha256=VuyUaguA0OjJbVRN5Vy8UEFXSAviS3jhDSRWyyPOtfo,18975
106
- cnhkmcp/untracked/mcp文件论坛版2_如果原版启动不了浏览器就试这个/platform_functions.py,sha256=aa5jCRjCORFars8CVahqwbk5ni0mLYWu9yND7Z4TGUM,99159
102
+ cnhkmcp/untracked/mcp文件论坛版2_如果原版启动不了浏览器就试这个/platform_functions.py,sha256=pkf6vXIztnPg7uJYgxPir_dqHaR9JEbqu-Qw1YJm6oU,99195
107
103
  cnhkmcp/untracked/mcp文件论坛版2_如果原版启动不了浏览器就试这个/user_config.json,sha256=_INn1X1qIsITrmEno-BRlQOAGm9wnNCw-6B333DEvnk,695
108
104
  cnhkmcp/untracked/mcp文件论坛版2_如果原版启动不了浏览器就试这个/让AI读这个文档来学会下载浏览器.md,sha256=v5QPSMjRDh52ZjgC4h8QjImnaqlVRLjTHGxmGjTo36g,3396
109
105
  cnhkmcp/untracked/mcp文件论坛版2_如果原版启动不了浏览器就试这个/配置前运行我_安装必要依赖包.py,sha256=BnUyL5g6PaC62yEuS-8vcDSJ0oKu3k6jqQxi2jginuQ,6612
110
- cnhkmcp-2.1.1.dist-info/licenses/LICENSE,sha256=QLxO2eNMnJQEdI_R1UV2AOD-IvuA8zVrkHWA4D9gtoc,1081
111
- cnhkmcp-2.1.1.dist-info/METADATA,sha256=cukLyRXoYZM2-vv-RUlxEMy0KyOC5nyUkBiDlXAFS5o,5171
112
- cnhkmcp-2.1.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
113
- cnhkmcp-2.1.1.dist-info/entry_points.txt,sha256=lTQieVyIvjhSMK4fT-XwnccY-JBC1H4vVQ3V9dDM-Pc,70
114
- cnhkmcp-2.1.1.dist-info/top_level.txt,sha256=x--ibUcSgOS9Z_RWK2Qc-vfs7DaXQN-WMaaxEETJ1Bw,8
115
- cnhkmcp-2.1.1.dist-info/RECORD,,
106
+ cnhkmcp-2.1.2.dist-info/licenses/LICENSE,sha256=QLxO2eNMnJQEdI_R1UV2AOD-IvuA8zVrkHWA4D9gtoc,1081
107
+ cnhkmcp-2.1.2.dist-info/METADATA,sha256=RwgIoT95SQ_WKAusgghseGmAn4b1XKByDlVqjojNyyg,5171
108
+ cnhkmcp-2.1.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
109
+ cnhkmcp-2.1.2.dist-info/entry_points.txt,sha256=lTQieVyIvjhSMK4fT-XwnccY-JBC1H4vVQ3V9dDM-Pc,70
110
+ cnhkmcp-2.1.2.dist-info/top_level.txt,sha256=x--ibUcSgOS9Z_RWK2Qc-vfs7DaXQN-WMaaxEETJ1Bw,8
111
+ cnhkmcp-2.1.2.dist-info/RECORD,,
@@ -1,326 +0,0 @@
1
- {
2
- "001_10_Steps_to_Start_on_BRAIN_documentation.json": {
3
- "mtime": 1767187226.1783848,
4
- "size": 2483
5
- },
6
- "001_Intermediate_Pack_-_Improve_your_Alpha_2_2_documentation.json": {
7
- "mtime": 1767187225.6750722,
8
- "size": 9295
9
- },
10
- "001_Intermediate_Pack_-_Understand_Results_1_2_documentation.json": {
11
- "mtime": 1767187225.1160033,
12
- "size": 9758
13
- },
14
- "001_Introduction_to_Alphas_documentation.json": {
15
- "mtime": 1767187223.7560995,
16
- "size": 11156
17
- },
18
- "001_Introduction_to_BRAIN_Expression_Language_documentation.json": {
19
- "mtime": 1767187224.277528,
20
- "size": 4925
21
- },
22
- "001_WorldQuant_Challenge_documentation.json": {
23
- "mtime": 1767187226.6850047,
24
- "size": 4605
25
- },
26
- "001__Read_this_First_-_Starter_Pack_documentation.json": {
27
- "mtime": 1767187223.2536645,
28
- "size": 31286
29
- },
30
- "002_How_to_choose_the_Simulation_Settings_documentation.json": {
31
- "mtime": 1767187228.2257776,
32
- "size": 13775
33
- },
34
- "002_Simulate_your_first_Alpha_documentation.json": {
35
- "mtime": 1767187227.7143147,
36
- "size": 8701
37
- },
38
- "002__Alpha_Examples_for_Beginners_documentation.json": {
39
- "mtime": 1767187229.1900375,
40
- "size": 10826
41
- },
42
- "002__Alpha_Examples_for_Bronze_Users_documentation.json": {
43
- "mtime": 1767187229.664476,
44
- "size": 4795
45
- },
46
- "002__Alpha_Examples_for_Silver_Users_documentation.json": {
47
- "mtime": 1767187230.1425803,
48
- "size": 3854
49
- },
50
- "002__How_BRAIN_works_documentation.json": {
51
- "mtime": 1767187228.7194252,
52
- "size": 16929
53
- },
54
- "003_Clear_these_tests_before_submitting_an_Alpha_documentation.json": {
55
- "mtime": 1767187231.1392078,
56
- "size": 24609
57
- },
58
- "003_Parameters_in_the_Simulation_results_documentation.json": {
59
- "mtime": 1767187231.8167577,
60
- "size": 16168
61
- },
62
- "004_Group_Data_Fields_documentation.json": {
63
- "mtime": 1767187234.323853,
64
- "size": 4663
65
- },
66
- "004_How_to_use_the_Data_Explorer_documentation.json": {
67
- "mtime": 1767187233.2908292,
68
- "size": 6196
69
- },
70
- "004_Model77_dataset_documentation.json": {
71
- "mtime": 1767187234.7999012,
72
- "size": 3274
73
- },
74
- "004_Sentiment1_dataset_documentation.json": {
75
- "mtime": 1767187235.2783144,
76
- "size": 2058
77
- },
78
- "004_Understanding_Data_in_BRAIN_Key_Concepts_and_Tips_documentation.json": {
79
- "mtime": 1767187232.782544,
80
- "size": 9173
81
- },
82
- "004_Vector_Data_Fields_documentation.json": {
83
- "mtime": 1767187233.8419762,
84
- "size": 6058
85
- },
86
- "005_Crowding_Risk-Neutralized_Alphas_documentation.json": {
87
- "mtime": 1767187239.4696057,
88
- "size": 3404
89
- },
90
- "005_D0_documentation.json": {
91
- "mtime": 1767187237.8772166,
92
- "size": 5894
93
- },
94
- "005_Double_Neutralization_documentation.json": {
95
- "mtime": 1767187237.2801647,
96
- "size": 3308
97
- },
98
- "005_Fast_D1_Documentation_documentation.json": {
99
- "mtime": 1767187244.849411,
100
- "size": 11325
101
- },
102
- "005_Investability_Constrained_Metrics_documentation.json": {
103
- "mtime": 1767187244.361986,
104
- "size": 7971
105
- },
106
- "005_Must-read_posts_How_to_improve_your_Alphas_documentation.json": {
107
- "mtime": 1767187236.2457168,
108
- "size": 2781
109
- },
110
- "005_Neutralization_documentation.json": {
111
- "mtime": 1767187236.7823203,
112
- "size": 8962
113
- },
114
- "005_RAM_Risk-Neutralized_Alphas_documentation.json": {
115
- "mtime": 1767187240.0576506,
116
- "size": 4121
117
- },
118
- "005_Risk_Neutralization_Default_setting_documentation.json": {
119
- "mtime": 1767187238.9658227,
120
- "size": 3796
121
- },
122
- "005_Risk_Neutralized_Alphas_documentation.json": {
123
- "mtime": 1767187238.4253874,
124
- "size": 11617
125
- },
126
- "005_Statistical_Risk-Neutralized_Alphas_documentation.json": {
127
- "mtime": 1767187240.5924916,
128
- "size": 5663
129
- },
130
- "006_EUR_TOP2500_Universe_documentation.json": {
131
- "mtime": 1767187247.5703416,
132
- "size": 2248
133
- },
134
- "006_Getting_Started_China_Research_for_Consultants_Gold_documentation.json": {
135
- "mtime": 1767187248.0169055,
136
- "size": 8247
137
- },
138
- "006_Getting_started_on_Illiquid_Universes_Gold_documentation.json": {
139
- "mtime": 1767187247.1297894,
140
- "size": 2224
141
- },
142
- "006_Getting_started_with_USA_TOPSP500_universe_Gold_documentation.json": {
143
- "mtime": 1767187246.6729863,
144
- "size": 2681
145
- },
146
- "006_GLB_TOPDIV3000_Universe_documentation.json": {
147
- "mtime": 1767187248.4504318,
148
- "size": 2990
149
- },
150
- "006_Global_Alphas_Gold_documentation.json": {
151
- "mtime": 1767187246.0390387,
152
- "size": 5531
153
- },
154
- "006_India_Alphas_documentation.json": {
155
- "mtime": 1767187248.8769279,
156
- "size": 1845
157
- },
158
- "007_Consultant_Dos_and_Don_ts_documentation.json": {
159
- "mtime": 1767187252.9861574,
160
- "size": 6318
161
- },
162
- "007_Consultant_Features_documentation.json": {
163
- "mtime": 1767187251.6508195,
164
- "size": 16785
165
- },
166
- "007_Consultant_Simulation_Features_documentation.json": {
167
- "mtime": 1767187251.177272,
168
- "size": 7226
169
- },
170
- "007_Consultant_Submission_Tests_documentation.json": {
171
- "mtime": 1767187252.1096609,
172
- "size": 17040
173
- },
174
- "007_Finding_Consultant_Alphas_documentation.json": {
175
- "mtime": 1767187250.6579707,
176
- "size": 24560
177
- },
178
- "007_Power_Pool_Alphas_documentation.json": {
179
- "mtime": 1767187250.1653683,
180
- "size": 6398
181
- },
182
- "007_Research_Advisory_Program_documentation.json": {
183
- "mtime": 1767187253.4177225,
184
- "size": 2637
185
- },
186
- "007_Starting_Guide_for_Research_Consultants_documentation.json": {
187
- "mtime": 1767187249.7283993,
188
- "size": 3245
189
- },
190
- "007_Visualization_Tool_documentation.json": {
191
- "mtime": 1767187252.5484178,
192
- "size": 5744
193
- },
194
- "007_Your_Advisor_-_Kunqi_Jiang_documentation.json": {
195
- "mtime": 1767187253.8533716,
196
- "size": 4746
197
- },
198
- "007__Brain_Genius_documentation.json": {
199
- "mtime": 1767187257.3133671,
200
- "size": 16460
201
- },
202
- "007__Single_Dataset_Alphas_documentation.json": {
203
- "mtime": 1767187256.8453045,
204
- "size": 4113
205
- },
206
- "008_Advisory_Theme_Calendar_documentation.json": {
207
- "mtime": 1767187259.502196,
208
- "size": 584
209
- },
210
- "008_Multiplier_Rules_documentation.json": {
211
- "mtime": 1767187258.6333888,
212
- "size": 2178
213
- },
214
- "008_Overview_of_Themes_documentation.json": {
215
- "mtime": 1767187258.1852746,
216
- "size": 1784
217
- },
218
- "008_Theme_Calendar_documentation.json": {
219
- "mtime": 1767187259.0729353,
220
- "size": 8936
221
- },
222
- "009_Combo_Expression_documentation.json": {
223
- "mtime": 1767187261.4117863,
224
- "size": 10902
225
- },
226
- "009_Global_SuperAlphas_documentation.json": {
227
- "mtime": 1767187263.181562,
228
- "size": 3717
229
- },
230
- "009_Helpful_Tips_documentation.json": {
231
- "mtime": 1767187262.3033876,
232
- "size": 3618
233
- },
234
- "009_Selection_Expression_documentation.json": {
235
- "mtime": 1767187260.918844,
236
- "size": 47119
237
- },
238
- "009_SuperAlpha_Operators_documentation.json": {
239
- "mtime": 1767187262.748049,
240
- "size": 18735
241
- },
242
- "009_SuperAlpha_Results_documentation.json": {
243
- "mtime": 1767187261.8593862,
244
- "size": 3838
245
- },
246
- "009_What_is_a_SuperAlpha_documentation.json": {
247
- "mtime": 1767187260.3861206,
248
- "size": 13234
249
- },
250
- "010_BRAIN_API_documentation.json": {
251
- "mtime": 1767187264.5490012,
252
- "size": 36407
253
- },
254
- "010_Documentation_for_ACE_API_Library_Gold_documentation.json": {
255
- "mtime": 1767187264.9810457,
256
- "size": 1660
257
- },
258
- "010__Understanding_simulation_limits_documentation.json": {
259
- "mtime": 1767187264.0576608,
260
- "size": 12013
261
- },
262
- "arithmetic_operators.json": {
263
- "mtime": 1767187265.4527013,
264
- "size": 5302
265
- },
266
- "asi_datasets.json": {
267
- "mtime": 1767187329.8343418,
268
- "size": 241709
269
- },
270
- "chn_datasets.json": {
271
- "mtime": 1767187329.8464715,
272
- "size": 112139
273
- },
274
- "cross_sectional_operators.json": {
275
- "mtime": 1767187265.454866,
276
- "size": 3393
277
- },
278
- "eur_datasets.json": {
279
- "mtime": 1767187329.8725038,
280
- "size": 268981
281
- },
282
- "glb_datasets.json": {
283
- "mtime": 1767187329.8975177,
284
- "size": 222049
285
- },
286
- "group_operators.json": {
287
- "mtime": 1767187265.4558804,
288
- "size": 4058
289
- },
290
- "ind_datasets.json": {
291
- "mtime": 1767187329.930282,
292
- "size": 259819
293
- },
294
- "logical_operators.json": {
295
- "mtime": 1767187265.4568646,
296
- "size": 3549
297
- },
298
- "reduce_operators.json": {
299
- "mtime": 1767187265.4588814,
300
- "size": 8602
301
- },
302
- "special_operators.json": {
303
- "mtime": 1767188164.9683514,
304
- "size": 893
305
- },
306
- "test.txt": {
307
- "mtime": 1767174050.546388,
308
- "size": 82
309
- },
310
- "time_series_operators.json": {
311
- "mtime": 1767187265.4623148,
312
- "size": 12968
313
- },
314
- "transformational_operators.json": {
315
- "mtime": 1767187265.4623148,
316
- "size": 2080
317
- },
318
- "usa_datasets.json": {
319
- "mtime": 1767187329.9605117,
320
- "size": 402280
321
- },
322
- "vector_operators.json": {
323
- "mtime": 1767187265.4637623,
324
- "size": 997
325
- }
326
- }
@@ -1 +0,0 @@
-{"model": "jinaai/jina-embeddings-v2-base-zh", "embed_dim": 768, "chroma_version": "1.4.0"}