cnhkmcp 2.0.3.tar.gz → 2.1.0.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (126)
  1. {cnhkmcp-2.0.3/cnhkmcp.egg-info → cnhkmcp-2.1.0}/PKG-INFO +1 -1
  2. cnhkmcp-2.1.0/cnhkmcp/untracked/AI桌面插件/README.md +38 -0
  3. cnhkmcp-2.1.0/cnhkmcp/untracked/AI桌面插件/config.json +6 -0
  4. cnhkmcp-2.1.0/cnhkmcp/untracked/AI桌面插件/get_knowledgeBase_tool/fetch_all_datasets.py +157 -0
  5. cnhkmcp-2.1.0/cnhkmcp/untracked/AI桌面插件/get_knowledgeBase_tool/fetch_all_documentation.py +132 -0
  6. cnhkmcp-2.1.0/cnhkmcp/untracked/AI桌面插件/get_knowledgeBase_tool/fetch_all_operators.py +99 -0
  7. cnhkmcp-2.1.0/cnhkmcp/untracked/AI桌面插件/icon.ico +0 -0
  8. cnhkmcp-2.1.0/cnhkmcp/untracked/AI桌面插件/icon.png +0 -0
  9. cnhkmcp-2.1.0/cnhkmcp/untracked/AI桌面插件/knowledge/test.txt +1 -0
  10. cnhkmcp-2.1.0/cnhkmcp/untracked/AI桌面插件/main.py +581 -0
  11. cnhkmcp-2.1.0/cnhkmcp/untracked/AI桌面插件/process_knowledge_base.py +280 -0
  12. cnhkmcp-2.1.0/cnhkmcp/untracked/AI桌面插件/rag_engine.py +265 -0
  13. cnhkmcp-2.1.0/cnhkmcp/untracked/AI桌面插件/requirements.txt +12 -0
  14. cnhkmcp-2.1.0/cnhkmcp/untracked/AI桌面插件/run.bat +3 -0
  15. cnhkmcp-2.1.0/cnhkmcp/untracked/AI桌面插件/vector_db/chroma.sqlite3 +0 -0
  16. cnhkmcp-2.1.0/cnhkmcp/untracked/AI桌面插件/首次运行打开我.py +265 -0
  17. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/Tranformer/Transformer.py +2804 -11
  18. cnhkmcp-2.1.0/cnhkmcp/untracked/APP/Tranformer/output/Alpha_candidates.json +2421 -0
  19. cnhkmcp-2.1.0/cnhkmcp/untracked/APP/Tranformer/output/Alpha_generated_expressions_error.json +1034 -0
  20. cnhkmcp-2.1.0/cnhkmcp/untracked/APP/Tranformer/output/Alpha_generated_expressions_success.json +444 -0
  21. cnhkmcp-2.1.0/cnhkmcp/untracked/APP/Tranformer/template_summary.txt +3182 -0
  22. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/ace.log +2 -0
  23. cnhkmcp-2.1.0/cnhkmcp/untracked/APP/give_me_idea/fetch_all_datasets.py +157 -0
  24. cnhkmcp-2.1.0/cnhkmcp/untracked/APP/give_me_idea/fetch_all_operators.py +99 -0
  25. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/simulator/simulator_wqb.py +16 -16
  26. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/static/brain.js +61 -0
  27. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/static/script.js +140 -0
  28. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/templates/index.html +25 -4
  29. cnhkmcp-2.1.0/cnhkmcp/untracked/APP/缘分一道桥/ace_lib.py +1510 -0
  30. cnhkmcp-2.1.0/cnhkmcp/untracked/APP/缘分一道桥/helpful_functions.py +180 -0
  31. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/运行打开我.py +70 -8
  32. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0/cnhkmcp.egg-info}/PKG-INFO +1 -1
  33. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp.egg-info/SOURCES.txt +19 -3
  34. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/setup.py +1 -1
  35. cnhkmcp-2.0.3/cnhkmcp/untracked/APP/Tranformer/output/Alpha_candidates.json +0 -1786
  36. cnhkmcp-2.0.3/cnhkmcp/untracked/APP/Tranformer/output/Alpha_generated_expressions_error.json +0 -261
  37. cnhkmcp-2.0.3/cnhkmcp/untracked/APP/Tranformer/output/Alpha_generated_expressions_success.json +0 -170
  38. cnhkmcp-2.0.3/cnhkmcp/untracked/APP/Tranformer/template_summary.txt +0 -408
  39. cnhkmcp-2.0.3/cnhkmcp/untracked/APP/hkSimulator/ace.log +0 -0
  40. cnhkmcp-2.0.3/cnhkmcp/untracked/APP/hkSimulator/autosim_20251205_145240.log +0 -0
  41. cnhkmcp-2.0.3/cnhkmcp/untracked/APP/hkSimulator/autosim_20251215_030103.log +0 -0
  42. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/LICENSE +0 -0
  43. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/MANIFEST.in +0 -0
  44. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/README.md +0 -0
  45. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/__init__.py +0 -0
  46. cnhkmcp-2.0.3/cnhkmcp/untracked/APP/Tranformer/ace_lib.py → cnhkmcp-2.1.0/cnhkmcp/untracked/AI桌面插件/get_knowledgeBase_tool/ace_lib.py +0 -0
  47. cnhkmcp-2.0.3/cnhkmcp/untracked/APP/Tranformer/helpful_functions.py → cnhkmcp-2.1.0/cnhkmcp/untracked/AI桌面插件/get_knowledgeBase_tool/helpful_functions.py +0 -0
  48. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/.gitignore +0 -0
  49. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/MODULAR_STRUCTURE.md +0 -0
  50. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/README.md +0 -0
  51. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/Tranformer/ace.log +0 -0
  52. {cnhkmcp-2.0.3/cnhkmcp/untracked/APP → cnhkmcp-2.1.0/cnhkmcp/untracked/APP/Tranformer}/ace_lib.py +0 -0
  53. {cnhkmcp-2.0.3/cnhkmcp/untracked/APP/give_me_idea → cnhkmcp-2.1.0/cnhkmcp/untracked/APP/Tranformer}/helpful_functions.py +0 -0
  54. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/Tranformer/output/Alpha_candidates_示例.json +0 -0
  55. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/Tranformer/output/Alpha_generated_expressions_示例可直接载入Machine_lib.json +0 -0
  56. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/Tranformer/parsetab.py +0 -0
  57. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/Tranformer/transformer_config.json +0 -0
  58. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/Tranformer/validator.py +0 -0
  59. {cnhkmcp-2.0.3/cnhkmcp/untracked/APP/give_me_idea → cnhkmcp-2.1.0/cnhkmcp/untracked/APP}/ace_lib.py +0 -0
  60. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/blueprints/__init__.py +0 -0
  61. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/blueprints/feature_engineering.py +0 -0
  62. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/blueprints/idea_house.py +0 -0
  63. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/blueprints/inspiration_house.py +0 -0
  64. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/blueprints/paper_analysis.py +0 -0
  65. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/custom_templates/templates.json +0 -0
  66. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/give_me_idea/BRAIN_Alpha_Template_Expert_SystemPrompt.md +0 -0
  67. cnhkmcp-2.0.3/cnhkmcp/untracked/APP/缘分一道桥/ace_lib.py → cnhkmcp-2.1.0/cnhkmcp/untracked/APP/give_me_idea/ace_lib.py +0 -0
  68. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/give_me_idea/alpha_data_specific_template_master.py +0 -0
  69. {cnhkmcp-2.0.3/cnhkmcp/untracked/APP → cnhkmcp-2.1.0/cnhkmcp/untracked/APP/give_me_idea}/helpful_functions.py +0 -0
  70. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/give_me_idea/what_is_Alpha_template.md +0 -0
  71. {cnhkmcp-2.0.3/cnhkmcp/untracked/APP/hkSimulator → cnhkmcp-2.1.0/cnhkmcp/untracked/APP}/helpful_functions.py +0 -0
  72. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/hkSimulator/ace_lib.py +0 -0
  73. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/hkSimulator/autosimulator.py +0 -0
  74. cnhkmcp-2.0.3/cnhkmcp/untracked/APP/缘分一道桥/helpful_functions.py → cnhkmcp-2.1.0/cnhkmcp/untracked/APP/hkSimulator/helpful_functions.py +0 -0
  75. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/mirror_config.txt +0 -0
  76. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/operaters.csv +0 -0
  77. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/requirements.txt +0 -0
  78. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/run_app.bat +0 -0
  79. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/run_app.sh +0 -0
  80. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/setup_tsinghua.bat +0 -0
  81. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/setup_tsinghua.sh +0 -0
  82. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/simulator/alpha_submitter.py +0 -0
  83. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/ssrn-3332513.pdf +0 -0
  84. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/static/decoder.js +0 -0
  85. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/static/feature_engineering.js +0 -0
  86. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/static/idea_house.js +0 -0
  87. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/static/inspiration.js +0 -0
  88. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/static/inspiration_house.js +0 -0
  89. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/static/paper_analysis.js +0 -0
  90. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/static/simulator.js +0 -0
  91. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/static/styles.css +0 -0
  92. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/static/usage_widget.js +0 -0
  93. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/templates/alpha_inspector.html +0 -0
  94. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/templates/feature_engineering.html +0 -0
  95. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/templates/idea_house.html +0 -0
  96. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/templates/inspiration_house.html +0 -0
  97. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/templates/paper_analysis.html +0 -0
  98. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/templates/simulator.html +0 -0
  99. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/templates/transformer_web.html +0 -0
  100. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/usage.md +0 -0
  101. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/APP/缘分一道桥/brain_alpha_inspector.py +0 -0
  102. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/arXiv_API_Tool_Manual.md +0 -0
  103. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/arxiv_api.py +0 -0
  104. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/forum_functions.py +0 -0
  105. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/mcp文件论坛版2_如果原版启动不了浏览器就试这个/forum_functions.py +0 -0
  106. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/mcp文件论坛版2_如果原版启动不了浏览器就试这个/platform_functions.py +0 -0
  107. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/mcp文件论坛版2_如果原版启动不了浏览器就试这个/user_config.json +0 -0
  108. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/mcp文件论坛版2_如果原版启动不了浏览器就试这个/让AI读这个文档来学会下载浏览器.md +0 -0
  109. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/mcp文件论坛版2_如果原版启动不了浏览器就试这个/配置前运行我_安装必要依赖包.py +0 -0
  110. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/platform_functions.py +0 -0
  111. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/sample_mcp_config.json +0 -0
  112. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/user_config.json +0 -0
  113. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/示例参考文档_BRAIN_Alpha_Test_Requirements_and_Tips.md +0 -0
  114. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/示例工作流_Alpha_explaination_workflow.md +0 -0
  115. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/示例工作流_BRAIN_6_Tips_Datafield_Exploration_Guide.md +0 -0
  116. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/示例工作流_BRAIN_Alpha_Improvement_Workflow.md +0 -0
  117. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/示例工作流_Dataset_Exploration_Expert_Manual.md +0 -0
  118. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/示例工作流_daily_report_workflow.md +0 -0
  119. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp/untracked/配置前运行我_安装必要依赖包.py +0 -0
  120. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp.egg-info/dependency_links.txt +0 -0
  121. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp.egg-info/entry_points.txt +0 -0
  122. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp.egg-info/not-zip-safe +0 -0
  123. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp.egg-info/requires.txt +0 -0
  124. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/cnhkmcp.egg-info/top_level.txt +0 -0
  125. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/requirements.txt +0 -0
  126. {cnhkmcp-2.0.3 → cnhkmcp-2.1.0}/setup.cfg +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: cnhkmcp
- Version: 2.0.3
+ Version: 2.1.0
  Summary: A comprehensive Model Context Protocol (MCP) server for quantitative trading platform integration
  Home-page: https://github.com/cnhk/cnhkmcp
  Author: CNHK
cnhkmcp-2.1.0/cnhkmcp/untracked/AI桌面插件/README.md
@@ -0,0 +1,38 @@
+ # AI Desktop Assistant
+ 
+ A simple desktop assistant app, modeled on the Doubao (豆包) experience, that lets the user capture the screen and chat with an AI about it.
+ 
+ ## Features
+ - **Snip & Chat (截屏提问)**: click the button to capture the current full screen; a chat window opens automatically and sends the capture to the AI.
+ - **Plain-text chat (纯文本对话)**: chat with the AI directly.
+ 
+ ## Installation and Configuration
+ 
+ 1. **Install dependencies**:
+    Make sure Python is installed, then run the following command in a terminal to install the required libraries:
+    ```bash
+    pip install -r requirements.txt
+    ```
+ 
+ 2. **Configure the API**:
+    Open the `config.json` file and confirm that your API key and base URL are configured correctly.
+    ```json
+    {
+      "api_key": "YOUR_API_KEY",
+      "base_url": "https://api.moonshot.cn/v1",
+      "model": "kimi-k2-turbo-preview"
+    }
+    ```
+    *Note*: if the model in use does not support direct image input (Vision), the API may report an error. If that happens, switch to a Vision-capable model or use the text features only.
+ 
+ ## Running
+ 
+ Double-click `run.bat`, or run in a terminal:
+ ```bash
+ python main.py
+ ```
+ 
+ ## Future Plans
+ - Support real-time voice conversation.
+ - Support capturing a selected region (currently full screen only).
+ - Improve the image upload logic (adapt it to the Moonshot file upload API).
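The README's Vision note is the one subtle point in this new plugin: the endpoint accepts a screenshot only if the configured model supports image input. main.py (581 lines) is not reproduced in this diff, so the sketch below is a hedged illustration rather than the package's actual code; it assumes Pillow for the capture (requirements.txt is not shown) and the OpenAI-style content-block format that Vision-capable, OpenAI-compatible endpoints such as Moonshot's accept. The function name is hypothetical.

```python
# Hypothetical sketch of a "Snip & Chat" message builder; not the package's main.py.
import base64
import io

from PIL import ImageGrab  # assumed dependency; full-screen capture on Windows/macOS


def screenshot_as_message(prompt: str) -> dict:
    """Capture the full screen and wrap it in a Vision-style user message."""
    img = ImageGrab.grab()  # full screen, matching the README's current behavior
    buf = io.BytesIO()
    img.save(buf, format="PNG")
    b64 = base64.b64encode(buf.getvalue()).decode()
    return {
        "role": "user",
        "content": [
            {"type": "text", "text": prompt},
            {"type": "image_url", "image_url": {"url": f"data:image/png;base64,{b64}"}},
        ],
    }
```

A model without Vision support rejects the image_url block, which is exactly the failure mode the README warns about.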
cnhkmcp-2.1.0/cnhkmcp/untracked/AI桌面插件/config.json
@@ -0,0 +1,6 @@
+ {
+   "api_key": "",
+   "base_url": "https://api.moonshot.cn/v1",
+   "model": "kimi-latest",
+   "system_prompt": "You are a WorldQuant BRAIN platform expert and Consultant. Your goal is to assist users with Alpha development, BRAIN API usage, and maximizing consultant income.\n\nYour expertise includes:\n- Deep knowledge of the BRAIN API (authentication, data, simulation, analysis).\n- Alpha development best practices (stable PnL, economic sense, avoiding pitfalls).\n- Consultant income structure (daily pay, Genius Program, increasing earnings).\n\nGuidelines:\n- Always refer to the BRAIN_Consultant_Starter_Handbook.md for guidance.\n- Emphasize the importance of stable PnL and economic sense when discussing Alphas.\n- Follow the handbook's workflow for API usage.\n- Explain income components clearly when asked.\n- IMPORTANT: You cannot directly interact with the platform. You must guide the user step-by-step on what actions to take (e.g., 'Copy this code', 'Go to the Simulation page').\n- Always suggest the specific next operation the user should perform.\n\nKey Concepts:\n1. Pyramid:\n - Definition: Combination of Region + Delay + Data Category. 'Lit' when 3+ Alphas are submitted in that combo.\n - Purpose: Measures diversity; affects promotions and QualityFactor.\n - Tips: Target underfilled pyramids; use grouping fields; track via MCP.\n\n2. Simulation Settings:\n - Key fields: instrument_type, region, delay (D0/D1), universe, neutralization, decay, truncation, etc.\n - Best Practices: Preprocess (winsorize -> zscore) -> Neutralize. Validate exposures.\n - Neutralization: Use regression_neut or group_neutralize. Consider CROWDING or RAM options.\n - Universes: Choose based on investability (e.g., TOP3000, TOPSP500).\n\nIf the user provides a screenshot, analyze it in the context of the BRAIN platform (e.g., Alpha code, simulation results, error messages). Answer in Chinese."
+ }
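For orientation, these four fields map straight onto an OpenAI-compatible chat call (Moonshot's API follows that convention). A minimal sketch, assuming config.json sits in the working directory; this is not the package's main.py:

```python
# Minimal sketch: wire config.json into an OpenAI-compatible chat completion.
import json

import requests

with open("config.json", encoding="utf-8") as f:
    cfg = json.load(f)

resp = requests.post(
    f"{cfg['base_url']}/chat/completions",
    headers={"Authorization": f"Bearer {cfg['api_key']}"},
    json={
        "model": cfg["model"],
        "messages": [
            {"role": "system", "content": cfg["system_prompt"]},
            {"role": "user", "content": "How do I light a new pyramid?"},  # example question
        ],
    },
    timeout=60,
)
resp.raise_for_status()
print(resp.json()["choices"][0]["message"]["content"])
```

The api_key ships empty as a placeholder; the user fills it in per the README.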
cnhkmcp-2.1.0/cnhkmcp/untracked/AI桌面插件/get_knowledgeBase_tool/fetch_all_datasets.py
@@ -0,0 +1,157 @@
+ import getpass
+ import json
+ import os
+ import sys
+ from typing import List
+ 
+ import pandas as pd
+ 
+ # Ensure we can import ace_lib from the project root
+ SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
+ ROOT_DIR = os.path.dirname(SCRIPT_DIR)
+ if ROOT_DIR not in sys.path:
+     sys.path.append(ROOT_DIR)
+ 
+ import ace_lib  # noqa: E402
+ 
+ 
+ def prompt_credentials() -> tuple[str, str]:
+     """Prompt user for platform credentials."""
+     email = input("Enter BRAIN Email: ").strip()
+     while not email:
+         email = input("Email is required. Enter BRAIN Email: ").strip()
+ 
+     password = getpass.getpass("Enter BRAIN Password: ").strip()
+     while not password:
+         password = getpass.getpass("Password is required. Enter BRAIN Password: ").strip()
+ 
+     return email, password
+ 
+ 
+ def fetch_all_combinations(session: ace_lib.SingleSession) -> pd.DataFrame:
+     """Return all valid instrument/region/delay/universe combos from platform settings."""
+     options_df = ace_lib.get_instrument_type_region_delay(session)
+     if options_df is None or options_df.empty:
+         raise RuntimeError("No simulation options fetched; cannot enumerate datasets.")
+     return options_df
+ 
+ 
+ def fetch_datasets_for_combo(
+     session: ace_lib.SingleSession,
+     instrument_type: str,
+     region: str,
+     delay: int,
+     universe: str,
+ ) -> pd.DataFrame:
+     """Fetch datasets for one combination (theme ALL to include both theme true/false)."""
+     df = ace_lib.get_datasets(
+         session,
+         instrument_type=instrument_type,
+         region=region,
+         delay=delay,
+         universe=universe,
+         theme="ALL",
+     )
+     if df is None:
+         return pd.DataFrame()
+ 
+     df = df.copy()
+     df["param_instrument_type"] = instrument_type
+     df["param_region"] = region
+     df["param_delay"] = delay
+     df["param_universe"] = universe
+     df["combo_key"] = df.apply(
+         lambda row: f"{instrument_type}-{region}-D{delay}-{universe}",
+         axis=1,
+     )
+     return df
+ 
+ 
+ def merge_and_deduplicate(datasets: List[pd.DataFrame]) -> pd.DataFrame:
+     """Merge fetched datasets and deduplicate by dataset id, keeping all combo metadata."""
+     combined = pd.concat([df for df in datasets if not df.empty], ignore_index=True)
+     if combined.empty:
+         return combined
+ 
+     # Aggregate availability combos per dataset id
+     availability = (
+         combined.groupby("id")["combo_key"]
+         .agg(lambda x: " | ".join(sorted(set(x))))
+         .rename("available_in")
+         .reset_index()
+     )
+ 
+     # Drop duplicate rows by dataset id, keep first occurrence of other columns
+     unique_df = combined.drop_duplicates(subset=["id"]).copy()
+     unique_df = unique_df.merge(availability, on="id", how="left")
+ 
+     # Sort for readability
+     sort_cols = [col for col in ["category", "subcategory", "id"] if col in unique_df.columns]
+     if sort_cols:
+         # Ensure sort keys are hashable/strings to avoid unhashable dict errors
+         for col in sort_cols:
+             unique_df[col] = unique_df[col].apply(
+                 lambda v: v
+                 if pd.isna(v) or isinstance(v, (int, float, str, bool))
+                 else json.dumps(v, ensure_ascii=False, sort_keys=True)
+             )
+         unique_df = unique_df.sort_values(sort_cols).reset_index(drop=True)
+ 
+     return unique_df
+ 
+ 
+ def main():
+     print("=== Fetch All BRAIN Datasets (all regions/universes/delays) ===")
+ 
+     email, password = prompt_credentials()
+ 
+     # Monkey-patch ace_lib credential retrieval so start_session uses provided credentials
+     ace_lib.get_credentials = lambda: (email, password)
+ 
+     print("Logging in...")
+     try:
+         session = ace_lib.start_session()
+         print("Login successful.")
+     except Exception as exc:
+         print(f"Login failed: {exc}")
+         return
+ 
+     print("Fetching valid instrument/region/delay/universe combinations from platform settings...")
+     try:
+         options_df = fetch_all_combinations(session)
+     except Exception as exc:
+         print(f"Failed to fetch simulation options: {exc}")
+         return
+ 
+     all_datasets: List[pd.DataFrame] = []
+     total_combos = 0
+ 
+     for _, row in options_df.iterrows():
+         instrument_type = row.get("InstrumentType")
+         region = row.get("Region")
+         delay = row.get("Delay")
+         universes = row.get("Universe") or []
+ 
+         for universe in universes:
+             total_combos += 1
+             print(f"[{total_combos}] Fetching datasets for {instrument_type} / {region} / D{delay} / {universe}...")
+             try:
+                 df = fetch_datasets_for_combo(session, instrument_type, region, delay, universe)
+                 print(f" -> Retrieved {len(df)} rows")
+                 all_datasets.append(df)
+             except Exception as exc:
+                 print(f" -> Failed for {instrument_type}-{region}-D{delay}-{universe}: {exc}")
+ 
+     result_df = merge_and_deduplicate(all_datasets)
+ 
+     if result_df.empty:
+         print("No datasets fetched; nothing to save.")
+         return
+ 
+     output_path = os.path.join(SCRIPT_DIR, "all_datasets_full.csv")
+     result_df.to_csv(output_path, index=False)
+     print(f"Saved {len(result_df)} unique datasets to {output_path}")
+ 
+ 
+ if __name__ == "__main__":
+     main()
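The merge_and_deduplicate step is the only non-obvious part of this script: the same dataset id can come back under many region/delay/universe combinations, and the groupby collapses those rows into one while recording every combination in available_in. A toy run with made-up values shows the effect:

```python
# Toy illustration of the merge_and_deduplicate aggregation; ids and combos are invented.
import pandas as pd

a = pd.DataFrame({"id": ["pv1"], "category": ["price"], "combo_key": ["EQUITY-USA-D1-TOP3000"]})
b = pd.DataFrame({"id": ["pv1"], "category": ["price"], "combo_key": ["EQUITY-EUR-D1-TOP2500"]})
combined = pd.concat([a, b], ignore_index=True)

# One row per id; all combos joined into a single "available_in" string.
availability = (
    combined.groupby("id")["combo_key"]
    .agg(lambda x: " | ".join(sorted(set(x))))
    .rename("available_in")
    .reset_index()
)
unique_df = combined.drop_duplicates(subset=["id"]).merge(availability, on="id", how="left")
print(unique_df[["id", "available_in"]])
# pv1  "EQUITY-EUR-D1-TOP2500 | EQUITY-USA-D1-TOP3000"
```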
cnhkmcp-2.1.0/cnhkmcp/untracked/AI桌面插件/get_knowledgeBase_tool/fetch_all_documentation.py
@@ -0,0 +1,132 @@
+ """Minimal helper script to log in, list tutorials, and dump each tutorial page.
+ 
+ - Prompts for email/password at runtime (keeps credentials out of source).
+ - Uses only the requests standard stack; no project-internal imports.
+ - Saves each page JSON to tutorial_dump/<index>_<page_id>.json for inspection.
+ 
+ Run with: python doc_fetch_sample.py
+ """
+ import base64
+ import getpass
+ import json
+ import os
+ from typing import Any, Dict, Iterable, List, Optional
+ 
+ import requests
+ 
+ BASE_URL = "https://api.worldquantbrain.com"
+ 
+ 
+ def _basic_auth_header(email: str, password: str) -> Dict[str, str]:
+     token = base64.b64encode(f"{email}:{password}".encode()).decode()
+     return {"Authorization": f"Basic {token}"}
+ 
+ 
+ def authenticate(email: str, password: str) -> requests.Session:
+     """Authenticate and return a session carrying the JWT cookie."""
+     session = requests.Session()
+     resp = session.post(f"{BASE_URL}/authentication", headers=_basic_auth_header(email, password), timeout=30)
+     if resp.status_code != 201:
+         raise RuntimeError(f"Authentication failed (status {resp.status_code}): {resp.text}")
+     return session
+ 
+ 
+ def fetch_tutorials(session: requests.Session) -> List[Dict[str, Any]]:
+     """Fetch tutorials list; handle a few common response shapes."""
+     resp = session.get(f"{BASE_URL}/tutorials", timeout=30)
+     resp.raise_for_status()
+     data = resp.json()
+     if isinstance(data, list):
+         return data
+     if isinstance(data, dict):
+         for key in ("items", "results", "data", "tutorials"):
+             maybe = data.get(key)
+             if isinstance(maybe, list):
+                 return maybe
+     return []
+ 
+ 
+ def fetch_tutorial_pages(session: requests.Session, tutorial_id: str) -> List[Dict[str, Any]]:
+     """Fetch pages for a tutorial when the list entry only gives a tutorial id/slug."""
+     resp = session.get(f"{BASE_URL}/tutorials/{tutorial_id}/pages", timeout=30)
+     if resp.status_code == 404:
+         return []  # graceful fallback
+     resp.raise_for_status()
+     data = resp.json()
+     if isinstance(data, list):
+         return data
+     if isinstance(data, dict):
+         for key in ("items", "results", "pages", "data"):
+             maybe = data.get(key)
+             if isinstance(maybe, list):
+                 return maybe
+     return []
+ 
+ 
+ def _extract_page_id(entry: Dict[str, Any]) -> Optional[str]:
+     for key in ("page_id", "pageId", "id", "pageID", "slug", "code"):
+         if key in entry and entry[key] is not None:
+             return str(entry[key])
+     return None
+ 
+ 
+ def fetch_page(session: requests.Session, page_id: str) -> Dict[str, Any]:
+     resp = session.get(f"{BASE_URL}/tutorial-pages/{page_id}", timeout=30)
+     resp.raise_for_status()
+     return resp.json()
+ 
+ 
+ def dump_pages(session: requests.Session, tutorials: List[Dict[str, Any]], out_dir: str = "tutorial_dump") -> None:
+     os.makedirs(out_dir, exist_ok=True)
+ 
+     # Save raw tutorials list for inspection
+     with open(os.path.join(out_dir, "tutorials_raw.json"), "w", encoding="utf-8") as f:
+         json.dump(tutorials, f, ensure_ascii=False, indent=2)
+ 
+     def _iter_page_candidates(item: Dict[str, Any]) -> Iterable[Dict[str, Any]]:
+         # If the tutorial entry already has pages array, yield them
+         if isinstance(item.get("pages"), list):
+             for p in item["pages"]:
+                 yield p
+         # Else, try fetching pages via tutorial id/slug
+         tutorial_id = _extract_page_id(item)
+         if tutorial_id:
+             pages = fetch_tutorial_pages(session, tutorial_id)
+             for p in pages:
+                 yield p
+         # Lastly, treat the tutorial itself as a single page if it has an id/slug
+         if tutorial_id:
+             yield {"id": tutorial_id, "title": item.get("title")}
+ 
+     seen = 0
+     for idx, item in enumerate(tutorials, start=1):
+         for page_entry in _iter_page_candidates(item):
+             page_id = _extract_page_id(page_entry)
+             if not page_id:
+                 print(f"[{idx:03d}] skipped page (no id): {page_entry}")
+                 continue
+             try:
+                 page = fetch_page(session, page_id)
+             except requests.HTTPError as e:
+                 print(f"[{idx:03d}] page {page_id} -> HTTP {e.response.status_code} ({page_entry})")
+                 continue
+             seen += 1
+             title = page.get("title") or page_entry.get("title") or item.get("title") or f"page_{page_id}"
+             out_path = os.path.join(out_dir, f"{idx:03d}_{seen:02d}_{page_id}.json")
+             with open(out_path, "w", encoding="utf-8") as f:
+                 json.dump(page, f, ensure_ascii=False, indent=2)
+             snippet = page.get("code") or page.get("content") or str(page)[:120]
+             print(f"[{idx:03d}] saved {title} -> {out_path}; sample: {str(snippet)[:80]}")
+ 
+ 
+ def main() -> None:
+     email = input("BRAIN email: ").strip()
+     password = getpass.getpass("BRAIN password: ")
+     session = authenticate(email, password)
+     tutorials = fetch_tutorials(session)
+     print(f"Fetched {len(tutorials)} tutorials")
+     dump_pages(session, tutorials)
+ 
+ 
+ if __name__ == "__main__":
+     main()
cnhkmcp-2.1.0/cnhkmcp/untracked/AI桌面插件/get_knowledgeBase_tool/fetch_all_operators.py
@@ -0,0 +1,99 @@
+ import getpass
+ import os
+ import sys
+ from typing import List
+ 
+ import pandas as pd
+ 
+ # Make ace_lib importable
+ SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
+ ROOT_DIR = os.path.dirname(SCRIPT_DIR)
+ if ROOT_DIR not in sys.path:
+     sys.path.append(ROOT_DIR)
+ 
+ import ace_lib  # noqa: E402
+ 
+ 
+ def prompt_credentials() -> tuple[str, str]:
+     email = input("Enter BRAIN Email: ").strip()
+     while not email:
+         email = input("Email is required. Enter BRAIN Email: ").strip()
+ 
+     password = getpass.getpass("Enter BRAIN Password: ").strip()
+     while not password:
+         password = getpass.getpass("Password is required. Enter BRAIN Password: ").strip()
+ 
+     return email, password
+ 
+ 
+ def fetch_operators(session: ace_lib.SingleSession) -> pd.DataFrame:
+     df = ace_lib.get_operators(session)
+     if df is None or df.empty:
+         return pd.DataFrame()
+ 
+     df = df.copy()
+ 
+     # Choose an identifier column robustly
+     id_col = "id" if "id" in df.columns else None
+     if id_col is None:
+         if "name" in df.columns:
+             id_col = "name"
+         else:
+             id_col = "_row_id"
+             df[id_col] = df.index
+ 
+     # Re-aggregate scopes so each operator id is unique
+     if "scope" in df.columns:
+         scope_map = (
+             df.groupby(id_col)["scope"]
+             .agg(lambda x: sorted(set([item for item in x if pd.notna(item)])))
+             .rename("scopes")
+             .reset_index()
+         )
+     else:
+         scope_map = pd.DataFrame({id_col: df[id_col].unique(), "scopes": [[] for _ in range(df[id_col].nunique())]})
+ 
+     unique_df = df.drop(columns=["scope"], errors="ignore").drop_duplicates(subset=[id_col]).merge(
+         scope_map, on=id_col, how="left"
+     )
+ 
+     # Sort for readability
+     sort_cols: List[str] = [col for col in ["category", "subcategory", "name", id_col] if col in unique_df.columns]
+     if sort_cols:
+         unique_df = unique_df.sort_values(sort_cols).reset_index(drop=True)
+ 
+     return unique_df
+ 
+ 
+ def main():
+     print("=== Fetch All BRAIN Operators ===")
+ 
+     email, password = prompt_credentials()
+     ace_lib.get_credentials = lambda: (email, password)
+ 
+     print("Logging in...")
+     try:
+         session = ace_lib.start_session()
+         print("Login successful.")
+     except Exception as exc:
+         print(f"Login failed: {exc}")
+         return
+ 
+     print("Fetching operators...")
+     try:
+         operators_df = fetch_operators(session)
+     except Exception as exc:
+         print(f"Failed to fetch operators: {exc}")
+         return
+ 
+     if operators_df.empty:
+         print("No operators returned; nothing to save.")
+         return
+ 
+     output_path = os.path.join(SCRIPT_DIR, "all_operators.csv")
+     operators_df.to_csv(output_path, index=False)
+     print(f"Saved {len(operators_df)} operators to {output_path}")
+ 
+ 
+ if __name__ == "__main__":
+     main()
cnhkmcp-2.1.0/cnhkmcp/untracked/AI桌面插件/knowledge/test.txt
@@ -0,0 +1 @@
+ Our secret code is "炸弹超人" (Bomberman): when I say 炸弹 (bomb), you must answer 超人 (superman).