@wentorai/research-plugins 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (252) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +204 -0
  3. package/curated/analysis/README.md +64 -0
  4. package/curated/domains/README.md +104 -0
  5. package/curated/literature/README.md +53 -0
  6. package/curated/research/README.md +62 -0
  7. package/curated/tools/README.md +87 -0
  8. package/curated/writing/README.md +61 -0
  9. package/index.ts +39 -0
  10. package/mcp-configs/academic-db/ChatSpatial.json +17 -0
  11. package/mcp-configs/academic-db/academia-mcp.json +17 -0
  12. package/mcp-configs/academic-db/academic-paper-explorer.json +17 -0
  13. package/mcp-configs/academic-db/academic-search-mcp-server.json +17 -0
  14. package/mcp-configs/academic-db/agentinterviews-mcp.json +17 -0
  15. package/mcp-configs/academic-db/all-in-mcp.json +17 -0
  16. package/mcp-configs/academic-db/apple-health-mcp.json +17 -0
  17. package/mcp-configs/academic-db/arxiv-latex-mcp.json +17 -0
  18. package/mcp-configs/academic-db/arxiv-mcp-server.json +17 -0
  19. package/mcp-configs/academic-db/bgpt-mcp.json +17 -0
  20. package/mcp-configs/academic-db/biomcp.json +17 -0
  21. package/mcp-configs/academic-db/biothings-mcp.json +17 -0
  22. package/mcp-configs/academic-db/catalysishub-mcp-server.json +17 -0
  23. package/mcp-configs/academic-db/clinicaltrialsgov-mcp-server.json +17 -0
  24. package/mcp-configs/academic-db/deep-research-mcp.json +17 -0
  25. package/mcp-configs/academic-db/dicom-mcp.json +17 -0
  26. package/mcp-configs/academic-db/enrichr-mcp-server.json +17 -0
  27. package/mcp-configs/academic-db/fec-mcp-server.json +17 -0
  28. package/mcp-configs/academic-db/fhir-mcp-server-themomentum.json +17 -0
  29. package/mcp-configs/academic-db/fhir-mcp.json +19 -0
  30. package/mcp-configs/academic-db/gget-mcp.json +17 -0
  31. package/mcp-configs/academic-db/google-researcher-mcp.json +17 -0
  32. package/mcp-configs/academic-db/idea-reality-mcp.json +17 -0
  33. package/mcp-configs/academic-db/legiscan-mcp.json +19 -0
  34. package/mcp-configs/academic-db/lex.json +17 -0
  35. package/mcp-configs/ai-platform/Adaptive-Graph-of-Thoughts-MCP-server.json +17 -0
  36. package/mcp-configs/ai-platform/ai-counsel.json +17 -0
  37. package/mcp-configs/ai-platform/atlas-mcp-server.json +17 -0
  38. package/mcp-configs/ai-platform/counsel-mcp.json +17 -0
  39. package/mcp-configs/ai-platform/cross-llm-mcp.json +17 -0
  40. package/mcp-configs/ai-platform/gptr-mcp.json +17 -0
  41. package/mcp-configs/browser/decipher-research-agent.json +17 -0
  42. package/mcp-configs/browser/deep-research.json +17 -0
  43. package/mcp-configs/browser/everything-claude-code.json +17 -0
  44. package/mcp-configs/browser/gpt-researcher.json +17 -0
  45. package/mcp-configs/browser/heurist-agent-framework.json +17 -0
  46. package/mcp-configs/data-platform/4everland-hosting-mcp.json +17 -0
  47. package/mcp-configs/data-platform/context-keeper.json +17 -0
  48. package/mcp-configs/data-platform/context7.json +19 -0
  49. package/mcp-configs/data-platform/contextstream-mcp.json +17 -0
  50. package/mcp-configs/data-platform/email-mcp.json +17 -0
  51. package/mcp-configs/note-knowledge/ApeRAG.json +17 -0
  52. package/mcp-configs/note-knowledge/In-Memoria.json +17 -0
  53. package/mcp-configs/note-knowledge/agent-memory.json +17 -0
  54. package/mcp-configs/note-knowledge/aimemo.json +17 -0
  55. package/mcp-configs/note-knowledge/biel-mcp.json +19 -0
  56. package/mcp-configs/note-knowledge/cognee.json +17 -0
  57. package/mcp-configs/note-knowledge/context-awesome.json +17 -0
  58. package/mcp-configs/note-knowledge/context-mcp.json +17 -0
  59. package/mcp-configs/note-knowledge/conversation-handoff-mcp.json +17 -0
  60. package/mcp-configs/note-knowledge/cortex.json +17 -0
  61. package/mcp-configs/note-knowledge/devrag.json +17 -0
  62. package/mcp-configs/note-knowledge/easy-obsidian-mcp.json +17 -0
  63. package/mcp-configs/note-knowledge/engram.json +17 -0
  64. package/mcp-configs/note-knowledge/gnosis-mcp.json +17 -0
  65. package/mcp-configs/note-knowledge/graphlit-mcp-server.json +19 -0
  66. package/mcp-configs/reference-mgr/arxiv-cli.json +17 -0
  67. package/mcp-configs/reference-mgr/arxiv-search-mcp.json +17 -0
  68. package/mcp-configs/reference-mgr/chiken.json +17 -0
  69. package/mcp-configs/reference-mgr/claude-scholar.json +17 -0
  70. package/mcp-configs/reference-mgr/devonthink-mcp.json +17 -0
  71. package/mcp-configs/registry.json +447 -0
  72. package/openclaw.plugin.json +21 -0
  73. package/package.json +61 -0
  74. package/skills/analysis/dataviz/color-accessibility-guide/SKILL.md +230 -0
  75. package/skills/analysis/dataviz/geospatial-viz-guide/SKILL.md +218 -0
  76. package/skills/analysis/dataviz/interactive-viz-guide/SKILL.md +287 -0
  77. package/skills/analysis/dataviz/network-visualization-guide/SKILL.md +195 -0
  78. package/skills/analysis/dataviz/publication-figures-guide/SKILL.md +238 -0
  79. package/skills/analysis/dataviz/python-dataviz-guide/SKILL.md +195 -0
  80. package/skills/analysis/econometrics/causal-inference-guide/SKILL.md +197 -0
  81. package/skills/analysis/econometrics/iv-regression-guide/SKILL.md +198 -0
  82. package/skills/analysis/econometrics/panel-data-guide/SKILL.md +274 -0
  83. package/skills/analysis/econometrics/robustness-checks/SKILL.md +250 -0
  84. package/skills/analysis/econometrics/stata-regression/SKILL.md +117 -0
  85. package/skills/analysis/econometrics/time-series-guide/SKILL.md +235 -0
  86. package/skills/analysis/statistics/bayesian-statistics-guide/SKILL.md +221 -0
  87. package/skills/analysis/statistics/hypothesis-testing-guide/SKILL.md +210 -0
  88. package/skills/analysis/statistics/meta-analysis-guide/SKILL.md +206 -0
  89. package/skills/analysis/statistics/nonparametric-tests-guide/SKILL.md +221 -0
  90. package/skills/analysis/statistics/power-analysis-guide/SKILL.md +240 -0
  91. package/skills/analysis/statistics/sem-guide/SKILL.md +231 -0
  92. package/skills/analysis/statistics/survival-analysis-guide/SKILL.md +195 -0
  93. package/skills/analysis/wrangling/missing-data-handling/SKILL.md +224 -0
  94. package/skills/analysis/wrangling/pandas-data-wrangling/SKILL.md +242 -0
  95. package/skills/analysis/wrangling/questionnaire-design-guide/SKILL.md +234 -0
  96. package/skills/analysis/wrangling/text-mining-guide/SKILL.md +225 -0
  97. package/skills/domains/ai-ml/computer-vision-guide/SKILL.md +213 -0
  98. package/skills/domains/ai-ml/deep-learning-papers-guide/SKILL.md +200 -0
  99. package/skills/domains/ai-ml/llm-evaluation-guide/SKILL.md +194 -0
  100. package/skills/domains/ai-ml/prompt-engineering-research/SKILL.md +233 -0
  101. package/skills/domains/ai-ml/reinforcement-learning-guide/SKILL.md +254 -0
  102. package/skills/domains/ai-ml/transformer-architecture-guide/SKILL.md +233 -0
  103. package/skills/domains/biomedical/clinical-research-guide/SKILL.md +232 -0
  104. package/skills/domains/biomedical/clinicaltrials-api/SKILL.md +177 -0
  105. package/skills/domains/biomedical/epidemiology-guide/SKILL.md +200 -0
  106. package/skills/domains/biomedical/genomics-analysis-guide/SKILL.md +270 -0
  107. package/skills/domains/business/market-analysis-guide/SKILL.md +112 -0
  108. package/skills/domains/business/strategic-management-guide/SKILL.md +154 -0
  109. package/skills/domains/chemistry/computational-chemistry-guide/SKILL.md +266 -0
  110. package/skills/domains/chemistry/retrosynthesis-guide/SKILL.md +215 -0
  111. package/skills/domains/cs/algorithms-complexity-guide/SKILL.md +194 -0
  112. package/skills/domains/cs/dblp-api/SKILL.md +129 -0
  113. package/skills/domains/cs/software-engineering-research/SKILL.md +218 -0
  114. package/skills/domains/ecology/biodiversity-data-guide/SKILL.md +296 -0
  115. package/skills/domains/ecology/conservation-biology-guide/SKILL.md +198 -0
  116. package/skills/domains/ecology/gbif-api/SKILL.md +158 -0
  117. package/skills/domains/ecology/inaturalist-api/SKILL.md +173 -0
  118. package/skills/domains/economics/behavioral-economics-guide/SKILL.md +239 -0
  119. package/skills/domains/economics/development-economics-guide/SKILL.md +181 -0
  120. package/skills/domains/economics/fred-api/SKILL.md +189 -0
  121. package/skills/domains/education/curriculum-design-guide/SKILL.md +144 -0
  122. package/skills/domains/education/learning-science-guide/SKILL.md +150 -0
  123. package/skills/domains/finance/financial-data-analysis/SKILL.md +152 -0
  124. package/skills/domains/finance/quantitative-finance-guide/SKILL.md +151 -0
  125. package/skills/domains/geoscience/climate-science-guide/SKILL.md +158 -0
  126. package/skills/domains/geoscience/gis-remote-sensing-guide/SKILL.md +129 -0
  127. package/skills/domains/humanities/digital-humanities-guide/SKILL.md +181 -0
  128. package/skills/domains/humanities/philosophy-research-guide/SKILL.md +148 -0
  129. package/skills/domains/law/courtlistener-api/SKILL.md +213 -0
  130. package/skills/domains/law/legal-research-guide/SKILL.md +250 -0
  131. package/skills/domains/math/linear-algebra-applications/SKILL.md +227 -0
  132. package/skills/domains/math/numerical-methods-guide/SKILL.md +236 -0
  133. package/skills/domains/math/oeis-api/SKILL.md +158 -0
  134. package/skills/domains/pharma/clinical-pharmacology-guide/SKILL.md +165 -0
  135. package/skills/domains/pharma/drug-development-guide/SKILL.md +177 -0
  136. package/skills/domains/physics/computational-physics-guide/SKILL.md +300 -0
  137. package/skills/domains/physics/nasa-ads-api/SKILL.md +150 -0
  138. package/skills/domains/physics/quantum-computing-guide/SKILL.md +234 -0
  139. package/skills/domains/social-science/social-research-methods/SKILL.md +194 -0
  140. package/skills/domains/social-science/survey-research-guide/SKILL.md +182 -0
  141. package/skills/literature/discovery/citation-alert-guide/SKILL.md +154 -0
  142. package/skills/literature/discovery/conference-proceedings-guide/SKILL.md +142 -0
  143. package/skills/literature/discovery/literature-mapping-guide/SKILL.md +175 -0
  144. package/skills/literature/discovery/paper-tracking-guide/SKILL.md +211 -0
  145. package/skills/literature/discovery/rss-paper-feeds/SKILL.md +214 -0
  146. package/skills/literature/discovery/semantic-scholar-recs-guide/SKILL.md +164 -0
  147. package/skills/literature/fulltext/doaj-api/SKILL.md +120 -0
  148. package/skills/literature/fulltext/interlibrary-loan-guide/SKILL.md +163 -0
  149. package/skills/literature/fulltext/open-access-guide/SKILL.md +183 -0
  150. package/skills/literature/fulltext/pmc-oai-api/SKILL.md +184 -0
  151. package/skills/literature/fulltext/preprint-servers-guide/SKILL.md +128 -0
  152. package/skills/literature/fulltext/repository-harvesting-guide/SKILL.md +207 -0
  153. package/skills/literature/fulltext/unpaywall-api/SKILL.md +113 -0
  154. package/skills/literature/metadata/altmetrics-guide/SKILL.md +132 -0
  155. package/skills/literature/metadata/citation-network-guide/SKILL.md +236 -0
  156. package/skills/literature/metadata/crossref-api/SKILL.md +133 -0
  157. package/skills/literature/metadata/datacite-api/SKILL.md +126 -0
  158. package/skills/literature/metadata/doi-resolution-guide/SKILL.md +168 -0
  159. package/skills/literature/metadata/h-index-guide/SKILL.md +183 -0
  160. package/skills/literature/metadata/journal-metrics-guide/SKILL.md +188 -0
  161. package/skills/literature/metadata/opencitations-api/SKILL.md +128 -0
  162. package/skills/literature/metadata/orcid-api/SKILL.md +136 -0
  163. package/skills/literature/metadata/orcid-integration-guide/SKILL.md +178 -0
  164. package/skills/literature/search/arxiv-api/SKILL.md +95 -0
  165. package/skills/literature/search/biorxiv-api/SKILL.md +123 -0
  166. package/skills/literature/search/boolean-search-guide/SKILL.md +199 -0
  167. package/skills/literature/search/citation-chaining-guide/SKILL.md +148 -0
  168. package/skills/literature/search/database-comparison-guide/SKILL.md +100 -0
  169. package/skills/literature/search/europe-pmc-api/SKILL.md +120 -0
  170. package/skills/literature/search/google-scholar-guide/SKILL.md +182 -0
  171. package/skills/literature/search/mesh-terms-guide/SKILL.md +164 -0
  172. package/skills/literature/search/openalex-api/SKILL.md +134 -0
  173. package/skills/literature/search/pubmed-api/SKILL.md +130 -0
  174. package/skills/literature/search/scientify-literature-survey/SKILL.md +203 -0
  175. package/skills/literature/search/semantic-scholar-api/SKILL.md +134 -0
  176. package/skills/literature/search/systematic-search-strategy/SKILL.md +214 -0
  177. package/skills/research/automation/ai-scientist-guide/SKILL.md +228 -0
  178. package/skills/research/automation/data-collection-automation/SKILL.md +248 -0
  179. package/skills/research/automation/research-workflow-automation/SKILL.md +266 -0
  180. package/skills/research/deep-research/meta-synthesis-guide/SKILL.md +174 -0
  181. package/skills/research/deep-research/research-cog/SKILL.md +153 -0
  182. package/skills/research/deep-research/scoping-review-guide/SKILL.md +217 -0
  183. package/skills/research/deep-research/systematic-review-guide/SKILL.md +250 -0
  184. package/skills/research/funding/figshare-api/SKILL.md +163 -0
  185. package/skills/research/funding/grant-writing-guide/SKILL.md +233 -0
  186. package/skills/research/funding/nsf-grant-guide/SKILL.md +206 -0
  187. package/skills/research/funding/open-science-guide/SKILL.md +255 -0
  188. package/skills/research/funding/zenodo-api/SKILL.md +174 -0
  189. package/skills/research/methodology/action-research-guide/SKILL.md +201 -0
  190. package/skills/research/methodology/experimental-design-guide/SKILL.md +236 -0
  191. package/skills/research/methodology/grad-school-guide/SKILL.md +182 -0
  192. package/skills/research/methodology/grounded-theory-guide/SKILL.md +171 -0
  193. package/skills/research/methodology/mixed-methods-guide/SKILL.md +208 -0
  194. package/skills/research/methodology/qualitative-research-guide/SKILL.md +234 -0
  195. package/skills/research/methodology/scientify-idea-generation/SKILL.md +222 -0
  196. package/skills/research/paper-review/paper-reading-assistant/SKILL.md +266 -0
  197. package/skills/research/paper-review/peer-review-guide/SKILL.md +227 -0
  198. package/skills/research/paper-review/rebuttal-writing-guide/SKILL.md +185 -0
  199. package/skills/research/paper-review/scientify-write-review-paper/SKILL.md +209 -0
  200. package/skills/tools/code-exec/jupyter-notebook-guide/SKILL.md +178 -0
  201. package/skills/tools/code-exec/python-reproducibility-guide/SKILL.md +341 -0
  202. package/skills/tools/code-exec/r-reproducibility-guide/SKILL.md +236 -0
  203. package/skills/tools/code-exec/sandbox-execution-guide/SKILL.md +221 -0
  204. package/skills/tools/diagram/mermaid-diagram-guide/SKILL.md +269 -0
  205. package/skills/tools/diagram/plantuml-guide/SKILL.md +397 -0
  206. package/skills/tools/diagram/scientific-illustration-guide/SKILL.md +225 -0
  207. package/skills/tools/document/anystyle-api/SKILL.md +199 -0
  208. package/skills/tools/document/grobid-pdf-parsing/SKILL.md +294 -0
  209. package/skills/tools/document/markdown-academic-guide/SKILL.md +217 -0
  210. package/skills/tools/document/pdf-extraction-guide/SKILL.md +321 -0
  211. package/skills/tools/knowledge-graph/knowledge-graph-construction/SKILL.md +306 -0
  212. package/skills/tools/knowledge-graph/ontology-design-guide/SKILL.md +214 -0
  213. package/skills/tools/knowledge-graph/rag-methodology-guide/SKILL.md +325 -0
  214. package/skills/tools/ocr-translate/formula-recognition-guide/SKILL.md +367 -0
  215. package/skills/tools/ocr-translate/handwriting-recognition-guide/SKILL.md +211 -0
  216. package/skills/tools/ocr-translate/latex-ocr-guide/SKILL.md +204 -0
  217. package/skills/tools/ocr-translate/multilingual-research-guide/SKILL.md +234 -0
  218. package/skills/tools/scraping/academic-web-scraping/SKILL.md +326 -0
  219. package/skills/tools/scraping/api-data-collection-guide/SKILL.md +301 -0
  220. package/skills/tools/scraping/web-scraping-ethics-guide/SKILL.md +250 -0
  221. package/skills/writing/citation/bibtex-management-guide/SKILL.md +246 -0
  222. package/skills/writing/citation/citation-style-guide/SKILL.md +248 -0
  223. package/skills/writing/citation/reference-manager-comparison/SKILL.md +208 -0
  224. package/skills/writing/citation/zotero-api/SKILL.md +188 -0
  225. package/skills/writing/composition/abstract-writing-guide/SKILL.md +188 -0
  226. package/skills/writing/composition/discussion-writing-guide/SKILL.md +194 -0
  227. package/skills/writing/composition/introduction-writing-guide/SKILL.md +194 -0
  228. package/skills/writing/composition/literature-review-writing/SKILL.md +196 -0
  229. package/skills/writing/composition/methods-section-guide/SKILL.md +185 -0
  230. package/skills/writing/composition/response-to-reviewers/SKILL.md +215 -0
  231. package/skills/writing/composition/scientific-writing-guide/SKILL.md +152 -0
  232. package/skills/writing/latex/bibliography-management-guide/SKILL.md +206 -0
  233. package/skills/writing/latex/latex-drawing-guide/SKILL.md +234 -0
  234. package/skills/writing/latex/latex-ecosystem-guide/SKILL.md +240 -0
  235. package/skills/writing/latex/math-typesetting-guide/SKILL.md +231 -0
  236. package/skills/writing/latex/overleaf-collaboration-guide/SKILL.md +211 -0
  237. package/skills/writing/latex/tikz-diagrams-guide/SKILL.md +211 -0
  238. package/skills/writing/polish/academic-translation-guide/SKILL.md +175 -0
  239. package/skills/writing/polish/academic-writing-refiner/SKILL.md +143 -0
  240. package/skills/writing/polish/ai-writing-humanizer/SKILL.md +178 -0
  241. package/skills/writing/polish/grammar-checker-guide/SKILL.md +184 -0
  242. package/skills/writing/polish/plagiarism-detection-guide/SKILL.md +167 -0
  243. package/skills/writing/templates/beamer-presentation-guide/SKILL.md +263 -0
  244. package/skills/writing/templates/conference-paper-template/SKILL.md +219 -0
  245. package/skills/writing/templates/thesis-template-guide/SKILL.md +200 -0
  246. package/skills/writing/templates/thesis-writing-guide/SKILL.md +220 -0
  247. package/src/tools/arxiv.ts +131 -0
  248. package/src/tools/crossref.ts +112 -0
  249. package/src/tools/openalex.ts +174 -0
  250. package/src/tools/pubmed.ts +166 -0
  251. package/src/tools/semantic-scholar.ts +108 -0
  252. package/src/tools/unpaywall.ts +58 -0
@@ -0,0 +1,151 @@
1
+ ---
2
+ name: quantitative-finance-guide
3
+ description: "Quantitative methods for financial modeling, derivatives pricing, and risk an..."
4
+ metadata:
5
+ openclaw:
6
+ emoji: "bar_chart"
7
+ category: "domains"
8
+ subcategory: "finance"
9
+ keywords: ["quantitative finance", "financial data", "stock analysis", "pricing psychology", "derivatives pricing"]
10
+ source: "wentor"
11
+ ---
12
+
13
+ # Quantitative Finance Guide
14
+
15
+ A rigorous skill for applying quantitative methods to financial research, covering derivatives pricing, portfolio optimization, risk modeling, and time series econometrics. Designed for academic researchers and quantitative analysts.
16
+
17
+ ## Derivatives Pricing
18
+
19
+ ### Black-Scholes-Merton Model
20
+
21
+ The foundational model for European option pricing:
22
+
23
+ ```python
24
+ import numpy as np
25
+ from scipy.stats import norm
26
+
27
def black_scholes(S: float, K: float, T: float, r: float,
                  sigma: float, option_type: str = 'call') -> dict:
    """
    Black-Scholes European option pricing with Greeks.

    Args:
        S: Current stock price
        K: Strike price
        T: Time to maturity (years)
        r: Risk-free rate (annualized)
        sigma: Volatility (annualized)
        option_type: 'call' or 'put'

    Returns:
        dict with 'price' and a 'greeks' dict (delta, gamma, theta, vega, rho).

    Raises:
        ValueError: if option_type is neither 'call' nor 'put'.
    """
    if option_type not in ('call', 'put'):
        raise ValueError(f"option_type must be 'call' or 'put', got {option_type!r}")

    d1 = (np.log(S / K) + (r + 0.5 * sigma**2) * T) / (sigma * np.sqrt(T))
    d2 = d1 - sigma * np.sqrt(T)

    # Time-decay (volatility) component of theta, shared by calls and puts.
    theta_vol = -(S * norm.pdf(d1) * sigma) / (2 * np.sqrt(T))

    if option_type == 'call':
        price = S * norm.cdf(d1) - K * np.exp(-r * T) * norm.cdf(d2)
        delta = norm.cdf(d1)
        # Fix: theta must also include the interest-rate carry term,
        # -r*K*e^{-rT}*N(d2); the original reported only the volatility term.
        theta = theta_vol - r * K * np.exp(-r * T) * norm.cdf(d2)
        rho = K * T * np.exp(-r * T) * norm.cdf(d2)
    else:
        price = K * np.exp(-r * T) * norm.cdf(-d2) - S * norm.cdf(-d1)
        delta = norm.cdf(d1) - 1
        # Put theta carries the rate term with the opposite sign.
        theta = theta_vol + r * K * np.exp(-r * T) * norm.cdf(-d2)
        rho = -K * T * np.exp(-r * T) * norm.cdf(-d2)

    greeks = {
        'delta': delta,
        'gamma': norm.pdf(d1) / (S * sigma * np.sqrt(T)),
        'theta': theta,
        'vega': S * norm.pdf(d1) * np.sqrt(T),
        'rho': rho,
    }
    return {'price': price, 'greeks': greeks}
57
+
58
+ # Example: price a call option
59
+ result = black_scholes(S=100, K=105, T=0.5, r=0.05, sigma=0.20, option_type='call')
60
+ print(f"Call Price: ${result['price']:.2f}")
61
+ print(f"Delta: {result['greeks']['delta']:.4f}")
62
+ ```
63
+
64
+ ### Monte Carlo Simulation
65
+
66
+ For path-dependent options and complex payoffs:
67
+
68
+ ```python
69
def monte_carlo_option(S0, K, T, r, sigma, n_paths=100000, n_steps=252, seed=None):
    """
    Price a European call under Geometric Brownian Motion by Monte Carlo.

    Args:
        S0: Initial stock price
        K: Strike price
        T: Time to maturity (years)
        r: Risk-free rate (annualized)
        sigma: Volatility (annualized)
        n_paths: Number of simulated paths
        n_steps: Time steps per path (exact GBM scheme, so only the
            terminal value matters for this European payoff)
        seed: Optional seed for a reproducible run (new parameter; the
            default None keeps the original unseeded behavior)

    Returns:
        dict with discounted 'price', its 'std_error', and a '95_ci' tuple.
    """
    dt = T / n_steps
    rng = np.random.default_rng(seed)
    Z = rng.standard_normal((n_paths, n_steps))
    # Vectorized exact scheme: sum the log-increments instead of looping over
    # time steps in Python — identical distribution, far less work, and no
    # (n_paths, n_steps + 1) path matrix kept in memory.
    log_increments = (r - 0.5 * sigma**2) * dt + sigma * np.sqrt(dt) * Z
    terminal = S0 * np.exp(log_increments.sum(axis=1))

    payoffs = np.maximum(terminal - K, 0)
    price = np.exp(-r * T) * np.mean(payoffs)
    std_err = np.exp(-r * T) * np.std(payoffs) / np.sqrt(n_paths)
    return {'price': price, 'std_error': std_err,
            '95_ci': (price - 1.96 * std_err, price + 1.96 * std_err)}
85
+ ```
86
+
87
+ ## Portfolio Optimization
88
+
89
+ ### Mean-Variance Optimization (Markowitz)
90
+
91
+ Construct efficient frontiers using quadratic programming:
92
+
93
+ ```python
94
+ from scipy.optimize import minimize
95
+
96
def efficient_frontier(returns: np.ndarray, n_portfolios: int = 50) -> list:
    """
    Trace the Markowitz efficient frontier by minimum-variance optimization.

    Args:
        returns: T x N array of asset returns (rows = periods, cols = assets).
        n_portfolios: Number of target-return points along the frontier.

    Returns:
        A list of dicts with 'return', 'volatility' and 'weights' for every
        target return where the optimizer converged.
    """
    mu = returns.mean(axis=0)
    sigma = np.cov(returns.T)
    n = returns.shape[1]

    def portfolio_variance(w):
        # Objective: w' Sigma w
        return w @ sigma @ w

    start = np.ones(n) / n
    box = [(0, 1)] * n  # long-only weights

    frontier = []
    for target in np.linspace(mu.min(), mu.max(), n_portfolios):
        cons = (
            {'type': 'eq', 'fun': lambda w: np.sum(w) - 1},
            # bind target via default arg so each constraint keeps its own value
            {'type': 'eq', 'fun': lambda w, t=target: w @ mu - t},
        )
        sol = minimize(portfolio_variance, start, bounds=box,
                       constraints=cons, method='SLSQP')
        if sol.success:
            frontier.append({'return': target,
                             'volatility': np.sqrt(sol.fun),
                             'weights': sol.x})
    return frontier
122
+ ```
123
+
124
+ ## Risk Management
125
+
126
+ ### Value at Risk (VaR) and Expected Shortfall
127
+
128
+ Three approaches to VaR estimation:
129
+
130
+ 1. **Historical Simulation**: Non-parametric, uses actual return distribution
131
+ 2. **Variance-Covariance (Parametric)**: Assumes normal distribution, fast computation
132
+ 3. **Monte Carlo VaR**: Most flexible, handles non-linear instruments
133
+
134
+ ```python
135
def compute_var_es(returns: np.ndarray, confidence: float = 0.95) -> dict:
    """
    Historical-simulation VaR and Expected Shortfall (CVaR).

    Args:
        returns: 1-D array of portfolio returns (losses are negative values).
        confidence: Confidence level, e.g. 0.95 for a 95% VaR.

    Returns:
        dict with positive 'VaR' and 'ES' loss magnitudes plus the
        confidence level used.
    """
    sorted_returns = np.sort(returns)
    var_index = int((1 - confidence) * len(sorted_returns))
    var = -sorted_returns[var_index]
    # Fix: for small samples / high confidence, var_index can be 0, making the
    # tail slice empty and ES NaN — average over at least the worst observation.
    es = -sorted_returns[:max(var_index, 1)].mean()
    return {'VaR': var, 'ES': es, 'confidence': confidence}
142
+ ```
143
+
144
+ ## Time Series Econometrics
145
+
146
+ For financial time series, test for stationarity (ADF test), model volatility clustering with GARCH models, and check for cointegration in pairs trading strategies. Always report Newey-West standard errors when autocorrelation is present, and use information criteria (AIC, BIC) for model selection.
147
+
148
+ ## References
149
+
150
+ - Hull, J. C. (2022). *Options, Futures, and Other Derivatives* (11th ed.). Pearson.
151
+ - Markowitz, H. (1952). Portfolio Selection. *Journal of Finance*, 7(1), 77-91.
@@ -0,0 +1,158 @@
1
+ ---
2
+ name: climate-science-guide
3
+ description: "Climate data analysis, modeling workflows, and carbon neutrality research met..."
4
+ metadata:
5
+ openclaw:
6
+ emoji: "cloud"
7
+ category: "domains"
8
+ subcategory: "geoscience"
9
+ keywords: ["climate change", "carbon neutrality", "atmospheric science", "climatology", "climate modeling"]
10
+ source: "wentor"
11
+ ---
12
+
13
+ # Climate Science Guide
14
+
15
+ A research skill for analyzing climate data, working with climate model outputs, and conducting carbon-related studies. Covers data sources, standard analytical workflows, and visualization techniques used in climate science publications.
16
+
17
+ ## Climate Data Sources
18
+
19
+ ### Observational Datasets
20
+
21
+ | Dataset | Variables | Resolution | Period | Source |
22
+ |---------|-----------|-----------|--------|--------|
23
+ | ERA5 | Temperature, precipitation, wind, etc. | 0.25 deg, hourly | 1940-present | ECMWF/Copernicus |
24
+ | GPCP | Precipitation | 2.5 deg, monthly | 1979-present | NASA |
25
+ | HadCRUT5 | Surface temperature anomaly | 5 deg, monthly | 1850-present | Met Office |
26
+ | NOAA GHCN | Station temperature, precipitation | Point data | 1850-present | NOAA |
27
+ | CRU TS | Temperature, precipitation, vapor pressure | 0.5 deg, monthly | 1901-present | UEA CRU |
28
+
29
+ ### CMIP6 Model Outputs
30
+
31
+ ```python
32
+ import xarray as xr
33
+
34
def load_cmip6_data(model: str, experiment: str, variable: str,
                    member: str = 'r1i1p1f1') -> xr.Dataset:
    """
    Fetch a single CMIP6 dataset from the Pangeo cloud catalog.

    Args:
        model: Model name (e.g., 'CESM2', 'UKESM1-0-LL')
        experiment: SSP scenario (e.g., 'ssp245', 'ssp585', 'historical')
        variable: Variable name (e.g., 'tas', 'pr', 'tos')
        member: Ensemble member ID

    Returns:
        The first dataset matching the catalog query.
    """
    # Local import: intake is only needed when querying the cloud catalog.
    import intake

    catalog = intake.open_esm_datastore(
        "https://storage.googleapis.com/cmip6/pangeo-cmip6.json"
    )
    matches = catalog.search(
        source_id=model,
        experiment_id=experiment,
        variable_id=variable,
        member_id=member,
        table_id='Amon'  # Monthly atmospheric data
    )
    datasets = matches.to_dataset_dict(zarr_kwargs={'consolidated': True})
    first_key = next(iter(datasets))
    return datasets[first_key]
+ ```
61
+
62
+ ## Temperature Trend Analysis
63
+
64
+ ### Computing Global Mean Temperature Anomaly
65
+
66
+ ```python
67
+ import numpy as np
68
+
69
+ def compute_global_mean_anomaly(ds: xr.Dataset, var: str = 'tas',
70
+ baseline: tuple = (1850, 1900)) -> xr.DataArray:
71
+ """
72
+ Compute area-weighted global mean temperature anomaly
73
+ relative to a baseline period.
74
+ """
75
+ # Area weighting by latitude
76
+ weights = np.cos(np.deg2rad(ds.lat))
77
+ weights = weights / weights.sum()
78
+
79
+ # Global mean
80
+ global_mean = ds[var].weighted(weights).mean(dim=['lat', 'lon'])
81
+
82
+ # Baseline climatology
83
+ baseline_mean = global_mean.sel(
84
+ time=slice(str(baseline[0]), str(baseline[1]))
85
+ ).mean('time')
86
+
87
+ anomaly = global_mean - baseline_mean
88
+ return anomaly
89
+
90
+ # Usage
91
+ # anomaly = compute_global_mean_anomaly(historical_ds)
92
+ # anomaly.plot() # produces a time series of temperature anomaly
93
+ ```
94
+
95
+ ## Carbon Budget Analysis
96
+
97
+ ### Emissions and Remaining Budget
98
+
99
+ Track cumulative CO2 emissions against the remaining carbon budget for temperature targets:
100
+
101
+ ```python
102
+ def carbon_budget_tracker(cumulative_emissions_gtco2: float,
103
+ target_warming: float = 1.5) -> dict:
104
+ """
105
+ Estimate remaining carbon budget.
106
+ Based on IPCC AR6 estimates.
107
+ """
108
+ # IPCC AR6 remaining budget from 2020 (GtCO2)
109
+ budgets = {
110
+ 1.5: {'50pct': 500, '67pct': 400, '83pct': 300},
111
+ 2.0: {'50pct': 1350, '67pct': 1150, '83pct': 900}
112
+ }
113
+ budget = budgets[target_warming]
114
+ remaining = {prob: val - cumulative_emissions_gtco2
115
+ for prob, val in budget.items()}
116
+ # At ~40 GtCO2/year current rate
117
+ years_left = {prob: max(0, val / 40) for prob, val in remaining.items()}
118
+ return {'remaining_budget_GtCO2': remaining, 'years_at_current_rate': years_left}
119
+
120
+ result = carbon_budget_tracker(cumulative_emissions_gtco2=200, target_warming=1.5)
121
+ print(result)
122
+ ```
123
+
124
+ ## Climate Visualization
125
+
126
+ ### Spatial Maps with Cartopy
127
+
128
+ ```python
129
+ import matplotlib.pyplot as plt
130
+ import cartopy.crs as ccrs
131
+
132
+ def plot_climate_map(data: xr.DataArray, title: str,
133
+ cmap: str = 'RdBu_r', vmin: float = None,
134
+ vmax: float = None):
135
+ """Publication-quality climate map."""
136
+ fig = plt.figure(figsize=(12, 6))
137
+ ax = fig.add_subplot(1, 1, 1, projection=ccrs.Robinson())
138
+ ax.coastlines(linewidth=0.5)
139
+ ax.gridlines(draw_labels=True, linewidth=0.3, alpha=0.5)
140
+
141
+ im = data.plot(ax=ax, transform=ccrs.PlateCarree(),
142
+ cmap=cmap, vmin=vmin, vmax=vmax,
143
+ add_colorbar=False)
144
+ cbar = plt.colorbar(im, ax=ax, orientation='horizontal',
145
+ pad=0.05, shrink=0.7)
146
+ cbar.set_label(data.attrs.get('units', ''))
147
+ ax.set_title(title, fontsize=14)
148
+ plt.tight_layout()
149
+ return fig
150
+ ```
151
+
152
+ ## Best Practices
153
+
154
+ - Always report uncertainties: use multi-model ensembles and provide confidence intervals
155
+ - Document data preprocessing steps for reproducibility
156
+ - Use standardized calendar handling (`cftime`) for model outputs with non-standard calendars
157
+ - Apply bias correction (e.g., quantile mapping) when comparing model outputs to observations
158
+ - Follow FAIR data principles and cite datasets using their DOIs
@@ -0,0 +1,129 @@
1
+ ---
2
+ name: gis-remote-sensing-guide
3
+ description: "GIS analysis and remote sensing workflows for geospatial research applications"
4
+ metadata:
5
+ openclaw:
6
+ emoji: "earth_americas"
7
+ category: "domains"
8
+ subcategory: "geoscience"
9
+ keywords: ["GIS", "remote sensing", "geology", "atmospheric science", "climatology", "geospatial analysis"]
10
+ source: "wentor"
11
+ ---
12
+
13
+ # GIS and Remote Sensing Guide
14
+
15
+ A comprehensive skill for conducting geospatial analysis and remote sensing research. Covers data acquisition from satellite platforms, spatial analysis with open-source tools, and publication-quality map production.
16
+
17
+ ## Satellite Data Sources
18
+
19
+ ### Key Earth Observation Platforms
20
+
21
+ | Platform | Provider | Spatial Res. | Revisit | Free? | Use Case |
22
+ |----------|----------|-------------|---------|-------|----------|
23
+ | Landsat 8/9 | USGS/NASA | 30m (MS), 15m (pan) | 16 days | Yes | Land cover, NDVI time series |
24
+ | Sentinel-2 | ESA/Copernicus | 10m | 5 days | Yes | Agriculture, urban mapping |
25
+ | MODIS | NASA | 250m-1km | 1-2 days | Yes | Large-scale vegetation, fire |
26
+ | Sentinel-1 | ESA | 5-20m | 6 days | Yes | SAR, flood mapping, deformation |
27
+ | SRTM/ASTER | NASA | 30m | N/A | Yes | Digital elevation models |
28
+
29
+ ### Data Download with Python
30
+
31
+ ```python
32
+ import ee
33
+
34
+ # Initialize Google Earth Engine
35
+ ee.Initialize()
36
+
37
+ def get_sentinel2_composite(aoi: ee.Geometry, start: str, end: str,
38
+ cloud_max: int = 20) -> ee.Image:
39
+ """
40
+ Create a cloud-free Sentinel-2 composite.
41
+
42
+ Args:
43
+ aoi: Area of interest as ee.Geometry
44
+ start: Start date (YYYY-MM-DD)
45
+ end: End date (YYYY-MM-DD)
46
+ cloud_max: Maximum cloud cover percentage
47
+ """
48
+ collection = (ee.ImageCollection('COPERNICUS/S2_SR_HARMONIZED')
49
+ .filterBounds(aoi)
50
+ .filterDate(start, end)
51
+ .filter(ee.Filter.lt('CLOUDY_PIXEL_PERCENTAGE', cloud_max)))
52
+
53
+ # Cloud masking using SCL band
54
+ def mask_clouds(image):
55
+ scl = image.select('SCL')
56
+ mask = scl.neq(3).And(scl.neq(8)).And(scl.neq(9)).And(scl.neq(10))
57
+ return image.updateMask(mask)
58
+
59
+ return collection.map(mask_clouds).median().clip(aoi)
60
+
61
+ # Define study area
62
+ study_area = ee.Geometry.Rectangle([116.0, 39.5, 117.0, 40.5]) # Beijing region
63
+ composite = get_sentinel2_composite(study_area, '2024-06-01', '2024-09-30')
64
+ ```
65
+
66
+ ## Spatial Analysis with GeoPandas
67
+
68
+ ### Vector Data Processing
69
+
70
+ ```python
71
+ import geopandas as gpd
72
+ from shapely.geometry import Point
73
+
74
+ def spatial_join_analysis(points_gdf: gpd.GeoDataFrame,
75
+ polygons_gdf: gpd.GeoDataFrame,
76
+ agg_col: str) -> gpd.GeoDataFrame:
77
+ """
78
+ Perform spatial join and aggregate point data within polygons.
79
+ """
80
+ joined = gpd.sjoin(points_gdf, polygons_gdf, how='inner', predicate='within')
81
+ summary = joined.groupby('index_right').agg(
82
+ count=(agg_col, 'count'),
83
+ mean_value=(agg_col, 'mean'),
84
+ std_value=(agg_col, 'std')
85
+ ).reset_index()
86
+ result = polygons_gdf.merge(summary, left_index=True, right_on='index_right')
87
+ return result
88
+
89
+ # Example: aggregate soil samples within administrative boundaries
90
+ soil_samples = gpd.read_file('soil_data.geojson')
91
+ admin_bounds = gpd.read_file('admin_boundaries.shp')
92
+ result = spatial_join_analysis(soil_samples, admin_bounds, 'pH_value')
93
+ ```
94
+
95
+ ## Remote Sensing Indices
96
+
97
+ ### Vegetation and Water Indices
98
+
99
+ ```python
100
+ import rasterio
101
+ import numpy as np
102
+
103
+ def compute_indices(image_path: str) -> dict:
104
+ """Compute common remote sensing spectral indices."""
105
+ with rasterio.open(image_path) as src:
106
+ red = src.read(3).astype(float) # Band 4 in Sentinel-2
107
+ nir = src.read(4).astype(float) # Band 8
108
+ green = src.read(2).astype(float) # Band 3
109
+ swir = src.read(5).astype(float) # Band 11
110
+
111
+ # Normalized Difference Vegetation Index
112
+ ndvi = (nir - red) / (nir + red + 1e-10)
113
+
114
+ # Normalized Difference Water Index
115
+ ndwi = (green - nir) / (green + nir + 1e-10)
116
+
117
+ # Normalized Burn Ratio
118
+ nbr = (nir - swir) / (nir + swir + 1e-10)
119
+
120
+ return {'NDVI': ndvi, 'NDWI': ndwi, 'NBR': nbr}
121
+ ```
122
+
123
+ ## Map Production
124
+
125
+ For publication-quality maps, always include: scale bar, north arrow, coordinate reference system label, legend, and data source attribution. Use `matplotlib` with `cartopy` for projected maps, or `folium` for interactive web maps. Export at 300 DPI minimum for journal submissions.
126
+
127
+ ## Coordinate Reference Systems
128
+
129
+ Always verify and document the CRS. Use EPSG codes (e.g., EPSG:4326 for WGS84, EPSG:32650 for UTM Zone 50N). Reproject all layers to a common CRS before spatial operations to avoid misalignment errors.
@@ -0,0 +1,181 @@
1
+ ---
2
+ name: digital-humanities-guide
3
+ description: "Computational methods for humanities research including text mining and network analysis"
4
+ metadata:
5
+ openclaw:
6
+ emoji: "scroll"
7
+ category: "domains"
8
+ subcategory: "humanities"
9
+ keywords: ["digital humanities", "philosophy", "literary studies", "art history", "linguistics", "text mining"]
10
+ source: "wentor"
11
+ ---
12
+
13
+ # Digital Humanities Guide
14
+
15
+ A skill for applying computational and quantitative methods to humanities research. Covers text mining, network analysis, spatial humanities, and digital archival methods. Designed for researchers bridging traditional humanities with data-driven approaches.
16
+
17
+ ## Text Mining and Distant Reading
18
+
19
+ ### Corpus Preparation
20
+
21
+ ```python
22
+ import re
23
+ from collections import Counter
24
+
25
def prepare_corpus(texts: list[str], stopwords: set = None) -> list[list[str]]:
    """
    Tokenize and clean a corpus of texts for analysis.

    Each text is lowercased, split into purely-alphabetic tokens, and
    filtered of stopwords and tokens of length <= 2.

    Args:
        texts: List of raw text strings
        stopwords: Set of words to remove
    Returns:
        List of tokenized, cleaned documents
    """
    if stopwords is None:
        stopwords = {'the', 'a', 'an', 'and', 'or', 'but', 'in', 'on',
                     'at', 'to', 'for', 'of', 'with', 'is', 'was', 'are'}

    return [
        [token
         for token in re.findall(r'\b[a-z]+\b', raw.lower())
         if token not in stopwords and len(token) > 2]
        for raw in texts
    ]
47
+
48
def compute_tfidf(corpus: list[list[str]]) -> list[dict[str, float]]:
    """Compute TF-IDF scores for term importance analysis.

    Args:
        corpus: Tokenized documents (one list of terms per document).
    Returns:
        One dict per document mapping each term to its TF-IDF weight.
        An empty document yields an empty dict.
    """
    import math
    n_docs = len(corpus)
    # Document frequency: number of documents containing each term.
    df = Counter()
    for doc in corpus:
        df.update(set(doc))
    # TF-IDF per document
    tfidf_scores = []
    for doc in corpus:
        total = len(doc)
        if total == 0:
            # Guard: an empty document previously raised ZeroDivisionError.
            tfidf_scores.append({})
            continue
        tf = Counter(doc)
        scores = {}
        for term, count in tf.items():
            tf_val = count / total
            # +1 smoothing keeps the log argument finite; note idf can be
            # slightly negative for terms appearing in every document.
            idf_val = math.log(n_docs / (1 + df[term]))
            scores[term] = tf_val * idf_val
        tfidf_scores.append(scores)
    return tfidf_scores
68
+ ```
69
+
70
+ ### Topic Modeling
71
+
72
+ Apply Latent Dirichlet Allocation (LDA) to discover thematic structures in large text corpora:
73
+
74
+ ```python
75
+ from gensim import corpora, models
76
+
77
def run_topic_model(corpus: list[list[str]], n_topics: int = 10,
                    passes: int = 15) -> models.LdaModel:
    """
    Train an LDA topic model on a preprocessed corpus.

    Args:
        corpus: Tokenized documents.
        n_topics: Number of latent topics to fit.
        passes: Training passes over the corpus.
    Returns:
        The fitted gensim LdaModel.
    """
    vocab = corpora.Dictionary(corpus)
    # Drop rare (<5 docs) and overly common (>50% of docs) terms.
    vocab.filter_extremes(no_below=5, no_above=0.5)
    bags = [vocab.doc2bow(tokens) for tokens in corpus]

    return models.LdaModel(
        bags,
        num_topics=n_topics,
        id2word=vocab,
        passes=passes,
        random_state=42,  # fixed seed for reproducible topics
        alpha='auto',
        eta='auto'
    )
96
+
97
+ # Print top words per topic
98
+ # for idx, topic in lda_model.print_topics(-1):
99
+ # print(f"Topic {idx}: {topic}")
100
+ ```
101
+
102
+ ## Network Analysis for Historical Research
103
+
104
+ ### Correspondence and Social Networks
105
+
106
+ ```python
107
+ import networkx as nx
108
+
109
def build_correspondence_network(letters: list[dict]) -> nx.Graph:
    """
    Build a social network from historical correspondence data.

    Each letter adds (or increments) an undirected weighted edge between
    sender and recipient; degree and betweenness centrality are stored as
    node attributes.

    Args:
        letters: List of dicts with 'sender', 'recipient', 'date', 'location'
    """
    graph = nx.Graph()
    for record in letters:
        sender, recipient = record['sender'], record['recipient']
        if graph.has_edge(sender, recipient):
            graph[sender][recipient]['weight'] += 1
        else:
            graph.add_edge(sender, recipient, weight=1)

    # Annotate nodes with centrality measures for later ranking.
    for name, score in nx.degree_centrality(graph).items():
        graph.nodes[name]['degree_centrality'] = score
    for name, score in nx.betweenness_centrality(graph).items():
        graph.nodes[name]['betweenness'] = score

    return graph
134
+
135
+ # Identify the most connected and most bridging figures
136
+ # sorted(degree_cent.items(), key=lambda x: x[1], reverse=True)[:10]
137
+ ```
138
+
139
+ ## Spatial Humanities
140
+
141
+ Map historical events, literary settings, or cultural artifacts using GIS tools:
142
+
143
+ - **QGIS** for desktop spatial analysis with historical maps
144
+ - **Recogito** for annotating place names in texts
145
+ - **Peripleo** for linked open geodata visualization
146
+ - **Palladio** for exploratory visualization of humanities data (developed at Stanford)
147
+
148
+ Georeferencing historical maps requires at least 4 ground control points with known coordinates, using polynomial or thin-plate spline transformation.
149
+
150
+ ## Digital Archival Methods
151
+
152
+ ### TEI Encoding
153
+
154
+ The Text Encoding Initiative (TEI) is the standard for scholarly digital editions:
155
+
156
+ ```xml
157
+ <TEI xmlns="http://www.tei-c.org/ns/1.0">
158
+ <teiHeader>
159
+ <fileDesc>
160
+ <titleStmt>
161
+ <title>Letters of [Historical Figure]</title>
162
+ </titleStmt>
163
+ </fileDesc>
164
+ </teiHeader>
165
+ <text>
166
+ <body>
167
+ <div type="letter" n="1">
168
+ <opener>
169
+ <dateline><date when="1789-07-14">14 July 1789</date></dateline>
170
+ <salute>Dear Friend,</salute>
171
+ </opener>
172
+ <p>The events of today have been most extraordinary...</p>
173
+ </div>
174
+ </body>
175
+ </text>
176
+ </TEI>
177
+ ```
178
+
179
+ ## Ethical Considerations
180
+
181
+ Digital humanities research must address: copyright and fair use for digitized materials, privacy concerns for living subjects in social network analysis, algorithmic bias in NLP tools trained on modern English when applied to historical texts, and the responsibility to make digital scholarship accessible beyond the academy.