opencode-skills-antigravity 1.0.11 → 1.0.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. package/bundled-skills/aegisops-ai/SKILL.md +127 -0
  2. package/bundled-skills/docs/integrations/jetski-cortex.md +3 -3
  3. package/bundled-skills/docs/integrations/jetski-gemini-loader/README.md +1 -1
  4. package/bundled-skills/docs/maintainers/repo-growth-seo.md +3 -3
  5. package/bundled-skills/docs/maintainers/skills-update-guide.md +1 -1
  6. package/bundled-skills/docs/users/bundles.md +1 -1
  7. package/bundled-skills/docs/users/claude-code-skills.md +1 -1
  8. package/bundled-skills/docs/users/gemini-cli-skills.md +1 -1
  9. package/bundled-skills/docs/users/getting-started.md +1 -1
  10. package/bundled-skills/docs/users/kiro-integration.md +1 -1
  11. package/bundled-skills/docs/users/usage.md +4 -4
  12. package/bundled-skills/docs/users/visual-guide.md +4 -4
  13. package/bundled-skills/snowflake-development/SKILL.md +228 -0
  14. package/bundled-skills/wordpress/SKILL.md +281 -4
  15. package/bundled-skills/wordpress-penetration-testing/SKILL.md +106 -1
  16. package/bundled-skills/wordpress-plugin-development/SKILL.md +296 -3
  17. package/bundled-skills/wordpress-theme-development/SKILL.md +316 -3
  18. package/bundled-skills/wordpress-woocommerce-development/SKILL.md +442 -2
  19. package/bundled-skills/xvary-stock-research/LICENSE +21 -0
  20. package/bundled-skills/xvary-stock-research/SKILL.md +103 -0
  21. package/bundled-skills/xvary-stock-research/assets/nvda-deep-dive-hero.png +0 -0
  22. package/bundled-skills/xvary-stock-research/assets/nvda-deep-dive-scenarios.png +0 -0
  23. package/bundled-skills/xvary-stock-research/assets/nvda-deep-dive-thesis.png +0 -0
  24. package/bundled-skills/xvary-stock-research/assets/social-preview.png +0 -0
  25. package/bundled-skills/xvary-stock-research/examples/nvda-analysis.md +60 -0
  26. package/bundled-skills/xvary-stock-research/references/edgar-guide.md +53 -0
  27. package/bundled-skills/xvary-stock-research/references/methodology.md +153 -0
  28. package/bundled-skills/xvary-stock-research/references/scoring.md +111 -0
  29. package/bundled-skills/xvary-stock-research/tests/test_edgar.py +90 -0
  30. package/bundled-skills/xvary-stock-research/tests/test_market.py +113 -0
  31. package/bundled-skills/xvary-stock-research/tools/edgar.py +495 -0
  32. package/bundled-skills/xvary-stock-research/tools/market.py +302 -0
  33. package/package.json +1 -1
@@ -0,0 +1,302 @@
1
+ #!/usr/bin/env python3
2
+ """Standalone market data fetcher with no API key.
3
+
4
+ Public functions:
5
+ - get_quote(ticker)
6
+ - get_ratios(ticker)
7
+
8
+ Fallback order: Yahoo -> Finviz -> Stooq
9
+
10
+ Examples:
11
+ python tools/market.py AAPL
12
+ """
13
+
14
+ from __future__ import annotations
15
+
16
+ import argparse
17
+ import csv
18
+ import io
19
+ import json
20
+ import re
21
+ from datetime import datetime, timezone
22
+ import time
23
+ from typing import Any, Optional
24
+
25
+ import requests
26
+
27
+ _TIMEOUT = 20
28
+ _MAX_RETRIES = 3
29
+ _INITIAL_BACKOFF_SECONDS = 1.0
30
+ _RETRYABLE_STATUS_CODES = {429, 500, 502, 503, 504}
31
+ _HEADERS = {
32
+ "User-Agent": "claude-code-stock-analysis-skill/1.0 (research@xvary.com)",
33
+ "Accept": "application/json,text/html;q=0.9,*/*;q=0.8",
34
+ }
35
+ _SUFFIX_MULTIPLIERS = {
36
+ "K": 1_000,
37
+ "M": 1_000_000,
38
+ "B": 1_000_000_000,
39
+ "T": 1_000_000_000_000,
40
+ }
41
+
42
+
43
+ def _iso_now() -> str:
44
+ return datetime.now(timezone.utc).replace(microsecond=0).isoformat()
45
+
46
+
47
+ def _to_float(value: Any) -> Optional[float]:
48
+ try:
49
+ if value is None:
50
+ return None
51
+ return float(value)
52
+ except (TypeError, ValueError):
53
+ return None
54
+
55
+
56
def _parse_compact(raw: str) -> Optional[float]:
    """Parse a compact human-readable number ("1.5B", "$1,234") into a float.

    Returns None for empty input, "N/A", or anything float() rejects.
    """
    cleaned = raw.strip()
    for junk in (",", "$", "~"):
        cleaned = cleaned.replace(junk, "")
    if not cleaned or cleaned.upper() == "N/A":
        return None

    multiplier: float = 1.0
    last_char = cleaned[-1].upper()
    if last_char in _SUFFIX_MULTIPLIERS:
        multiplier = _SUFFIX_MULTIPLIERS[last_char]
        cleaned = cleaned[:-1]

    try:
        return float(cleaned) * multiplier
    except ValueError:
        return None
68
+
69
+
70
+ def _parse_percent(raw: str) -> Optional[float]:
71
+ val = raw.strip().replace("%", "")
72
+ try:
73
+ if not val or val.upper() == "N/A":
74
+ return None
75
+ return float(val)
76
+ except ValueError:
77
+ return None
78
+
79
+
80
def _fetch_with_retries(
    url: str,
    parse: Any,
    retry_exceptions: tuple,
) -> Any:
    """Shared GET-with-retry loop for the JSON and text fetchers.

    Fetches *url* with the module headers/timeout, treating the status codes
    in _RETRYABLE_STATUS_CODES (plus any exception in *retry_exceptions*)
    as transient: retries with exponential backoff up to _MAX_RETRIES
    attempts, then re-raises the last error.

    :param parse: callable taking the Response and returning the result.
    :param retry_exceptions: exception classes that trigger a retry.
    """
    last_error: Optional[Exception] = None
    for attempt in range(1, _MAX_RETRIES + 1):
        try:
            response = requests.get(url, headers=_HEADERS, timeout=_TIMEOUT)
            if response.status_code in _RETRYABLE_STATUS_CODES:
                # Surface retryable statuses as exceptions so the retry
                # path below handles them uniformly with network errors.
                raise requests.HTTPError(
                    f"Retryable status {response.status_code}",
                    response=response,
                )
            response.raise_for_status()
            return parse(response)
        except retry_exceptions as exc:
            last_error = exc
            if attempt >= _MAX_RETRIES:
                break
            time.sleep(_INITIAL_BACKOFF_SECONDS * (2 ** (attempt - 1)))
    assert last_error is not None
    raise last_error


def _http_get_json(url: str) -> dict[str, Any]:
    """GET *url* and decode the body as JSON, retrying transient failures.

    ValueError is retryable here because Response.json() raises it on a
    malformed body (matching the original behavior of this fetcher).
    """
    return _fetch_with_retries(
        url,
        lambda response: response.json(),
        (requests.RequestException, ValueError),
    )


def _http_get_text(url: str) -> str:
    """GET *url* and return the decoded body text, retrying transient failures."""
    return _fetch_with_retries(
        url,
        lambda response: response.text,
        (requests.RequestException,),
    )
122
+
123
+
124
def _fetch_yahoo(ticker: str) -> Optional[dict[str, Any]]:
    """Fetch quote and ratio data for *ticker* from Yahoo's v7 quote API.

    Returns None when Yahoo returns no rows or no usable price; propagates
    network/HTTP errors from _http_get_json.
    NOTE(review): this endpoint has at times required a cookie/crumb for
    unauthenticated clients -- confirm it still responds without one.
    """
    url = f"https://query1.finance.yahoo.com/v7/finance/quote?symbols={ticker}"
    payload = _http_get_json(url)
    rows = payload.get("quoteResponse", {}).get("result", [])
    if not rows:
        return None

    quote = rows[0]
    price = _to_float(quote.get("regularMarketPrice"))
    if price is None:
        return None

    # Parse once (the original parsed dividendYield twice per call).
    # Yahoo reports dividendYield as a fraction, so scale to percent.
    dividend_yield = _to_float(quote.get("dividendYield"))
    return {
        "provider": "yahoo",
        "price": price,
        "currency": quote.get("currency", "USD"),
        "market_cap": _to_float(quote.get("marketCap")),
        "volume": _to_float(quote.get("regularMarketVolume")),
        "high_52w": _to_float(quote.get("fiftyTwoWeekHigh")),
        "low_52w": _to_float(quote.get("fiftyTwoWeekLow")),
        "pe": _to_float(quote.get("trailingPE")),
        "dividend_yield_pct": (
            dividend_yield * 100.0 if dividend_yield is not None else None
        ),
        "beta": _to_float(quote.get("beta")),
    }
152
+
153
+
154
+ def _extract_finviz_map(html: str) -> dict[str, str]:
155
+ pairs = re.findall(r"<td[^>]*>([^<]+)</td><td[^>]*>(?:<b>)?([^<]+)", html)
156
+ out: dict[str, str] = {}
157
+ for key, value in pairs:
158
+ out[key.strip()] = value.strip()
159
+ return out
160
+
161
+
162
def _fetch_finviz(ticker: str) -> Optional[dict[str, Any]]:
    """Scrape quote/ratio data for *ticker* from the Finviz quote page.

    Finviz reports USD figures only. Returns None when no price is found.
    """
    page = _http_get_text(f"https://finviz.com/quote.ashx?t={ticker.upper()}")
    fields = _extract_finviz_map(page)

    price = _parse_compact(fields.get("Price", ""))
    if price is None:
        return None

    # The 52-week range arrives as a single "low - high" cell.
    low_52w: Optional[float] = None
    high_52w: Optional[float] = None
    range_match = re.search(
        r"([0-9]+\.?[0-9]*)\s*-\s*([0-9]+\.?[0-9]*)",
        fields.get("52W Range", ""),
    )
    if range_match:
        low_52w = _to_float(range_match.group(1))
        high_52w = _to_float(range_match.group(2))

    return {
        "provider": "finviz",
        "price": price,
        "currency": "USD",
        "market_cap": _parse_compact(fields.get("Market Cap", "")),
        "volume": _parse_compact(fields.get("Volume", "")),
        "high_52w": high_52w,
        "low_52w": low_52w,
        "pe": _parse_compact(fields.get("P/E", "")),
        "dividend_yield_pct": _parse_percent(fields.get("Dividend %", "")),
        "beta": _to_float(fields.get("Beta")),
    }
191
+
192
+
193
def _fetch_stooq(ticker: str) -> Optional[dict[str, Any]]:
    """Fetch the last close for *ticker* from Stooq's CSV quote endpoint.

    Stooq only provides price/volume, so all ratio fields are None. Returns
    None for dotted symbols (not expressible in the <ticker>.us scheme) or
    when no usable close price is present.
    """
    if "." in ticker:
        return None

    url = (
        f"https://stooq.com/q/l/?s={ticker.lower()}.us"
        "&f=sd2t2ohlcv&h&e=csv"
    )
    reader = csv.DictReader(io.StringIO(_http_get_text(url)))
    first_row = next(reader, None)
    if not first_row:
        return None

    close = _to_float(first_row.get("Close"))
    if close is None:
        return None

    return {
        "provider": "stooq",
        "price": close,
        "currency": "USD",
        "market_cap": None,
        "volume": _to_float(first_row.get("Volume")),
        "high_52w": None,
        "low_52w": None,
        "pe": None,
        "dividend_yield_pct": None,
        "beta": None,
    }
221
+
222
+
223
def _collect_market_data(ticker: str) -> Optional[dict[str, Any]]:
    """Try each provider in order and return the first result with a price.

    Provider errors are swallowed deliberately: this is a best-effort
    fallback chain (Yahoo -> Finviz -> Stooq). Returns None if every
    provider fails or yields no price.
    """
    for fetch in (_fetch_yahoo, _fetch_finviz, _fetch_stooq):
        try:
            data = fetch(ticker)
        except Exception:
            data = None
        if data and data.get("price") is not None:
            return data
    return None
232
+
233
+
234
def get_quote(ticker: str) -> dict[str, Any]:
    """Return quote-level market data (price/cap/volume/52w range).

    Raises RuntimeError when no provider yields a usable price.
    """
    symbol = ticker.strip().upper()
    data = _collect_market_data(symbol)
    if not data:
        raise RuntimeError(f"No quote data available for {symbol}")

    return {
        "ticker": symbol,
        "provider": data["provider"],
        "price": data["price"],
        "currency": data.get("currency", "USD"),
        **{
            field: data.get(field)
            for field in ("market_cap", "volume", "high_52w", "low_52w")
        },
        "as_of_utc": _iso_now(),
    }
252
+
253
+
254
def get_ratios(ticker: str) -> dict[str, Any]:
    """Return ratio-level market data (P/E, dividend yield, beta).

    Prefers the first provider (Yahoo first) that yields at least one ratio;
    falls back to the first provider that produced a price at all. Raises
    RuntimeError when every provider fails.
    """
    symbol = ticker.strip().upper()

    chosen: Optional[dict[str, Any]] = None
    fallback: Optional[dict[str, Any]] = None
    for fetch in (_fetch_yahoo, _fetch_finviz, _fetch_stooq):
        try:
            data = fetch(symbol)
        except Exception:
            # Best-effort chain: a failing provider just means "try the next".
            data = None
        if not data or data.get("price") is None:
            continue
        if fallback is None:
            fallback = data
        if any(
            data.get(key) is not None
            for key in ("pe", "dividend_yield_pct", "beta")
        ):
            chosen = data
            break
    if chosen is None:
        chosen = fallback

    if not chosen:
        raise RuntimeError(f"No market data available for {symbol}")

    return {
        "ticker": symbol,
        "provider": chosen["provider"],
        "pe": chosen.get("pe"),
        "dividend_yield_pct": chosen.get("dividend_yield_pct"),
        "beta": chosen.get("beta"),
        "as_of_utc": _iso_now(),
    }
286
+
287
+
288
def _main() -> None:
    """CLI entry point: print quote + ratios for one ticker as JSON."""
    parser = argparse.ArgumentParser(description="Standalone market data fetcher")
    parser.add_argument("ticker", help="Ticker symbol, e.g. AAPL")
    parser.add_argument("--indent", type=int, default=2, help="JSON indent")
    cli = parser.parse_args()

    # NOTE: quote and ratios each walk the provider chain independently.
    report = {
        "quote": get_quote(cli.ticker),
        "ratios": get_ratios(cli.ticker),
    }
    print(json.dumps(report, indent=cli.indent, sort_keys=False))
299
+
300
+
301
# Script entry point: run the CLI only when executed directly, not on import.
if __name__ == "__main__":
    _main()
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "opencode-skills-antigravity",
3
- "version": "1.0.11",
3
+ "version": "1.0.13",
4
4
  "description": "OpenCode CLI plugin that automatically downloads and keeps Antigravity Awesome Skills up to date.",
5
5
  "main": "dist/index.js",
6
6
  "types": "dist/index.d.ts",