opencode-skills-antigravity 1.0.11 → 1.0.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bundled-skills/aegisops-ai/SKILL.md +127 -0
- package/bundled-skills/docs/integrations/jetski-cortex.md +3 -3
- package/bundled-skills/docs/integrations/jetski-gemini-loader/README.md +1 -1
- package/bundled-skills/docs/maintainers/repo-growth-seo.md +3 -3
- package/bundled-skills/docs/maintainers/skills-update-guide.md +1 -1
- package/bundled-skills/docs/users/bundles.md +1 -1
- package/bundled-skills/docs/users/claude-code-skills.md +1 -1
- package/bundled-skills/docs/users/gemini-cli-skills.md +1 -1
- package/bundled-skills/docs/users/getting-started.md +1 -1
- package/bundled-skills/docs/users/kiro-integration.md +1 -1
- package/bundled-skills/docs/users/usage.md +4 -4
- package/bundled-skills/docs/users/visual-guide.md +4 -4
- package/bundled-skills/snowflake-development/SKILL.md +228 -0
- package/bundled-skills/wordpress/SKILL.md +281 -4
- package/bundled-skills/wordpress-penetration-testing/SKILL.md +106 -1
- package/bundled-skills/wordpress-plugin-development/SKILL.md +296 -3
- package/bundled-skills/wordpress-theme-development/SKILL.md +316 -3
- package/bundled-skills/wordpress-woocommerce-development/SKILL.md +442 -2
- package/bundled-skills/xvary-stock-research/LICENSE +21 -0
- package/bundled-skills/xvary-stock-research/SKILL.md +103 -0
- package/bundled-skills/xvary-stock-research/assets/nvda-deep-dive-hero.png +0 -0
- package/bundled-skills/xvary-stock-research/assets/nvda-deep-dive-scenarios.png +0 -0
- package/bundled-skills/xvary-stock-research/assets/nvda-deep-dive-thesis.png +0 -0
- package/bundled-skills/xvary-stock-research/assets/social-preview.png +0 -0
- package/bundled-skills/xvary-stock-research/examples/nvda-analysis.md +60 -0
- package/bundled-skills/xvary-stock-research/references/edgar-guide.md +53 -0
- package/bundled-skills/xvary-stock-research/references/methodology.md +153 -0
- package/bundled-skills/xvary-stock-research/references/scoring.md +111 -0
- package/bundled-skills/xvary-stock-research/tests/test_edgar.py +90 -0
- package/bundled-skills/xvary-stock-research/tests/test_market.py +113 -0
- package/bundled-skills/xvary-stock-research/tools/edgar.py +495 -0
- package/bundled-skills/xvary-stock-research/tools/market.py +302 -0
- package/package.json +1 -1
|
@@ -0,0 +1,495 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""Standalone SEC EDGAR fetcher for claude-code-stock-analysis-skill.
|
|
3
|
+
|
|
4
|
+
Public functions:
|
|
5
|
+
- get_cik(ticker)
|
|
6
|
+
- get_company_facts(ticker)
|
|
7
|
+
- get_financials(ticker)
|
|
8
|
+
- get_filings_metadata(ticker)
|
|
9
|
+
|
|
10
|
+
Examples:
|
|
11
|
+
python tools/edgar.py AAPL
|
|
12
|
+
python tools/edgar.py NVDA --mode filings
|
|
13
|
+
"""
|
|
14
|
+
|
|
15
|
+
from __future__ import annotations
|
|
16
|
+
|
|
17
|
+
import argparse
|
|
18
|
+
import json
|
|
19
|
+
from collections import Counter, defaultdict
|
|
20
|
+
from datetime import datetime, timezone
|
|
21
|
+
import time
|
|
22
|
+
from typing import Any, Optional
|
|
23
|
+
|
|
24
|
+
import requests
|
|
25
|
+
|
|
26
|
+
# SEC endpoints: ticker->CIK lookup file, XBRL company facts, and filing
# submission history ({cik} is the zero-padded 10-digit CIK).
_SEC_CIK_LOOKUP = "https://www.sec.gov/files/company_tickers.json"
_SEC_COMPANY_FACTS = "https://data.sec.gov/api/xbrl/companyfacts/CIK{cik}.json"
_SEC_SUBMISSIONS = "https://data.sec.gov/submissions/CIK{cik}.json"

# HTTP behavior: per-request timeout (seconds) and a retry policy with
# exponential backoff starting at 1 second (see _request_json).
_TIMEOUT = 25
_MAX_RETRIES = 3
_INITIAL_BACKOFF_SECONDS = 1.0
_RETRYABLE_STATUS_CODES = {429, 500, 502, 503, 504}  # throttling + server errors

# Filing forms considered: 10-K/10-Q (US issuers), 20-F/6-K (foreign private
# issuers). Annual vs quarterly split drives snapshot scoping.
_ACCEPTED_FORMS = {"10-K", "10-Q", "20-F", "6-K"}
_ANNUAL_FORMS = {"10-K", "20-F"}
_QUARTERLY_FORMS = {"10-Q", "6-K"}

# SEC fair-access guidance asks for a descriptive User-Agent with contact info.
_HEADERS = {
    "User-Agent": "claude-code-stock-analysis-skill/1.0 (research@xvary.com)",
    "Accept": "application/json",
    "Accept-Encoding": "gzip, deflate",
}
|
|
41
|
+
|
|
42
|
+
# statement -> field -> accepted concept labels (US-GAAP + IFRS aliases)
# Each tuple is ordered by preference: earlier concepts win when a company
# reports the same field under more than one label (see
# _field_concept_priority / _best_entry).
_FIELD_CONCEPTS: dict[str, dict[str, tuple[str, ...]]] = {
    "income_statement": {
        "revenue": (
            "Revenues",
            "RevenueFromContractWithCustomerExcludingAssessedTax",
            "Revenue",
            "RevenueFromContractsWithCustomers",
            "RevenueFromRenderingOfServices",
        ),
        "gross_profit": ("GrossProfit",),
        "operating_income": ("OperatingIncomeLoss", "ProfitLossFromOperatingActivities"),
        "net_income": (
            "NetIncomeLoss",
            "ProfitLoss",
            "ProfitLossAttributableToOwnersOfParent",
        ),
        "eps_diluted": ("EarningsPerShareDiluted", "DilutedEarningsLossPerShare"),
        "eps_basic": (
            "EarningsPerShareBasic",
            "BasicEarningsLossPerShare",
            "BasicAndDilutedEarningsLossPerShare",
        ),
        "r_and_d": ("ResearchAndDevelopmentExpense",),
        "sga": (
            "SellingGeneralAndAdministrativeExpense",
            "GeneralAndAdministrativeExpense",
        ),
        "interest_expense": (
            "InterestExpense",
            "FinanceCosts",
            "BorrowingCostsRecognisedAsExpense",
        ),
        "income_tax_expense": ("IncomeTaxExpenseBenefit",),
    },
    "balance_sheet": {
        "total_assets": ("Assets",),
        "current_assets": ("AssetsCurrent", "CurrentAssets"),
        "current_liabilities": ("LiabilitiesCurrent", "CurrentLiabilities"),
        "total_liabilities": ("Liabilities",),
        "stockholders_equity": ("StockholdersEquity", "Equity"),
        "cash_and_equivalents": (
            "CashAndCashEquivalentsAtCarryingValue",
            "CashAndCashEquivalents",
        ),
        "long_term_debt": ("LongTermDebt", "LongTermDebtNoncurrent", "LongtermBorrowings"),
        "short_term_borrowings": (
            "ShortTermBorrowings",
            "CurrentPortionOfLongtermBorrowings",
        ),
        "shares_outstanding": (
            "CommonStockSharesOutstanding",
            "EntityCommonStockSharesOutstanding",
            "NumberOfSharesIssued",
            "ShareIssued",
            "OrdinarySharesNumber",
        ),
    },
    "cash_flow": {
        "operating_cash_flow": (
            "NetCashProvidedByOperatingActivities",
            "OperatingCashFlow",
            "CashFlowsFromUsedInOperatingActivities",
            "NetCashProvidedByUsedInOperatingActivities",
        ),
        "capex": (
            "PaymentsToAcquirePropertyPlantAndEquipment",
            "PurchaseOfPropertyPlantAndEquipmentClassifiedAsInvestingActivities",
        ),
        "depreciation_amortization": (
            "DepreciationDepletionAndAmortization",
            "Depreciation",
            "DepreciationAndAmortization",
            "DepreciationExpense",
        ),
        "stock_based_compensation": (
            "StockBasedCompensation",
            "ShareBasedCompensation",
            "AdjustmentsForSharebasedPayments",
        ),
        "dividends_paid": (
            "DividendsCommonStockCash",
            "DividendsPaid",
            "DividendsPaidOrdinarySharesPerShare",
        ),
    },
}
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
def _concept_map() -> dict[str, tuple[str, str]]:
    """Invert _FIELD_CONCEPTS: XBRL concept label -> (statement, field)."""
    return {
        concept: (statement, field)
        for statement, fields in _FIELD_CONCEPTS.items()
        for field, concepts in fields.items()
        for concept in concepts
    }


_CONCEPT_MAP = _concept_map()
|
|
141
|
+
|
|
142
|
+
|
|
143
|
+
def _field_concept_priority() -> dict[tuple[str, str], dict[str, int]]:
    """Rank each field's accepted concepts; lower rank = more preferred."""
    return {
        (statement, field): {concept: rank for rank, concept in enumerate(concepts)}
        for statement, fields in _FIELD_CONCEPTS.items()
        for field, concepts in fields.items()
    }


_FIELD_CONCEPT_PRIORITY = _field_concept_priority()
|
|
154
|
+
|
|
155
|
+
|
|
156
|
+
def _session() -> requests.Session:
    """Create a requests session pre-loaded with the SEC request headers."""
    session = requests.Session()
    session.headers.update(_HEADERS)
    return session
|
|
160
|
+
|
|
161
|
+
|
|
162
|
+
def _request_json(url: str, session: requests.Session) -> dict[str, Any]:
    """GET *url* with *session* and decode the JSON body, retrying transient failures.

    Retries network errors, JSON decode errors (``ValueError``), and HTTP
    statuses listed in ``_RETRYABLE_STATUS_CODES`` up to ``_MAX_RETRIES``
    attempts with exponential backoff (1s, 2s, 4s, ...).

    Raises:
        requests.RequestException | ValueError: the last failure once all
            retries are exhausted.
    """
    last_error: Optional[Exception] = None
    for attempt in range(1, _MAX_RETRIES + 1):
        try:
            response = session.get(url, timeout=_TIMEOUT)
            if response.status_code in _RETRYABLE_STATUS_CODES:
                # Promote throttling / server errors to retryable exceptions.
                raise requests.HTTPError(
                    f"Retryable status {response.status_code}",
                    response=response,
                )
            response.raise_for_status()
            return response.json()
        except (requests.RequestException, ValueError) as exc:
            last_error = exc
            if attempt >= _MAX_RETRIES:
                break
            # Exponential backoff: 1.0, 2.0, 4.0, ... seconds.
            time.sleep(_INITIAL_BACKOFF_SECONDS * (2 ** (attempt - 1)))
    # Explicit guard instead of `assert`: assertions are stripped under
    # `python -O`, which would turn an exhausted retry loop into `raise None`.
    if last_error is None:
        raise RuntimeError(f"Retry loop exited without an error for {url}")
    raise last_error
|
|
182
|
+
|
|
183
|
+
|
|
184
|
+
def _variants(ticker: str) -> list[str]:
|
|
185
|
+
t = ticker.strip().upper()
|
|
186
|
+
candidates = [
|
|
187
|
+
t,
|
|
188
|
+
t.replace(".", "-"),
|
|
189
|
+
t.replace("-", "."),
|
|
190
|
+
t.replace(".", ""),
|
|
191
|
+
t.split(".")[0],
|
|
192
|
+
t.split("-")[0],
|
|
193
|
+
]
|
|
194
|
+
out: list[str] = []
|
|
195
|
+
for c in candidates:
|
|
196
|
+
if c and c not in out:
|
|
197
|
+
out.append(c)
|
|
198
|
+
return out
|
|
199
|
+
|
|
200
|
+
|
|
201
|
+
def _parse_period_months(start: Optional[str], end: Optional[str]) -> Optional[int]:
|
|
202
|
+
if not end:
|
|
203
|
+
return None
|
|
204
|
+
if not start:
|
|
205
|
+
return 0
|
|
206
|
+
try:
|
|
207
|
+
s = datetime.strptime(start, "%Y-%m-%d")
|
|
208
|
+
e = datetime.strptime(end, "%Y-%m-%d")
|
|
209
|
+
except ValueError:
|
|
210
|
+
return None
|
|
211
|
+
days = (e - s).days
|
|
212
|
+
if days <= 0:
|
|
213
|
+
return 0
|
|
214
|
+
if days <= 120:
|
|
215
|
+
return 3
|
|
216
|
+
if days <= 210:
|
|
217
|
+
return 6
|
|
218
|
+
if days <= 310:
|
|
219
|
+
return 9
|
|
220
|
+
return 12
|
|
221
|
+
|
|
222
|
+
|
|
223
|
+
def _is_quarterly(form: str, period_months: Optional[int]) -> bool:
    """True when the fact came from a quarterly form or spans roughly one quarter."""
    return form in _QUARTERLY_FORMS or (
        period_months is not None and 1 <= period_months <= 4
    )
|
|
227
|
+
|
|
228
|
+
|
|
229
|
+
def _to_float(value: Any) -> Optional[float]:
|
|
230
|
+
try:
|
|
231
|
+
if value is None:
|
|
232
|
+
return None
|
|
233
|
+
return float(value)
|
|
234
|
+
except (TypeError, ValueError):
|
|
235
|
+
return None
|
|
236
|
+
|
|
237
|
+
|
|
238
|
+
def get_cik(ticker: str) -> Optional[str]:
    """Resolve *ticker* to its zero-padded 10-digit SEC CIK, or None.

    Downloads the SEC's full ticker table, then tries each spelling from
    _variants until one matches.
    """
    with _session() as session:
        table = _request_json(_SEC_CIK_LOOKUP, session)

    by_symbol: dict[str, str] = {}
    for row in table.values():
        if not isinstance(row, dict):
            continue
        symbol = str(row.get("ticker", "")).strip().upper()
        cik_value = row.get("cik_str")
        if symbol and cik_value is not None:
            by_symbol[symbol] = str(cik_value).zfill(10)

    return next(
        (by_symbol[variant] for variant in _variants(ticker) if variant in by_symbol),
        None,
    )
|
|
254
|
+
|
|
255
|
+
|
|
256
|
+
def get_company_facts(ticker: str) -> dict[str, Any]:
    """Fetch the raw EDGAR companyfacts payload for *ticker*.

    Returns a dict with the normalized ticker, CIK, entity name, the
    ``facts`` sub-tree, the full raw payload, and a UTC retrieval stamp.

    Raises:
        ValueError: when the ticker cannot be resolved to a CIK.
    """
    symbol = ticker.strip().upper()
    cik = get_cik(symbol)
    if not cik:
        raise ValueError(f"CIK not found for ticker: {symbol}")
    with _session() as session:
        payload = _request_json(_SEC_COMPANY_FACTS.format(cik=cik), session)
    return {
        "ticker": symbol,
        "cik": cik,
        "entity_name": payload.get("entityName", symbol),
        "facts": payload.get("facts", {}),
        "raw": payload,
        "retrieved_utc": datetime.now(timezone.utc).replace(microsecond=0).isoformat(),
    }
|
|
272
|
+
|
|
273
|
+
|
|
274
|
+
def get_filings_metadata(ticker: str, limit: int = 10) -> list[dict[str, Any]]:
    """Return up to *limit* recent SEC filings of the common report forms.

    Raises:
        ValueError: when the ticker cannot be resolved to a CIK.
    """
    symbol = ticker.strip().upper()
    cik = get_cik(symbol)
    if not cik:
        raise ValueError(f"CIK not found for ticker: {symbol}")

    with _session() as session:
        payload = _request_json(_SEC_SUBMISSIONS.format(cik=cik), session)

    recent = payload.get("filings", {}).get("recent", {})
    forms = recent.get("form", [])
    # The submissions feed stores filings as parallel arrays; columns can be
    # ragged, so missing cells become None.
    columns = {
        "filing_date": recent.get("filingDate", []),
        "report_date": recent.get("reportDate", []),
        "accession_number": recent.get("accessionNumber", []),
        "primary_document": recent.get("primaryDocument", []),
    }

    results: list[dict[str, Any]] = []
    for idx, form in enumerate(forms):
        if form not in _ACCEPTED_FORMS:
            continue
        entry: dict[str, Any] = {"form": form}
        for key, values in columns.items():
            entry[key] = values[idx] if idx < len(values) else None
        results.append(entry)
        if len(results) >= limit:
            break
    return results
|
|
307
|
+
|
|
308
|
+
|
|
309
|
+
def _extract_line_items(company_facts: dict[str, Any]) -> dict[tuple[str, str], list[dict[str, Any]]]:
    """Flatten a companyfacts payload into records keyed by (statement, field).

    Walks the us-gaap and ifrs-full namespaces and keeps only concepts that
    appear in _CONCEPT_MAP, reported on an accepted form, with a numeric
    value and an end date.
    """
    facts_root = company_facts.get("facts", {})
    collected: dict[tuple[str, str], list[dict[str, Any]]] = defaultdict(list)

    for namespace in ("us-gaap", "ifrs-full"):
        concepts = facts_root.get(namespace, {})
        if not isinstance(concepts, dict):
            continue
        for concept, concept_data in concepts.items():
            key = _CONCEPT_MAP.get(concept)
            if key is None:
                continue
            statement, field = key
            units = concept_data.get("units", {})
            if not isinstance(units, dict):
                continue
            for unit, facts in units.items():
                for fact in facts:
                    form = fact.get("form", "")
                    if form not in _ACCEPTED_FORMS:
                        continue
                    value = _to_float(fact.get("val"))
                    if value is None:
                        continue
                    end = fact.get("end")
                    if not end:
                        continue
                    start = fact.get("start")
                    collected[(statement, field)].append(
                        {
                            "value": value,
                            "unit": unit,
                            "form": form,
                            "period_end": end,
                            "period_start": start,
                            "period_months": _parse_period_months(start, end),
                            "filed": fact.get("filed"),
                            "concept": concept,
                            "namespace": namespace,
                        }
                    )
    return collected
|
|
351
|
+
|
|
352
|
+
|
|
353
|
+
def _best_entry(
    records: list[dict[str, Any]],
    quarterly: bool,
    statement: str,
    field: str,
) -> Optional[dict[str, Any]]:
    """Pick the single most authoritative record for one (statement, field).

    Selection order: scope to quarterly or annual records, keep only the
    highest-priority concept for the field, keep only the most common unit,
    then take the latest period end (ties broken by filing date).
    """
    def _in_scope(record: dict[str, Any]) -> bool:
        is_q = _is_quarterly(record.get("form", ""), record.get("period_months"))
        if quarterly:
            return is_q
        return not is_q and record.get("form") in _ANNUAL_FORMS

    scoped = [record for record in records if _in_scope(record)]
    if not scoped:
        return None

    priority = _FIELD_CONCEPT_PRIORITY.get((statement, field), {})
    if priority:
        # Unknown concepts rank far below every configured one.
        fallback_rank = len(priority) + 100

        def _rank(record: dict[str, Any]) -> int:
            return priority.get(record.get("concept", ""), fallback_rank)

        top_rank = min(_rank(record) for record in scoped)
        scoped = [record for record in scoped if _rank(record) == top_rank]

    # Majority vote on the unit so mixed-unit series are not compared.
    dominant_unit = Counter(record.get("unit") for record in scoped).most_common(1)[0][0]
    scoped = [record for record in scoped if record.get("unit") == dominant_unit]
    return max(scoped, key=lambda record: (record.get("period_end", ""), record.get("filed", "")))
|
|
387
|
+
|
|
388
|
+
|
|
389
|
+
def _build_snapshot(
    line_items: dict[tuple[str, str], list[dict[str, Any]]],
    quarterly: bool,
) -> tuple[dict[str, dict[str, float]], dict[str, dict[str, Any]], Optional[str]]:
    """Collapse extracted line items into one value per statement field.

    Returns (statements, per-field source metadata keyed "statement.field",
    latest period end seen across all chosen records).
    """
    statements: dict[str, dict[str, float]] = {
        "income_statement": {},
        "balance_sheet": {},
        "cash_flow": {},
    }
    sources: dict[str, dict[str, Any]] = {}
    latest_period: Optional[str] = None

    for (statement, field), records in line_items.items():
        chosen = _best_entry(
            records,
            quarterly=quarterly,
            statement=statement,
            field=field,
        )
        if not chosen:
            continue
        statements[statement][field] = chosen["value"]
        sources[f"{statement}.{field}"] = {
            "form": chosen.get("form"),
            "filed": chosen.get("filed"),
            "period_end": chosen.get("period_end"),
            "unit": chosen.get("unit"),
            "concept": chosen.get("concept"),
            "namespace": chosen.get("namespace"),
        }
        # ISO dates compare correctly as strings; track the newest one.
        end = chosen.get("period_end")
        if end and (latest_period is None or end > latest_period):
            latest_period = end

    return statements, sources, latest_period
|
|
424
|
+
|
|
425
|
+
|
|
426
|
+
def get_financials(ticker: str) -> dict[str, Any]:
    """Return normalized annual and quarterly financial snapshots for *ticker*.

    Raises:
        ValueError: when the ticker cannot be resolved to a CIK.
    """
    company = get_company_facts(ticker)
    line_items = _extract_line_items(company)

    def _snapshot(quarterly: bool) -> dict[str, Any]:
        # Shared shape for both the annual and quarterly sections.
        statements, sources, period_end = _build_snapshot(line_items, quarterly=quarterly)
        return {
            "period_end": period_end,
            "statements": statements,
            "sources": sources,
        }

    return {
        "ticker": company["ticker"],
        "cik": company["cik"],
        "entity_name": company["entity_name"],
        "annual": _snapshot(quarterly=False),
        "quarterly": _snapshot(quarterly=True),
        "retrieved_utc": datetime.now(timezone.utc).replace(microsecond=0).isoformat(),
    }
|
|
454
|
+
|
|
455
|
+
|
|
456
|
+
def _main() -> None:
    """CLI entry point: print EDGAR data for one ticker as JSON to stdout."""
    parser = argparse.ArgumentParser(description="Standalone EDGAR fetcher")
    parser.add_argument("ticker", help="Ticker symbol, e.g. AAPL")
    parser.add_argument(
        "--mode",
        default="financials",
        choices=("financials", "facts", "filings"),
        help="Output mode",
    )
    parser.add_argument("--indent", type=int, default=2, help="JSON indent")
    args = parser.parse_args()

    if args.mode == "facts":
        facts = get_company_facts(args.ticker)
        # The full facts payload is enormous; emit only identifying fields.
        payload: dict[str, Any] = {
            "ticker": facts["ticker"],
            "cik": facts["cik"],
            "entity_name": facts["entity_name"],
            "namespaces": list(facts.get("facts", {}).keys()),
            "retrieved_utc": facts.get("retrieved_utc"),
        }
    elif args.mode == "filings":
        payload = {
            "ticker": args.ticker.strip().upper(),
            "filings": get_filings_metadata(args.ticker),
            "retrieved_utc": datetime.now(timezone.utc).replace(microsecond=0).isoformat(),
        }
    else:
        payload = get_financials(args.ticker)

    print(json.dumps(payload, indent=args.indent, sort_keys=False))


if __name__ == "__main__":
    _main()
|