scraper2-hj3415 2.4.1__py3-none-any.whl → 2.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (135)
  1. scraper2_hj3415/app/adapters/out/playwright/browser.py +26 -0
  2. {scraper2 → scraper2_hj3415/app}/adapters/out/playwright/browser_factory.py +7 -7
  3. scraper2_hj3415/app/adapters/out/playwright/capabilities/__init__.py +18 -0
  4. scraper2_hj3415/app/adapters/out/playwright/capabilities/_base.py +19 -0
  5. scraper2_hj3415/app/adapters/out/playwright/capabilities/interaction.py +37 -0
  6. scraper2_hj3415/app/adapters/out/playwright/capabilities/navigation.py +24 -0
  7. scraper2_hj3415/app/adapters/out/playwright/capabilities/scope.py +84 -0
  8. scraper2_hj3415/app/adapters/out/playwright/capabilities/table.py +90 -0
  9. scraper2_hj3415/app/adapters/out/playwright/capabilities/text.py +25 -0
  10. scraper2_hj3415/app/adapters/out/playwright/capabilities/wait.py +96 -0
  11. {scraper2 → scraper2_hj3415/app}/adapters/out/playwright/session.py +1 -1
  12. scraper2_hj3415/app/adapters/out/sinks/memory_sink.py +25 -0
  13. scraper2_hj3415/app/adapters/out/sinks/mongo_sink.py +63 -0
  14. {scraper2/adapters/out/sinks/memory → scraper2_hj3415/app/adapters/out/sinks}/store.py +14 -5
  15. scraper2_hj3415/app/adapters/site/wisereport_playwright.py +379 -0
  16. scraper2_hj3415/app/composition.py +225 -0
  17. scraper2_hj3415/app/domain/blocks.py +61 -0
  18. scraper2_hj3415/app/domain/constants.py +33 -0
  19. scraper2_hj3415/app/domain/doc.py +16 -0
  20. scraper2_hj3415/app/domain/endpoint.py +11 -0
  21. scraper2_hj3415/app/domain/series.py +11 -0
  22. scraper2_hj3415/app/domain/types.py +19 -0
  23. scraper2_hj3415/app/parsing/_normalize/label.py +92 -0
  24. scraper2_hj3415/app/parsing/_normalize/table.py +53 -0
  25. scraper2_hj3415/app/parsing/_normalize/text.py +31 -0
  26. scraper2_hj3415/app/parsing/_normalize/values.py +70 -0
  27. scraper2_hj3415/app/parsing/_tables/html_table.py +89 -0
  28. scraper2_hj3415/app/parsing/c101/__init__.py +0 -0
  29. scraper2_hj3415/app/parsing/c101/_sise_normalizer.py +103 -0
  30. scraper2_hj3415/app/parsing/c101/company_overview.py +47 -0
  31. scraper2_hj3415/app/parsing/c101/earning_surprise.py +217 -0
  32. scraper2_hj3415/app/parsing/c101/fundamentals.py +95 -0
  33. scraper2_hj3415/app/parsing/c101/major_shareholders.py +57 -0
  34. scraper2_hj3415/app/parsing/c101/sise.py +47 -0
  35. scraper2_hj3415/app/parsing/c101/summary_cmp.py +87 -0
  36. scraper2_hj3415/app/parsing/c101/yearly_consensus.py +197 -0
  37. scraper2_hj3415/app/parsing/c101_parser.py +45 -0
  38. scraper2_hj3415/app/parsing/c103_parser.py +22 -0
  39. scraper2_hj3415/app/parsing/c104_parser.py +26 -0
  40. scraper2_hj3415/app/parsing/c106_parser.py +137 -0
  41. scraper2_hj3415/app/parsing/c108_parser.py +254 -0
  42. scraper2_hj3415/app/ports/__init__.py +0 -0
  43. scraper2_hj3415/app/ports/browser/__init__.py +0 -0
  44. scraper2_hj3415/app/ports/browser/browser_factory_port.py +9 -0
  45. scraper2_hj3415/app/ports/browser/browser_port.py +32 -0
  46. scraper2_hj3415/app/ports/browser/capabilities/__init__.py +15 -0
  47. scraper2_hj3415/app/ports/browser/capabilities/interaction.py +27 -0
  48. scraper2_hj3415/app/ports/browser/capabilities/navigation.py +18 -0
  49. scraper2_hj3415/app/ports/browser/capabilities/scope.py +66 -0
  50. scraper2_hj3415/app/ports/browser/capabilities/table.py +28 -0
  51. scraper2_hj3415/app/ports/browser/capabilities/text.py +16 -0
  52. scraper2_hj3415/app/ports/browser/capabilities/wait.py +51 -0
  53. scraper2_hj3415/app/ports/ingest/__init__.py +0 -0
  54. scraper2_hj3415/app/ports/ingest/nfs_ingest_port.py +28 -0
  55. scraper2_hj3415/app/ports/sinks/__init__.py +0 -0
  56. scraper2_hj3415/app/ports/sinks/nfs_sink_port.py +20 -0
  57. scraper2_hj3415/app/ports/site/__init__.py +0 -0
  58. scraper2_hj3415/app/ports/site/wisereport_port.py +30 -0
  59. scraper2_hj3415/app/services/__init__.py +0 -0
  60. scraper2_hj3415/app/services/fetch/__init__.py +0 -0
  61. scraper2_hj3415/app/services/fetch/fetch_c101.py +59 -0
  62. scraper2_hj3415/app/services/fetch/fetch_c103.py +121 -0
  63. scraper2_hj3415/app/services/fetch/fetch_c104.py +160 -0
  64. scraper2_hj3415/app/services/fetch/fetch_c106.py +90 -0
  65. scraper2_hj3415/app/services/fetch/fetch_c108.py +59 -0
  66. scraper2_hj3415/app/services/nfs_doc_builders.py +304 -0
  67. scraper2_hj3415/app/usecases/__init__.py +0 -0
  68. scraper2_hj3415/app/usecases/ingest/__init__.py +0 -0
  69. scraper2_hj3415/app/usecases/ingest/ingest_c101.py +111 -0
  70. scraper2_hj3415/app/usecases/ingest/ingest_c103.py +162 -0
  71. scraper2_hj3415/app/usecases/ingest/ingest_c104.py +182 -0
  72. scraper2_hj3415/app/usecases/ingest/ingest_c106.py +136 -0
  73. scraper2_hj3415/app/usecases/ingest/ingest_c108.py +122 -0
  74. scraper2/main.py → scraper2_hj3415/cli.py +45 -72
  75. {scraper2_hj3415-2.4.1.dist-info → scraper2_hj3415-2.7.0.dist-info}/METADATA +3 -1
  76. scraper2_hj3415-2.7.0.dist-info/RECORD +93 -0
  77. scraper2_hj3415-2.7.0.dist-info/entry_points.txt +3 -0
  78. scraper2/adapters/out/playwright/browser.py +0 -102
  79. scraper2/adapters/out/sinks/memory/__init__.py +0 -15
  80. scraper2/adapters/out/sinks/memory/c101_memory_sink.py +0 -26
  81. scraper2/adapters/out/sinks/memory/c103_memory_sink.py +0 -26
  82. scraper2/adapters/out/sinks/memory/c104_memory_sink.py +0 -26
  83. scraper2/adapters/out/sinks/memory/c106_memory_sink.py +0 -26
  84. scraper2/adapters/out/sinks/memory/c108_memory_sink.py +0 -26
  85. scraper2/adapters/out/sinks/mongo/__init__.py +0 -14
  86. scraper2/adapters/out/sinks/mongo/c101_mongo_sink.py +0 -43
  87. scraper2/adapters/out/sinks/mongo/c103_mongo_sink.py +0 -41
  88. scraper2/adapters/out/sinks/mongo/c104_mongo_sink.py +0 -41
  89. scraper2/adapters/out/sinks/mongo/c106_mongo_sink.py +0 -41
  90. scraper2/adapters/out/sinks/mongo/c108_mongo_sink.py +0 -41
  91. scraper2/app/composition.py +0 -204
  92. scraper2/app/parsing/_converters.py +0 -85
  93. scraper2/app/parsing/_normalize.py +0 -134
  94. scraper2/app/parsing/c101_parser.py +0 -143
  95. scraper2/app/parsing/c103_parser.py +0 -128
  96. scraper2/app/parsing/c104_parser.py +0 -143
  97. scraper2/app/parsing/c106_parser.py +0 -153
  98. scraper2/app/parsing/c108_parser.py +0 -65
  99. scraper2/app/ports/browser/browser_factory_port.py +0 -11
  100. scraper2/app/ports/browser/browser_port.py +0 -22
  101. scraper2/app/ports/ingest_port.py +0 -14
  102. scraper2/app/ports/sinks/base_sink_port.py +0 -14
  103. scraper2/app/ports/sinks/c101_sink_port.py +0 -9
  104. scraper2/app/ports/sinks/c103_sink_port.py +0 -9
  105. scraper2/app/ports/sinks/c104_sink_port.py +0 -9
  106. scraper2/app/ports/sinks/c106_sink_port.py +0 -9
  107. scraper2/app/ports/sinks/c108_sink_port.py +0 -9
  108. scraper2/app/usecases/fetch/fetch_c101.py +0 -43
  109. scraper2/app/usecases/fetch/fetch_c103.py +0 -103
  110. scraper2/app/usecases/fetch/fetch_c104.py +0 -76
  111. scraper2/app/usecases/fetch/fetch_c106.py +0 -90
  112. scraper2/app/usecases/fetch/fetch_c108.py +0 -49
  113. scraper2/app/usecases/ingest/ingest_c101.py +0 -36
  114. scraper2/app/usecases/ingest/ingest_c103.py +0 -37
  115. scraper2/app/usecases/ingest/ingest_c104.py +0 -37
  116. scraper2/app/usecases/ingest/ingest_c106.py +0 -38
  117. scraper2/app/usecases/ingest/ingest_c108.py +0 -39
  118. scraper2_hj3415-2.4.1.dist-info/RECORD +0 -63
  119. scraper2_hj3415-2.4.1.dist-info/entry_points.txt +0 -3
  120. {scraper2 → scraper2_hj3415}/.DS_Store +0 -0
  121. {scraper2 → scraper2_hj3415}/__init__.py +0 -0
  122. {scraper2/adapters/out → scraper2_hj3415/app}/__init__.py +0 -0
  123. {scraper2/adapters/out/playwright → scraper2_hj3415/app/adapters}/__init__.py +0 -0
  124. {scraper2 → scraper2_hj3415/app}/adapters/out/.DS_Store +0 -0
  125. {scraper2/app → scraper2_hj3415/app/adapters/out}/__init__.py +0 -0
  126. {scraper2/app/parsing → scraper2_hj3415/app/adapters/out/playwright}/__init__.py +0 -0
  127. {scraper2 → scraper2_hj3415/app}/adapters/out/sinks/.DS_Store +0 -0
  128. {scraper2/app/ports → scraper2_hj3415/app/adapters/out/sinks}/__init__.py +0 -0
  129. {scraper2/app/ports/browser → scraper2_hj3415/app/adapters/site}/__init__.py +0 -0
  130. {scraper2/app/ports/sinks → scraper2_hj3415/app/domain}/__init__.py +0 -0
  131. {scraper2/app/usecases → scraper2_hj3415/app/parsing}/__init__.py +0 -0
  132. {scraper2/app/usecases/fetch → scraper2_hj3415/app/parsing/_normalize}/__init__.py +0 -0
  133. {scraper2/app/usecases/ingest → scraper2_hj3415/app/parsing/_tables}/__init__.py +0 -0
  134. {scraper2_hj3415-2.4.1.dist-info → scraper2_hj3415-2.7.0.dist-info}/WHEEL +0 -0
  135. {scraper2_hj3415-2.4.1.dist-info → scraper2_hj3415-2.7.0.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,45 @@
+ from __future__ import annotations
+
+ from typing import Any
+ from scraper2_hj3415.app.ports.browser.browser_port import BrowserPort
+ from logging_hj3415 import logger
+
+ from .c101.sise import parse_c101_sise_table
+ from .c101.earning_surprise import parse_c101_earnings_surprise_table
+ from .c101.fundamentals import parse_c101_fundamentals_table
+ from .c101.major_shareholders import parse_c101_major_shareholders
+ from .c101.company_overview import parse_c101_company_overview
+ from .c101.summary_cmp import parse_c101_summary_cmp_table
+ from .c101.yearly_consensus import parse_c101_yearly_consensus_table
+
+ async def parse_c101_to_dict(browser: BrowserPort) -> dict[str, list[dict[str, Any]]]:
+     parsed_summary_cmp = await parse_c101_summary_cmp_table(browser)
+     logger.debug(f"parsed_summary_cmp data: {parsed_summary_cmp}")
+
+     parsed_sise = await parse_c101_sise_table(browser)
+     logger.debug(f"parsed_sise data: {parsed_sise}")
+
+     parsed_company_overview = await parse_c101_company_overview(browser)
+     logger.debug(f"parsed_company_overview data: {parsed_company_overview}")
+
+     parsed_major_shareholders = await parse_c101_major_shareholders(browser)
+     logger.debug(f"parsed_major_shareholders data: {parsed_major_shareholders}")
+
+     parsed_fundamentals = await parse_c101_fundamentals_table(browser)
+     logger.debug(f"parsed_fundamentals data: {parsed_fundamentals}")
+
+     parsed_earnings_surprise = await parse_c101_earnings_surprise_table(browser)
+     logger.debug(f"parsed_earnings_surprise data: {parsed_earnings_surprise}")
+
+     parsed_yearly_consensus = await parse_c101_yearly_consensus_table(browser)
+     logger.debug(f"parsed_yearly_consensus data: {parsed_yearly_consensus}")
+
+     return {
+         "요약": parsed_summary_cmp,
+         "시세": parsed_sise,
+         "주주현황": parsed_major_shareholders,
+         "기업개요": parsed_company_overview,
+         "펀더멘털": parsed_fundamentals,
+         "어닝서프라이즈": parsed_earnings_surprise,
+         "연간컨센서스": parsed_yearly_consensus,
+     }
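
The new c101_parser aggregates seven section parsers into a single dict keyed by Korean section names. A minimal usage sketch, assuming a BrowserFactoryPort implementation and an already-known WiseReport c101 URL (the real wiring lives in app/composition.py and the fetch_c101 service):

# Illustrative sketch only; everything except parse_c101_to_dict comes from elsewhere in the package.
from scraper2_hj3415.app.parsing.c101_parser import parse_c101_to_dict

async def fetch_c101_example(factory, url: str) -> dict:
    async with factory.lease() as browser:            # BrowserFactoryPort.lease()
        await browser.goto_and_wait_for_stable(url)   # BrowserNavigationPort
        return await parse_c101_to_dict(browser)      # {"요약": [...], "시세": [...], ...}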
@@ -0,0 +1,22 @@
+ # scraper2_hj3415/app/parsing/c103_parser.py
+ from __future__ import annotations
+ from typing import Any
+
+ from scraper2_hj3415.app.ports.browser.browser_port import BrowserPort
+ from scraper2_hj3415.app.parsing._tables.html_table import (
+     try_html_table_to_df,
+     df_to_c1034_metric_list,
+ )
+
+ TABLE_XPATH = "xpath=//div[@id='wrapper']//div//table"
+ TABLE_INDEX = 2
+
+
+ async def parse_c103_current_table(browser: BrowserPort) -> list[dict[str, Any]]:
+     """
+     ✅ Assumes the current page state (tab / annual / quarterly / search result) is already prepared.
+     Reads only the table at TABLE_INDEX in that state and converts it to rows.
+     """
+     html = await browser.outer_html_nth(TABLE_XPATH, TABLE_INDEX)
+     df = try_html_table_to_df(html)
+     return df_to_c1034_metric_list(df)
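
parse_c103_current_table deliberately reads only the table that is already on screen; navigation and tab/period toggling are the caller's job (the fetch_c103 service). A sketch of that contract, with the URL and toggle selector as placeholders:

# Sketch of the caller-side contract; the "#frqTyp0" selector is a placeholder,
# not the package's actual value.
from scraper2_hj3415.app.parsing.c103_parser import parse_c103_current_table

async def read_c103_annual_example(browser, url: str) -> list[dict]:
    await browser.goto_and_wait_for_stable(url)
    await browser.try_click("#frqTyp0")  # hypothetical annual/quarterly toggle
    return await parse_c103_current_table(browser)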
@@ -0,0 +1,26 @@
+ # scraper2_hj3415/app/parsing/c104_parser.py
+ from __future__ import annotations
+
+ from typing import Any
+
+ from scraper2_hj3415.app.ports.browser.browser_port import BrowserPort
+ from scraper2_hj3415.app.parsing._tables.html_table import (
+     try_html_table_to_df,
+     df_to_c1034_metric_list,
+ )
+
+ TABLE_XPATH = 'xpath=//table[@class="gHead01 all-width data-list"]'
+
+
+ async def parse_c104_current_table(
+     browser: BrowserPort,
+     *,
+     table_index: int,
+ ) -> list[dict[str, Any]]:
+     """
+     ✅ Assumes the current page state (tab / annual / quarterly / search result) is already prepared.
+     Reads only the table at the given table_index in that state and converts it to rows.
+     """
+     html = await browser.outer_html_nth(TABLE_XPATH, table_index)
+     df = try_html_table_to_df(html)
+     return df_to_c1034_metric_list(df)
@@ -0,0 +1,137 @@
+ # scraper2_hj3415/app/parsing/c106_parser.py
+ from __future__ import annotations
+
+ from io import StringIO
+ import re
+ import numpy as np
+ import pandas as pd
+ from typing import Any
+
+ from common_hj3415.utils import clean_text
+ from scraper2_hj3415.app.ports.browser.browser_port import BrowserPort
+ from scraper2_hj3415.app.parsing._normalize.label import (
+     normalize_metric_label,
+     sanitize_label,
+ )
+ from logging_hj3415 import logger
+
+ _CODE_RE = re.compile(r"\b\d{6}\b")
+
+
+ async def parse_c106_header_codes(browser: BrowserPort) -> list[str]:
+     """
+     Extracts only the 6-digit stock codes from the '기업간비교자료' (peer comparison) header cells (company names) on the current page.
+     (No goto/sleep.)
+     """
+     selector = (
+         'xpath=//caption[contains(normalize-space(.), "기업간비교자료")]'
+         "/following-sibling::thead//th[not(@colspan)]"
+     )
+     await browser.wait_attached(selector)
+     th_texts = await browser.all_texts(selector)
+
+     codes: list[str] = []
+     for i, t in enumerate(th_texts):
+         text = (t or "").strip()
+         if not text:
+             continue
+         m = _CODE_RE.search(text)
+         if not m:
+             continue
+         codes.append(m.group(0))
+
+     # Deduplicate while preserving order
+     seen: set[str] = set()
+     uniq: list[str] = []
+     for c in codes:
+         if c not in seen:
+             seen.add(c)
+             uniq.append(c)
+     logger.debug(f"c106 header codes: {uniq}")
+     return uniq
+
+
+ def html_table_to_df(html: str, codes: list[str]) -> pd.DataFrame:
+     df = pd.read_html(StringIO(html), header=None)[0]
+     if df is None or df.empty:
+         return pd.DataFrame()
+
+     df.columns = ["항목_group", "항목"] + codes
+     df["항목_group"] = df["항목_group"].ffill()
+
+     # Inject "주가데이터" into the first two rows (keeps the existing logic)
+     for i in range(min(2, len(df))):
+         row = df.loc[i].tolist()
+         new_row = ["주가데이터"] + row
+         df.loc[i] = new_row[: len(df.columns)]
+
+     df = df[df["항목"].notna()].reset_index(drop=True)
+     df.loc[df["항목"].isin(["투자의견", "목표주가(원)"]), "항목_group"] = "기타지표"
+     df = df[df["항목"] != "재무연월"].reset_index(drop=True)
+
+     for col in df.columns[2:]:
+         df[col] = df[col].replace("-", "0")
+         df[col] = pd.to_numeric(df[col], errors="coerce")
+
+     df["항목_group"] = df["항목_group"].astype("string").map(clean_text)
+     df["항목"] = df["항목"].astype("string").map(clean_text)
+
+     return df.replace({np.nan: None})
+
+
+ def df_to_c106_metric_list(df: pd.DataFrame) -> list[dict[str, Any]]:
+     """
+     C106 DataFrame -> records (list[dict])
+
+     Plan A applied:
+     - The 항목 key is strongly normalized via normalize_c1034_item (parentheses/asterisks removed)
+     - 항목_raw stores the pre-normalization label (with only UI noise removed)
+     - 항목_group is left as-is; the caller can drop it if not needed
+     """
+     if df is None or df.empty:
+         return []
+
+     df = df.copy()
+
+     # Preserve the raw label (pre-normalization, only UI noise removed)
+     raw = df["항목"].where(df["항목"].notna(), None)
+     df["항목_raw"] = raw.map(
+         lambda x: sanitize_label(str(x)) if x is not None else None
+     )
+
+     # Drop the 항목_group columns (only if present)
+     drop_cols = [c for c in ("항목_group", "항목_group_raw") if c in df.columns]
+     if drop_cols:
+         df = df.drop(columns=drop_cols)
+
+     # Normalize the key (plan A)
+     df["항목"] = df["항목"].map(
+         lambda x: normalize_metric_label(str(x)) if x is not None else ""
+     )
+
+     # Keep only valid rows
+     df = df[df["항목"].astype(str).str.strip() != ""].reset_index(drop=True)
+
+     # NaN -> None
+     df = df.where(pd.notnull(df), None)
+
+     return df.to_dict(orient="records")
+
+
+ async def parse_c106_current_table(
+     browser: BrowserPort,
+     *,
+     columns: list[str],
+     table_selector: str = "#cTB611",
+     table_index: int = 0,
+     timeout_ms: int = 10_000,
+ ) -> list[dict[str, Any]]:
+     """
+     ✅ Parses only the comparison table from the current page (goto/wait already done).
+     """
+     await browser.wait_table_nth_ready(
+         table_selector, index=table_index, min_rows=3, timeout_ms=timeout_ms
+     )
+     html = await browser.outer_html_nth(table_selector, table_index)
+     df = html_table_to_df(html, columns)
+     return df_to_c106_metric_list(df)
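
parse_c106_current_table expects the header codes as its columns argument, so the two c106 helpers are meant to be chained. A minimal sketch, assuming the comparison page is already loaded:

# Chains the two helpers shown above on an already-prepared page.
from scraper2_hj3415.app.parsing.c106_parser import (
    parse_c106_header_codes,
    parse_c106_current_table,
)

async def read_c106_example(browser) -> list[dict]:
    codes = await parse_c106_header_codes(browser)  # e.g. ["005930", "000660", ...]
    return await parse_c106_current_table(browser, columns=codes)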
@@ -0,0 +1,254 @@
+ # scraper2_hj3415/app/parsing/c108_parser.py
+ from __future__ import annotations
+
+ import re
+ from html import unescape
+ from typing import Any
+ from common_hj3415.utils import clean_text
+ from scraper2_hj3415.app.ports.browser.browser_port import BrowserPort
+
+ _TAGS = re.compile(r"<[^>]+>")
+ _WS = re.compile(r"\s+")
+
+ _TD_ID_RE = re.compile(r"^td(\d+)$")  # td0, td1, ...
+ _C_ID_RE = re.compile(r"^c(\d+)$")  # c0, c1, ...
+
+
+ def _clean_text(x: Any) -> str:
+     """
+     Safely turns Any into a human-readable string at boundary/logging/parsing stages.
+     - Any → str
+     - HTML entity unescape
+     - then apply normalize_text
+     """
+     if x is None:
+         return ""
+     s = unescape(str(x))  # ❗ handle only None (instead of `x or ""`) to preserve falsy values
+     return clean_text(s)
+
+
+ def _clean_html_to_text(html: str) -> str:
+     s = unescape(html or "")
+     s = s.replace("<br/>", "\n").replace("<br>", "\n").replace("<br />", "\n")
+     s = _TAGS.sub("", s)
+     s = s.replace("\r", "")
+     lines = [ln.strip() for ln in s.split("\n")]
+     lines = [ln for ln in lines if ln]
+     return "\n".join(lines).strip()
+
+
+ _UI_LINES = {"요약정보닫기"}
+ _UI_PREFIXES = ("요약정보 :", "요약정보:")
+ _BULLET_RE = re.compile(r"^\s*▶\s*")
+ _MULTI_NL = re.compile(r"\n{3,}")
+
+
+ def _prettify_report_text(
+     text: str,
+     *,
+     bullet: str = "- ",
+ ) -> str:
+     if not text:
+         return ""
+
+     lines = [ln.strip() for ln in text.split("\n")]
+     out: list[str] = []
+
+     for ln in lines:
+         if not ln:
+             continue
+
+         # Strip UI leftovers (prefix)
+         for p in _UI_PREFIXES:
+             if ln.startswith(p):
+                 ln = ln[len(p) :].strip()
+                 break
+         if not ln:
+             continue
+
+         if ln in _UI_LINES:
+             continue
+
+         # Normalize bullets
+         if _BULLET_RE.match(ln):
+             ln = _BULLET_RE.sub(bullet, ln)
+
+         out.append(ln)
+
+     s = "\n".join(out)
+     s = _MULTI_NL.sub("\n\n", s).strip()
+     return s
+
+
+ def _parse_target_price(x: Any) -> int | None:
+     s = _clean_text(x)
+     if not s:
+         return None
+     s2 = re.sub(r"[^0-9]", "", s)
+     if not s2:
+         return None
+     try:
+         return int(s2)
+     except Exception:
+         return None
+
+
+ def _parse_pages(x: Any) -> int | None:
+     s = _clean_text(x)
+     m = re.search(r"(\d+)", s)
+     return int(m.group(1)) if m else None
+
+
+ async def parse_c108_recent_reports_dom(
+     browser: BrowserPort,
+     *,
+     table_selector: str = "#tableCmpDetail",
+ ) -> list[dict[str, Any]]:
+     """
+     Extracts reliably from the DOM, without pandas (read_html).
+
+     Assumptions:
+     - A "normal row" has a td[id^='td'] whose id is of the form tdN.
+     - The hidden "detail summary" is attached as td[id='cN'] data-content.
+     - summary lives in the data-content of td[id='tdN'], comment in the data-content of td[id='cN'].
+
+     Required BrowserPort capabilities:
+     - wait_attached(selector)
+     - count_in_nth(scope_selector, scope_index, inner_selector) -> int
+     - eval_in_nth_first(scope_selector, scope_index, inner_selector, expression) -> Any
+     (used exactly as already defined)
+     """
+
+     await browser.wait_attached(table_selector)
+
+     # Number of tbody tr rows
+     tr_count = await browser.count_in_nth(
+         table_selector, scope_index=0, inner_selector="tbody tr"
+     )
+     if tr_count <= 0:
+         return []
+
+     out: list[dict[str, Any]] = []
+
+     for tr_idx in range(tr_count):
+         # row scope: table_selector >> tbody tr (nth=tr_idx)
+         row_scope = f"{table_selector} >> tbody tr >> nth={tr_idx}"
+
+         # 1) Check whether this row is a "normal row": it must contain td[id^='td']
+         td_id = await browser.eval_in_nth_first(
+             row_scope,
+             scope_index=0,
+             inner_selector="td[id^='td']",
+             expression="el => el.id",
+         )
+         td_id = _clean_text(td_id)
+         m = _TD_ID_RE.match(td_id)
+         if not m:
+             # Skip hidden detail rows (cN) and the like
+             continue
+
+         n = m.group(1)  # row_id
+         # 2) Extract column texts (by td position, matching the C108 table layout)
+         # Typically: 1=date, 2=title, 3=authors, 4=provider, 5=rating, 6=target price, 7=pages ...
+         date = _clean_text(
+             await browser.eval_in_nth_first(
+                 row_scope,
+                 scope_index=0,
+                 inner_selector="td:nth-child(1)",
+                 expression="el => el.innerText",
+             )
+         )
+         title = _clean_text(
+             await browser.eval_in_nth_first(
+                 row_scope,
+                 scope_index=0,
+                 inner_selector="td:nth-child(2)",
+                 expression="el => el.innerText",
+             )
+         )
+
+         # Minimal filter
+         if not date or not title:
+             continue
+
+         authors = _clean_text(
+             await browser.eval_in_nth_first(
+                 row_scope,
+                 scope_index=0,
+                 inner_selector="td:nth-child(3)",
+                 expression="el => el.innerText",
+             )
+         ) or None
+
+         provider = _clean_text(
+             await browser.eval_in_nth_first(
+                 row_scope,
+                 scope_index=0,
+                 inner_selector="td:nth-child(4)",
+                 expression="el => el.innerText",
+             )
+         ) or None
+
+         rating = _clean_text(
+             await browser.eval_in_nth_first(
+                 row_scope,
+                 scope_index=0,
+                 inner_selector="td:nth-child(5)",
+                 expression="el => el.innerText",
+             )
+         ) or None
+
+         target_price_raw = await browser.eval_in_nth_first(
+             row_scope,
+             scope_index=0,
+             inner_selector="td:nth-child(6)",
+             expression="el => el.innerText",
+         )
+         target_price = _parse_target_price(target_price_raw)
+
+         pages_raw = await browser.eval_in_nth_first(
+             row_scope,
+             scope_index=0,
+             inner_selector="td:nth-child(7)",
+             expression="el => el.innerText",
+         )
+         pages = _parse_pages(pages_raw)
+
+         # 3) summary/comment: read the data-content of tdN / cN directly by N
+         # (data-content can be read even when the element is in the DOM but display:none)
+         summary_html = await browser.eval_in_nth_first(
+             table_selector,
+             scope_index=0,
+             inner_selector=f"td#td{n}",
+             expression="el => el.getAttribute('data-content') || ''",
+         )
+         comment_html = await browser.eval_in_nth_first(
+             table_selector,
+             scope_index=0,
+             inner_selector=f"td#c{n}",
+             expression="el => el.getAttribute('data-content') || ''",
+         )
+
+         summary = _prettify_report_text(_clean_html_to_text(_clean_text(summary_html)))
+         comment = _prettify_report_text(_clean_html_to_text(_clean_text(comment_html)))
+
+         out.append(
+             {
+                 "row_id": n,
+                 "date": date,
+                 "title": title,
+                 "authors": authors,
+                 "provider": provider,
+                 "rating": rating,
+                 "target_price": target_price,
+                 "pages": pages,
+                 "summary": summary or None,
+                 "comment": comment or None,
+             }
+         )
+
+     return out
+
+
+ async def parse_c108_to_dict(browser: BrowserPort) -> dict[str, list[dict[str, Any]]]:
+     return {"리포트": await parse_c108_recent_reports_dom(browser)}
@@ -0,0 +1,9 @@
+ # scraper2_hj3415/app/ports/browser/browser_factory_port.py
+ from __future__ import annotations
+ from typing import Protocol, AsyncContextManager
+
+ from scraper2_hj3415.app.ports.browser.browser_port import BrowserPort
+
+ class BrowserFactoryPort(Protocol):
+     def lease(self) -> AsyncContextManager[BrowserPort]: ...
+     async def aclose(self) -> None: ...
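
lease() returning AsyncContextManager[BrowserPort] implies the call pattern below: the leased browser is released when the context exits, while aclose() shuts down the factory itself. A short sketch:

# `factory` is any BrowserFactoryPort implementation (built in app/composition.py).
async def with_leased_browser(factory) -> str:
    async with factory.lease() as browser:  # BrowserPort, returned on context exit
        return await browser.title()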
@@ -0,0 +1,32 @@
+ # scraper2_hj3415/app/ports/browser/browser_port.py
+ from __future__ import annotations
+
+ from typing import Protocol
+
+ from .capabilities import (
+     BrowserInteractionPort,
+     BrowserNavigationPort,
+     BrowserScopePort,
+     BrowserTablePort,
+     BrowserTextPort,
+     BrowserWaitPort,
+ )
+
+
+ class BrowserPort(
+     BrowserNavigationPort,
+     BrowserWaitPort,
+     BrowserInteractionPort,
+     BrowserTextPort,
+     BrowserScopePort,
+     BrowserTablePort,
+     Protocol,
+ ):
+     """
+     The final BrowserPort used across the project.
+
+     - Internally it is split into capability units; if needed, parsers and
+       use cases can be switched to depend on a specific capability port
+       instead of BrowserPort.
+     """
+     ...
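
As the docstring notes, callers can depend on a single capability rather than the full BrowserPort. A sketch of that narrowing; any BrowserPort instance satisfies the narrower protocol:

from scraper2_hj3415.app.ports.browser.capabilities import BrowserNavigationPort

async def log_location(nav: BrowserNavigationPort) -> tuple[str, str]:
    # Accepts a full BrowserPort too, since BrowserPort extends this protocol.
    return await nav.title(), await nav.current_url()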
@@ -0,0 +1,15 @@
+ from .navigation import BrowserNavigationPort
+ from .wait import BrowserWaitPort
+ from .interaction import BrowserInteractionPort
+ from .text import BrowserTextPort
+ from .scope import BrowserScopePort
+ from .table import BrowserTablePort
+
+ __all__ = [
+     "BrowserNavigationPort",
+     "BrowserWaitPort",
+     "BrowserInteractionPort",
+     "BrowserTextPort",
+     "BrowserScopePort",
+     "BrowserTablePort",
+ ]
@@ -0,0 +1,27 @@
+ from __future__ import annotations
+
+ from typing import Protocol
+
+
+ class BrowserInteractionPort(Protocol):
+     """Interactions such as click/scroll"""
+
+     async def click(
+         self,
+         selector: str,
+         *,
+         index: int = 0,
+         timeout_ms: int = 4_000,
+         force: bool = False,
+     ) -> None: ...
+
+     async def try_click(
+         self,
+         selector: str,
+         *,
+         index: int = 0,
+         timeout_ms: int = 1_500,
+         force: bool = False,
+     ) -> bool: ...
+
+     async def scroll_into_view(self, selector: str, *, index: int = 0) -> None: ...
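
The Playwright adapter for this capability ships in adapters/out/playwright/capabilities/interaction.py, whose body is not shown in this diff. A rough sketch of how such an adapter could satisfy the protocol (not the package's actual code):

# Hypothetical sketch only; the shipped adapter may differ.
from playwright.async_api import Page, TimeoutError as PlaywrightTimeoutError

class PlaywrightInteraction:
    def __init__(self, page: Page) -> None:
        self._page = page

    async def click(self, selector: str, *, index: int = 0,
                    timeout_ms: int = 4_000, force: bool = False) -> None:
        await self._page.locator(selector).nth(index).click(timeout=timeout_ms, force=force)

    async def try_click(self, selector: str, *, index: int = 0,
                        timeout_ms: int = 1_500, force: bool = False) -> bool:
        try:
            await self.click(selector, index=index, timeout_ms=timeout_ms, force=force)
            return True
        except PlaywrightTimeoutError:
            return False

    async def scroll_into_view(self, selector: str, *, index: int = 0) -> None:
        await self._page.locator(selector).nth(index).scroll_into_view_if_needed()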
@@ -0,0 +1,18 @@
+ from __future__ import annotations
+
+ from typing import Protocol
+
+
+ class BrowserNavigationPort(Protocol):
+     """Page navigation / basic info"""
+
+     async def title(self) -> str: ...
+     async def current_url(self) -> str: ...
+
+     async def goto_and_wait_for_stable(
+         self,
+         url: str,
+         timeout_ms: int = 10_000,
+     ) -> None: ...
+
+     async def reload(self, *, timeout_ms: int = 10_000) -> None: ...
@@ -0,0 +1,66 @@
+ from __future__ import annotations
+
+ from typing import Any, Protocol
+
+
+ class BrowserScopePort(Protocol):
+     """Scope/nth context-based lookups (keeps the existing API for now)"""
+
+     async def is_attached(self, selector: str, *, index: int = 0) -> bool: ...
+
+     async def computed_style(
+         self,
+         selector: str,
+         *,
+         index: int = 0,
+         prop: str,
+     ) -> str: ...
+
+     async def count_in_nth(
+         self,
+         scope_selector: str,
+         *,
+         scope_index: int,
+         inner_selector: str,
+     ) -> int: ...
+
+     async def eval_in_nth_first(
+         self,
+         scope_selector: str,
+         *,
+         scope_index: int,
+         inner_selector: str,
+         expression: str,
+     ) -> Any: ...
+
+     async def inner_text_in_nth(
+         self,
+         scope_selector: str,
+         *,
+         scope_index: int,
+         inner_selector: str,
+         inner_index: int = 0,
+         timeout_ms: int = 10_000,
+     ) -> str:
+         """
+         Returns the innerText of the nth(inner_index) element matching inner_selector
+         inside the nth(scope_index) element matching scope_selector.
+         (Rendered text: reflects line breaks and styling.)
+         """
+         ...
+
+     async def text_content_in_nth(
+         self,
+         scope_selector: str,
+         *,
+         scope_index: int,
+         inner_selector: str,
+         inner_index: int = 0,
+         timeout_ms: int = 10_000,
+     ) -> str:
+         """
+         Returns the textContent of the nth(inner_index) element matching inner_selector
+         inside the nth(scope_index) element matching scope_selector.
+         (DOM-level text: may include hidden text.)
+         """
+         ...
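
count_in_nth and eval_in_nth_first are designed to be combined for row iteration, which is exactly how parse_c108_recent_reports_dom above uses them. The pattern in miniature, with placeholder selectors:

# Generic scoped row iteration; the selectors here are placeholders.
async def first_cell_texts(browser, table_selector: str) -> list[str]:
    n = await browser.count_in_nth(table_selector, scope_index=0, inner_selector="tbody tr")
    texts: list[str] = []
    for i in range(n):
        texts.append(await browser.eval_in_nth_first(
            f"{table_selector} >> tbody tr >> nth={i}",
            scope_index=0,
            inner_selector="td:nth-child(1)",
            expression="el => el.innerText",
        ))
    return texts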
@@ -0,0 +1,28 @@
+ from __future__ import annotations
+
+ from typing import Any, Protocol
+
+
+ class BrowserTablePort(Protocol):
+     """Table parsing / header extraction"""
+
+     async def table_records(
+         self,
+         table_selector: str,
+         *,
+         header: int | list[int] | None = 0,
+     ) -> list[dict[str, Any]]: ...
+
+     async def table_header_texts_nth(
+         self,
+         table_selector: str,
+         *,
+         index: int,
+     ) -> list[str]: ...
+
+     async def table_header_periods_mm_nth(
+         self,
+         table_selector: str,
+         *,
+         index: int,
+     ) -> list[str]: ...