scraper2-hj3415 2.2.0__tar.gz → 2.3.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62)
  1. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/PKG-INFO +1 -1
  2. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/pyproject.toml +1 -1
  3. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/.DS_Store +0 -0
  4. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/adapters/out/playwright/browser.py +0 -1
  5. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/app/composition.py +18 -1
  6. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/main.py +8 -2
  7. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/LICENSE +0 -0
  8. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/README.md +0 -0
  9. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/__init__.py +0 -0
  10. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/adapters/out/.DS_Store +0 -0
  11. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/adapters/out/__init__.py +0 -0
  12. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/adapters/out/playwright/__init__.py +0 -0
  13. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/adapters/out/playwright/browser_factory.py +0 -0
  14. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/adapters/out/playwright/session.py +0 -0
  15. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/adapters/out/sinks/.DS_Store +0 -0
  16. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/adapters/out/sinks/memory/__init__.py +0 -0
  17. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/adapters/out/sinks/memory/c101_memory_sink.py +0 -0
  18. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/adapters/out/sinks/memory/c103_memory_sink.py +0 -0
  19. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/adapters/out/sinks/memory/c104_memory_sink.py +0 -0
  20. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/adapters/out/sinks/memory/c106_memory_sink.py +0 -0
  21. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/adapters/out/sinks/memory/c108_memory_sink.py +0 -0
  22. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/adapters/out/sinks/memory/store.py +0 -0
  23. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/adapters/out/sinks/mongo/__init__.py +0 -0
  24. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/adapters/out/sinks/mongo/c101_mongo_sink.py +0 -0
  25. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/adapters/out/sinks/mongo/c103_mongo_sink.py +0 -0
  26. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/adapters/out/sinks/mongo/c104_mongo_sink.py +0 -0
  27. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/adapters/out/sinks/mongo/c106_mongo_sink.py +0 -0
  28. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/adapters/out/sinks/mongo/c108_mongo_sink.py +0 -0
  29. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/app/__init__.py +0 -0
  30. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/app/parsing/__init__.py +0 -0
  31. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/app/parsing/_converters.py +0 -0
  32. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/app/parsing/_normalize.py +0 -0
  33. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/app/parsing/c101_parser.py +0 -0
  34. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/app/parsing/c103_parser.py +0 -0
  35. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/app/parsing/c104_parser.py +0 -0
  36. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/app/parsing/c106_parser.py +0 -0
  37. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/app/parsing/c108_parser.py +0 -0
  38. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/app/ports/__init__.py +0 -0
  39. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/app/ports/browser/__init__.py +0 -0
  40. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/app/ports/browser/browser_factory_port.py +0 -0
  41. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/app/ports/browser/browser_port.py +0 -0
  42. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/app/ports/ingest_port.py +0 -0
  43. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/app/ports/sinks/__init__.py +0 -0
  44. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/app/ports/sinks/base_sink_port.py +0 -0
  45. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/app/ports/sinks/c101_sink_port.py +0 -0
  46. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/app/ports/sinks/c103_sink_port.py +0 -0
  47. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/app/ports/sinks/c104_sink_port.py +0 -0
  48. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/app/ports/sinks/c106_sink_port.py +0 -0
  49. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/app/ports/sinks/c108_sink_port.py +0 -0
  50. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/app/usecases/__init__.py +0 -0
  51. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/app/usecases/fetch/__init__.py +0 -0
  52. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/app/usecases/fetch/fetch_c101.py +0 -0
  53. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/app/usecases/fetch/fetch_c103.py +0 -0
  54. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/app/usecases/fetch/fetch_c104.py +0 -0
  55. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/app/usecases/fetch/fetch_c106.py +0 -0
  56. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/app/usecases/fetch/fetch_c108.py +0 -0
  57. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/app/usecases/ingest/__init__.py +0 -0
  58. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/app/usecases/ingest/ingest_c101.py +0 -0
  59. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/app/usecases/ingest/ingest_c103.py +0 -0
  60. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/app/usecases/ingest/ingest_c104.py +0 -0
  61. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/app/usecases/ingest/ingest_c106.py +0 -0
  62. {scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/app/usecases/ingest/ingest_c108.py +0 -0
{scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: scraper2-hj3415
-Version: 2.2.0
+Version: 2.3.0
 Summary: Naver WiseReport scraper
 Keywords: example,demo
 Author-email: Hyungjin Kim <hj3415@gmail.com>
{scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "flit_core.buildapi"

 [project]
 name = "scraper2-hj3415" # PyPI name (hyphens allowed)
-version = "2.2.0"
+version = "2.3.0"
 description = "Naver WiseReport scraper"
 readme = "README.md"
 requires-python = ">=3.11"
{scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/adapters/out/playwright/browser.py
@@ -10,7 +10,6 @@ class PlaywrightBrowser:
         self.page = page

     async def goto(self, url: str, timeout_ms: int = 10_000) -> None:
-        self.page.set_default_timeout(10_000)
         await self.page.goto(url, timeout=timeout_ms)

     async def title(self) -> str:
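
The removed call pinned the page-wide Playwright default timeout to 10 s on every goto(), regardless of the timeout_ms argument; with it gone, only the explicit per-call timeout= is applied and other page methods keep Playwright's own default. A minimal standalone sketch of the per-call-timeout pattern (not code from the package):

# A minimal sketch (not from the package): goto() now relies solely on the
# per-call timeout, so other page methods keep Playwright's default instead
# of being pinned to 10 s.
import asyncio
from playwright.async_api import async_playwright

async def main() -> None:
    async with async_playwright() as pw:
        browser = await pw.chromium.launch()
        page = await browser.new_page()
        # Per-call timeout, mirroring PlaywrightBrowser.goto(url, timeout_ms=...)
        await page.goto("https://example.com", timeout=5_000)
        print(await page.title())
        await browser.close()

asyncio.run(main())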
{scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/app/composition.py
@@ -3,7 +3,7 @@ from __future__ import annotations

 import os
 from dataclasses import dataclass
-from typing import Literal
+from typing import Literal, Optional

 from pymongo.asynchronous.database import AsyncDatabase

@@ -109,6 +109,23 @@ class Usecases:
     store: InMemoryStore | None = None  # ✅ only when memory
     mongo: Mongo | None = None  # ✅ only when mongo
     db: AsyncDatabase | None = None  # ✅ only when mongo
+    browser_factory: Optional[BrowserFactoryPort] = None
+
+    async def aclose(self) -> None:
+        # 1) close playwright first (clean up its subprocess)
+        if self.browser_factory is not None:
+            close = getattr(self.browser_factory, "aclose", None)
+            if callable(close):
+                await close()
+
+        # 2) close mongo
+        if self.mongo is not None:
+            close = getattr(self.mongo, "close", None)
+            if callable(close):
+                out = close()
+                if hasattr(out, "__await__"):
+                    await out
+


 # -------------------------
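
The new aclose() shuts the Playwright factory down before Mongo and accepts either a sync or an async close() on the Mongo handle. A self-contained sketch of that sync-or-async close pattern (the resource classes here are hypothetical; only the pattern mirrors the diff):

# Sketch of the duck-typed close used in aclose(): call close(), and await
# the result only if it is awaitable. SyncResource/AsyncResource are
# stand-ins, not types from the package.
import asyncio
from typing import Any

class SyncResource:
    def close(self) -> None:
        print("sync close")

class AsyncResource:
    async def close(self) -> None:
        print("async close")

async def close_quietly(resource: Any) -> None:
    close = getattr(resource, "close", None)
    if callable(close):
        out = close()
        if hasattr(out, "__await__"):  # coroutine or other awaitable
            await out

async def main() -> None:
    await close_quietly(SyncResource())
    await close_quietly(AsyncResource())

asyncio.run(main())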
{scraper2_hj3415-2.2.0 → scraper2_hj3415-2.3.0}/src/scraper2/main.py
@@ -233,7 +233,10 @@ def nfs_one(
             else:
                 typer.echo(_dto_to_pretty(dto))
         finally:
-            if getattr(ucs, "mongo", None) is not None:
+            close = getattr(ucs, "aclose", None)
+            if callable(close):
+                await close()
+            elif getattr(ucs, "mongo", None) is not None:
                 await _maybe_await_close(ucs.mongo)

     asyncio.run(_run())
@@ -281,7 +284,10 @@ def nfs_all(
                 progress_every=1,  # every chunk
             )
         finally:
-            if getattr(ucs, "mongo", None) is not None:
+            close = getattr(ucs, "aclose", None)
+            if callable(close):
+                await close()
+            elif getattr(ucs, "mongo", None) is not None:
                 await _maybe_await_close(ucs.mongo)

     asyncio.run(_run())
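
Both CLI commands now carry the same finally block: prefer ucs.aclose(), and fall back to closing Mongo directly when aclose() is absent. One possible refactor, sketched here with hypothetical names and not part of the package, would hoist that block into an async context manager:

# Hypothetical sketch: factor the repeated finally-block into a reusable
# async context manager. closing_usecases does not exist in the package;
# it only mirrors the cleanup order shown in the diff.
from contextlib import asynccontextmanager
from typing import Any, AsyncIterator

@asynccontextmanager
async def closing_usecases(ucs: Any) -> AsyncIterator[Any]:
    try:
        yield ucs
    finally:
        close = getattr(ucs, "aclose", None)
        if callable(close):
            await close()
        elif getattr(ucs, "mongo", None) is not None:
            out = ucs.mongo.close()
            if hasattr(out, "__await__"):
                await out

Each command body inside _run() would then reduce to "async with closing_usecases(ucs): ...", keeping the shutdown order in one place.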