dipx 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
dipx/__init__.py ADDED
@@ -0,0 +1,61 @@
1
+ """Python client for the dip-x.store marketplace.
2
+
3
+ Quick start::
4
+
5
+ from dipx import DipXClient
6
+
7
+ client = DipXClient()
8
+ profile = client.login("Lincoln", "password")
9
+ print(profile.username, profile.id)
10
+
11
+ for p in client.get_my_products():
12
+ print(p.id, p.title, p.price, p.status)
13
+
14
+ client.logout()
15
+ """
16
+
17
+ from .client import DipXClient
18
+ from .exceptions import (
19
+ AuthError,
20
+ DipXError,
21
+ InsufficientBalanceError,
22
+ NotFoundError,
23
+ RateLimitError,
24
+ ValidationError,
25
+ )
26
+ from .models import (
27
+ Balance,
28
+ BumpInfo,
29
+ Conversation,
30
+ Message,
31
+ NotificationSettings,
32
+ Order,
33
+ Product,
34
+ Profile,
35
+ Review,
36
+ Transaction,
37
+ WishlistResult,
38
+ )
39
+
40
+ __version__ = "0.1.0"
41
+
42
+ __all__ = [
43
+ "DipXClient",
44
+ "AuthError",
45
+ "DipXError",
46
+ "InsufficientBalanceError",
47
+ "NotFoundError",
48
+ "RateLimitError",
49
+ "ValidationError",
50
+ "Balance",
51
+ "BumpInfo",
52
+ "Conversation",
53
+ "Message",
54
+ "NotificationSettings",
55
+ "Order",
56
+ "Product",
57
+ "Profile",
58
+ "Review",
59
+ "Transaction",
60
+ "WishlistResult",
61
+ ]
dipx/_http.py ADDED
@@ -0,0 +1,177 @@
1
+ """Internal HTTP helper: holds a :class:`requests.Session`, manages CSRF.
2
+
3
+ The marketplace authenticates with cookies (PHPSESSID) and rejects most
4
+ JSON endpoints that don't have an ``X-Requested-With: XMLHttpRequest``
5
+ header and a same-origin ``Referer``. CSRF tokens are required by all
6
+ state-mutating endpoints and refreshed on every full HTML page load.
7
+ """
8
+
9
+ from __future__ import annotations
10
+
11
+ import re
12
+ from typing import Any, Dict, Optional
13
+
14
+ import requests
15
+
16
+ from .exceptions import (
17
+ AuthError,
18
+ DipXError,
19
+ InsufficientBalanceError,
20
+ NotFoundError,
21
+ RateLimitError,
22
+ ValidationError,
23
+ )
24
+
25
+ BASE_URL = "https://dip-x.store"
26
+
27
+ _CSRF_RE = re.compile(
28
+ r'name="csrf_token"\s+value="([0-9a-f]{32,128})"', re.IGNORECASE
29
+ )
30
+
31
+
32
class _Http:
    """Session holder: cookies, default headers, CSRF cache, error mapping."""

    def __init__(
        self,
        *,
        base_url: str = BASE_URL,
        timeout: float = 30.0,
        user_agent: Optional[str] = None,
    ) -> None:
        """Create a fresh, unauthenticated session.

        Args:
            base_url: Marketplace origin; a trailing slash is stripped.
            timeout: Per-request timeout in seconds, applied to every call.
            user_agent: Optional override of the default browser-like UA.
        """
        self.base_url = base_url.rstrip("/")
        self.timeout = timeout
        # Cached CSRF token; refreshed from every full HTML page we load.
        self.csrf_token: Optional[str] = None
        self.session = requests.Session()
        self.session.headers.update(
            {
                # Browser-like User-Agent — presumably the site rejects
                # obvious bot UAs (TODO: confirm against the live site).
                "User-Agent": user_agent
                or (
                    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 "
                    "(KHTML, like Gecko) Chrome/130.0 Safari/537.36"
                ),
                "Accept-Language": "ru,en;q=0.9",
            }
        )

    # ── url helpers ──────────────────────────────────────────────
    def url(self, path: str) -> str:
        """Resolve *path* against the base URL; absolute URLs pass through."""
        if path.startswith(("http://", "https://")):
            return path
        if not path.startswith("/"):
            path = "/" + path
        return self.base_url + path

    # ── csrf ────────────────────────────────────────────────────
    def refresh_csrf(self, html: str) -> Optional[str]:
        """Scan *html* for a ``csrf_token`` input and cache it when found.

        Returns the currently cached token — possibly the previous one if
        *html* carries no token at all.
        """
        m = _CSRF_RE.search(html)
        if m:
            self.csrf_token = m.group(1)
        return self.csrf_token

    def ensure_csrf(self, page_path: str = "/profile") -> str:
        """Make sure we have a CSRF token; load ``page_path`` if needed.

        Raises:
            AuthError: No token could be scraped from the page — typically
                because the session is not logged in.
        """
        if self.csrf_token:
            return self.csrf_token
        resp = self.session.get(self.url(page_path), timeout=self.timeout)
        self.refresh_csrf(resp.text)
        if not self.csrf_token:
            raise AuthError(
                "Не удалось получить csrf_token (вы авторизованы?)",
                status_code=resp.status_code,
            )
        return self.csrf_token

    # ── core request ────────────────────────────────────────────
    def request(
        self,
        method: str,
        path: str,
        *,
        json: Any = None,
        data: Any = None,
        files: Any = None,
        params: Any = None,
        headers: Optional[Dict[str, str]] = None,
        referer: Optional[str] = None,
        allow_redirects: bool = True,
        as_json: bool = True,
        raise_status: bool = True,
    ) -> Any:
        """Perform one HTTP request through the shared session.

        With ``as_json=True`` (default) the AJAX marker headers are added
        and a JSON response body is decoded and returned; otherwise — or
        when the response is not JSON — the raw ``requests.Response`` is
        returned. With ``raise_status=True`` HTTP errors are mapped onto
        the package exception hierarchy via ``_raise_for_status``.
        """
        url = self.url(path)
        hdrs: Dict[str, str] = {}
        if as_json:
            # Most JSON endpoints reject calls without this marker header
            # (see module docstring).
            hdrs["X-Requested-With"] = "XMLHttpRequest"
            hdrs["Accept"] = "application/json, text/plain, */*"
        if referer:
            # Same-origin Referer; relative paths are resolved first.
            hdrs["Referer"] = self.url(referer)
        if headers:
            # Caller-supplied headers win over the defaults above.
            hdrs.update(headers)

        resp = self.session.request(
            method,
            url,
            json=json,
            data=data,
            files=files,
            params=params,
            headers=hdrs,
            timeout=self.timeout,
            allow_redirects=allow_redirects,
        )

        if raise_status:
            _raise_for_status(resp)

        if as_json:
            ct = resp.headers.get("Content-Type", "")
            if "application/json" in ct:
                try:
                    return resp.json()
                except Exception:
                    # Malformed JSON body: fall through and hand the
                    # caller the raw response instead.
                    pass
        return resp

    def get_json(self, path: str, **kw) -> Any:
        """GET *path*, expecting a JSON payload."""
        return self.request("GET", path, **kw)

    def post_json(self, path: str, **kw) -> Any:
        """POST to *path*, expecting a JSON payload."""
        return self.request("POST", path, **kw)

    def get_html(self, path: str, **kw) -> str:
        """GET a full HTML page; also refreshes the cached CSRF token."""
        kw.setdefault("as_json", False)
        resp = self.request("GET", path, **kw)
        text = resp.text
        self.refresh_csrf(text)
        return text
145
+
146
+
147
+ # ── helpers ─────────────────────────────────────────────────────
148
+ def _payload(resp: requests.Response) -> Any:
149
+ ct = resp.headers.get("Content-Type", "")
150
+ if "application/json" in ct:
151
+ try:
152
+ return resp.json()
153
+ except Exception:
154
+ return None
155
+ return None
156
+
157
+
158
def _raise_for_status(resp: requests.Response) -> None:
    """Translate an HTTP error response into the package's exceptions.

    Responses below 400 pass through silently. The error message is taken
    from the JSON body's ``error``/``message``/``detail`` field when one
    exists, falling back to the HTTP reason phrase.
    """
    code = resp.status_code
    if code < 400:
        return
    body = _payload(resp)
    msg: Any = None
    if isinstance(body, dict):
        msg = body.get("error") or body.get("message") or body.get("detail")
    msg = msg or resp.reason or "HTTP error"
    # Status code → exception class; anything unmapped is a generic error.
    exc_by_code = {
        401: AuthError,
        403: AuthError,
        404: NotFoundError,
        429: RateLimitError,
        402: InsufficientBalanceError,
        400: ValidationError,
        422: ValidationError,
    }
    raise exc_by_code.get(code, DipXError)(msg, status_code=code, payload=body)
dipx/_parse.py ADDED
@@ -0,0 +1,157 @@
1
+ """HTML scraping helpers — used wherever the site does not return JSON.
2
+
3
+ The marketplace renders many lists (products, orders, conversations) as
4
+ server-side HTML. We parse them with BeautifulSoup. Keep selectors as
5
+ small and tolerant as possible — the site changes its markup.
6
+ """
7
+
8
+ from __future__ import annotations
9
+
10
+ import re
11
+ from typing import List, Optional
12
+
13
+ from bs4 import BeautifulSoup
14
+
15
+ from .models import Balance, Order, Product, Profile
16
+
17
+
18
+ _NUMBER_RE = re.compile(r"-?\d+(?:[.,]\d+)?")
19
+
20
+
21
def _to_float(text: Optional[str]) -> float:
    """Best-effort float extraction from scraped markup text.

    Non-breaking and regular spaces (thousand separators) are stripped,
    then the first decimal number is pulled out, accepting ``,`` as a
    decimal point. Returns ``0.0`` for empty input or when no number is
    present.
    """
    if not text:
        return 0.0
    compact = text.replace("\u00a0", "").replace(" ", "")
    match = _NUMBER_RE.search(compact)
    if match is None:
        return 0.0
    return float(match.group(0).replace(",", "."))
26
+
27
+
28
def _to_int(text: Optional[str]) -> int:
    """Best-effort integer extraction from scraped markup text.

    Same cleanup as :func:`_to_float`; the matched number is converted
    through ``float`` first (so ``"1,5"`` truncates to ``1``). Returns
    ``0`` for empty input or when no number is present.
    """
    if not text:
        return 0
    compact = text.replace("\u00a0", "").replace(" ", "")
    match = _NUMBER_RE.search(compact)
    if match is None:
        return 0
    return int(float(match.group(0).replace(",", ".")))
33
+
34
+
35
def parse_profile_id_from_html(html: str) -> Optional[int]:
    """Extract the numeric user id from a profile page.

    The id may be embedded in several alternative spots depending on the
    template; patterns are tried in priority order and the first match
    wins. Returns ``None`` when nothing matches.
    """
    patterns = (
        r'data-user-id="(\d+)"',
        r'"user_id":(\d+)',
        r'"id":(\d+),"username"',
        r'/user/[^"]+"[^>]*data-id="(\d+)"',
    )
    hits = (re.search(pattern, html) for pattern in patterns)
    first = next((m for m in hits if m), None)
    return int(first.group(1)) if first else None
47
+
48
+
49
def parse_profile(html: str, *, username: str) -> Profile:
    """Build a :class:`Profile` from a rendered profile page.

    The user id is scraped from embedded markers (falling back to ``0``)
    and the avatar URL from the profile/user card image, when present.
    ``username`` is passed through verbatim — the page itself is not
    consulted for it.
    """
    user_id = parse_profile_id_from_html(html) or 0
    doc = BeautifulSoup(html, "html.parser")
    avatar_url = None
    node = doc.select_one(".profile-card__avatar img, .user-card__avatar img")
    if node is not None and node.get("src"):
        avatar_url = node["src"]
    return Profile(id=user_id, username=username, avatar=avatar_url)
57
+
58
+
59
def parse_balance(html: str) -> Balance:
    """Parse the ``/profile/balance`` page into a :class:`Balance`.

    The headline value gives the total; available/frozen are refined from
    the informational chips and from the withdrawal form's ``max`` bound.
    When the page exposes no split at all, the whole total is treated as
    available.
    """
    soup = BeautifulSoup(html, "html.parser")

    headline = soup.select_one("#balanceValue, .profile-balance__value")
    total = _to_float(headline.get_text()) if headline else 0.0

    available, frozen = 0.0, 0.0
    # Some page variants expose the available/frozen split in chips.
    for chip in soup.select(".balance-chip, .profile-balance__chip, .balance-block"):
        label = chip.get_text(" ", strip=True).lower()
        if "доступ" in label:
            available = _to_float(label) or available
        elif "заморож" in label or "холд" in label:
            frozen = _to_float(label) or frozen

    # The withdrawal form caps the amount at what is actually withdrawable,
    # which is a hard lower bound on "available".
    amount_input = soup.select_one('form#withdrawalForm input[name="amount"]')
    if amount_input and amount_input.get("max"):
        try:
            available = max(available, float(amount_input["max"]))
        except ValueError:
            pass

    if available == 0.0 and frozen == 0.0:
        available = total
    return Balance(total=total, available=available, frozen=frozen)
85
+
86
+
87
def parse_products(html: str) -> List[Product]:
    """Scrape a server-rendered product list into :class:`Product` items.

    Rows (``.product-row``) carry id/status in data attributes; everything
    else comes from child elements. Missing pieces degrade to empty/zero
    values rather than raising.
    """
    soup = BeautifulSoup(html, "html.parser")
    products: List[Product] = []
    for row in soup.select(".product-row"):
        raw_id = row.get("data-id") or row.get("data-product-id") or "0"
        pick = row.select_one  # shorthand for child lookups below
        title = pick(".product-row__title")
        price = pick(".product-row__price")
        views = pick(".product-row__views")
        sales = pick(".product-row__sales")
        date = pick(".product-row__date")
        link = pick(".product-row__main, a[href*='/product/']")
        image = pick(".product-row__image img")
        products.append(
            Product(
                id=int(raw_id) if str(raw_id).isdigit() else 0,
                title=title.get_text(strip=True) if title else "",
                price=_to_float(price.get_text() if price else ""),
                status=row.get("data-status") or "active",
                views=_to_int(views.get_text() if views else ""),
                sales=_to_int(sales.get_text() if sales else ""),
                url=link.get("href") if link else None,
                image=image.get("src") if image else None,
                created_at=date.get_text(strip=True) if date else None,
            )
        )
    return products
114
+
115
+
116
def parse_orders(html: str, *, role: str) -> List[Order]:
    """Scrape an order-list page into :class:`Order` models.

    ``role`` is attached verbatim to each order. The status badge text
    (mostly Russian) is normalized to a small English vocabulary; an
    unrecognized badge keeps its lowercased text, and a missing badge
    yields ``"unknown"``.
    """
    # (substring needles, normalized status) — checked in this order.
    keyword_map = (
        (("заверш", "выпол"), "completed"),
        (("ожид", "пенд"), "pending"),
        (("отмен",), "cancelled"),
        (("возвр", "refund"), "refunded"),
        (("спор", "dispute"), "disputed"),
    )

    soup = BeautifulSoup(html, "html.parser")
    orders: List[Order] = []
    for row in soup.select(".order-row"):
        # Order id lives only in the /order/<id> link.
        order_id = 0
        link = row.select_one("a[href*='/order/']")
        if link and link.get("href"):
            found = re.search(r"/order/(\d+)", link["href"])
            if found:
                order_id = int(found.group(1))

        status = "unknown"
        badge = row.select_one(".badge, .order-row__status")
        if badge:
            badge_text = badge.get_text(strip=True).lower()
            status = badge_text  # fallback: keep the raw badge text
            for needles, normalized in keyword_map:
                if any(needle in badge_text for needle in needles):
                    status = normalized
                    break

        title = row.select_one(".order-row__title")
        price = row.select_one(".order-row__price")
        date = row.select_one(".order-row__date")
        partner = row.select_one(".order-row__user")
        orders.append(
            Order(
                id=order_id,
                product_title=title.get_text(strip=True) if title else "",
                amount=_to_float(price.get_text() if price else ""),
                status=status,
                role=role,
                partner=partner.get_text(strip=True) if partner else "",
                date=date.get_text(strip=True) if date else None,
            )
        )
    return orders