fmtr.tools 1.3.1__tar.gz → 1.3.3__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of fmtr.tools might be problematic.
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/PKG-INFO +42 -42
- fmtr_tools-1.3.3/fmtr/tools/dns_tools/__init__.py +6 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/dns_tools/client.py +28 -19
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/dns_tools/dm.py +59 -10
- fmtr_tools-1.3.3/fmtr/tools/dns_tools/proxy.py +67 -0
- fmtr_tools-1.3.3/fmtr/tools/dns_tools/server.py +38 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/pattern_tools.py +44 -44
- fmtr_tools-1.3.3/fmtr/tools/version +1 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr.tools.egg-info/PKG-INFO +42 -42
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr.tools.egg-info/SOURCES.txt +2 -0
- fmtr_tools-1.3.1/fmtr/tools/dns_tools/__init__.py +0 -6
- fmtr_tools-1.3.1/fmtr/tools/dns_tools/server.py +0 -89
- fmtr_tools-1.3.1/fmtr/tools/version +0 -1
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/LICENSE +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/README.md +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/__init__.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/ai_tools/__init__.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/ai_tools/agentic_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/ai_tools/inference_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/api_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/async_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/augmentation_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/caching_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/constants.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/data_modelling_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/dataclass_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/datatype_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/debugging_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/docker_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/entrypoints/__init__.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/entrypoints/cache_hfh.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/entrypoints/ep_test.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/entrypoints/remote_debug_test.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/entrypoints/shell_debug.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/environment_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/function_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/google_api_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/hash_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/hfh_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/html_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/http_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/import_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/inspection_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/interface_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/iterator_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/json_fix_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/json_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/logging_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/merging_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/metric_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/name_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/netrc_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/openai_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/packaging_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/parallel_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/path_tools/__init__.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/path_tools/app_path_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/path_tools/path_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/path_tools/type_path_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/pdf_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/platform_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/process_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/profiling_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/random_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/semantic_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/settings_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/setup_tools/__init__.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/setup_tools/setup_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/spaces_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/string_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/tabular_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/tests/__init__.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/tests/conftest.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/tests/helpers.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/tests/test_datatype.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/tests/test_environment.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/tests/test_json.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/tests/test_path.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/tests/test_yaml.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/tokenization_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/unicode_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/version_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr/tools/yaml_tools.py +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr.tools.egg-info/dependency_links.txt +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr.tools.egg-info/entry_points.txt +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr.tools.egg-info/requires.txt +41 -41
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/fmtr.tools.egg-info/top_level.txt +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/pyproject.toml +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/setup.cfg +0 -0
- {fmtr_tools-1.3.1 → fmtr_tools-1.3.3}/setup.py +0 -0
--- fmtr_tools-1.3.1/PKG-INFO
+++ fmtr_tools-1.3.3/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: fmtr.tools
-Version: 1.3.1
+Version: 1.3.3
 Summary: Collection of high-level tools to simplify everyday development tasks, with a focus on AI/ML
 Home-page: https://github.com/fmtr/fmtr.tools
 Author: Frontmatter
@@ -130,60 +130,60 @@ Requires-Dist: logfire[httpx]; extra == "http"
 Provides-Extra: setup
 Requires-Dist: setuptools; extra == "setup"
 Provides-Extra: all
-Requires-Dist: deepmerge; extra == "all"
-Requires-Dist: bokeh; extra == "all"
-Requires-Dist: distributed; extra == "all"
-Requires-Dist: pydantic; extra == "all"
-Requires-Dist: appdirs; extra == "all"
-Requires-Dist: sre_yield; extra == "all"
-Requires-Dist: logfire; extra == "all"
-Requires-Dist: ollama; extra == "all"
-Requires-Dist: Unidecode; extra == "all"
-Requires-Dist: flet[all]; extra == "all"
-Requires-Dist: google-auth-httplib2; extra == "all"
-Requires-Dist: pydantic-ai[logfire,openai]; extra == "all"
-Requires-Dist: pydevd-pycharm; extra == "all"
-Requires-Dist: torchvision; extra == "all"
-Requires-Dist: pyyaml; extra == "all"
-Requires-Dist: tabulate; extra == "all"
-Requires-Dist: pymupdf; extra == "all"
-Requires-Dist: docker; extra == "all"
-Requires-Dist: openai; extra == "all"
-Requires-Dist: sentence_transformers; extra == "all"
-Requires-Dist: dnspython[doh]; extra == "all"
-Requires-Dist: regex; extra == "all"
-Requires-Dist: semver; extra == "all"
-Requires-Dist: setuptools; extra == "all"
-Requires-Dist: google-auth; extra == "all"
-Requires-Dist: openpyxl; extra == "all"
-Requires-Dist: google-api-python-client; extra == "all"
-Requires-Dist: torchaudio; extra == "all"
-Requires-Dist: tinynetrc; extra == "all"
-Requires-Dist: httpx_retries; extra == "all"
 Requires-Dist: pymupdf4llm; extra == "all"
 Requires-Dist: yamlscript; extra == "all"
-Requires-Dist: filetype; extra == "all"
 Requires-Dist: pydantic-settings; extra == "all"
 Requires-Dist: flet-webview; extra == "all"
+Requires-Dist: json_repair; extra == "all"
+Requires-Dist: openai; extra == "all"
+Requires-Dist: uvicorn[standard]; extra == "all"
+Requires-Dist: tinynetrc; extra == "all"
+Requires-Dist: pydantic; extra == "all"
+Requires-Dist: pydevd-pycharm; extra == "all"
+Requires-Dist: tokenizers; extra == "all"
+Requires-Dist: logfire[fastapi]; extra == "all"
+Requires-Dist: tabulate; extra == "all"
+Requires-Dist: google-auth; extra == "all"
 Requires-Dist: google-auth-oauthlib; extra == "all"
-Requires-Dist:
+Requires-Dist: logfire; extra == "all"
+Requires-Dist: torchaudio; extra == "all"
+Requires-Dist: setuptools; extra == "all"
+Requires-Dist: pyyaml; extra == "all"
 Requires-Dist: fastapi; extra == "all"
-Requires-Dist: logfire[fastapi]; extra == "all"
 Requires-Dist: pandas; extra == "all"
 Requires-Dist: html2text; extra == "all"
-Requires-Dist:
-Requires-Dist:
-Requires-Dist: json_repair; extra == "all"
+Requires-Dist: google-api-python-client; extra == "all"
+Requires-Dist: logfire[httpx]; extra == "all"
 Requires-Dist: flet-video; extra == "all"
-Requires-Dist: httpx; extra == "all"
-Requires-Dist: faker; extra == "all"
 Requires-Dist: peft; extra == "all"
-Requires-Dist:
+Requires-Dist: pydantic-ai[logfire,openai]; extra == "all"
+Requires-Dist: diskcache; extra == "all"
+Requires-Dist: flet[all]; extra == "all"
+Requires-Dist: filetype; extra == "all"
+Requires-Dist: semver; extra == "all"
 Requires-Dist: transformers[sentencepiece]; extra == "all"
-Requires-Dist:
+Requires-Dist: deepmerge; extra == "all"
+Requires-Dist: torchvision; extra == "all"
+Requires-Dist: ollama; extra == "all"
+Requires-Dist: faker; extra == "all"
+Requires-Dist: google-auth-httplib2; extra == "all"
 Requires-Dist: contexttimer; extra == "all"
+Requires-Dist: pymupdf; extra == "all"
+Requires-Dist: dask[bag]; extra == "all"
+Requires-Dist: pytest-cov; extra == "all"
+Requires-Dist: sre_yield; extra == "all"
+Requires-Dist: regex; extra == "all"
+Requires-Dist: dnspython[doh]; extra == "all"
+Requires-Dist: appdirs; extra == "all"
+Requires-Dist: bokeh; extra == "all"
+Requires-Dist: httpx_retries; extra == "all"
+Requires-Dist: Unidecode; extra == "all"
+Requires-Dist: docker; extra == "all"
+Requires-Dist: httpx; extra == "all"
 Requires-Dist: huggingface_hub; extra == "all"
-Requires-Dist:
+Requires-Dist: sentence_transformers; extra == "all"
+Requires-Dist: distributed; extra == "all"
+Requires-Dist: openpyxl; extra == "all"
 Dynamic: author
 Dynamic: author-email
 Dynamic: description
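Only the version string and the (regenerated, reordered) `all` extra change in this metadata file. A minimal way to inspect the same metadata from an installed copy, should one want to verify it locally (standard-library sketch, not part of the package):

    import importlib.metadata

    # Reads the same PKG-INFO metadata shown in the diff above.
    print(importlib.metadata.version('fmtr.tools'))       # expected to be '1.3.3' for this release
    print(importlib.metadata.requires('fmtr.tools')[:5])  # first few Requires-Dist entries
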
--- fmtr_tools-1.3.1/fmtr/tools/dns_tools/client.py
+++ fmtr_tools-1.3.3/fmtr/tools/dns_tools/client.py
@@ -1,11 +1,12 @@
-import dns
 from dataclasses import dataclass
-from dns import query
 from functools import cached_property
+
+import dns as dnspython
+from dns import query
 from httpx_retries import Retry, RetryTransport
 
 from fmtr.tools import http_tools as http
-from fmtr.tools.dns_tools.dm import Exchange, Response
+from fmtr.tools.dns_tools.dm import Exchange, Response
 from fmtr.tools.logging_tools import logger
 
 RETRY_STRATEGY = Retry(
@@ -29,28 +30,36 @@ class HTTPClientDoH(http.Client):
     TRANSPORT = RetryTransport(retry=RETRY_STRATEGY)
 
 
-
-
-
-
+@dataclass
+class Plain:
+    """
+
+    Plain DNS
+
+    """
+    host: str
+    port: int = 53
 
     def resolve(self, exchange: Exchange):
+
         with logger.span(f'UDP {self.host}:{self.port}'):
-
-
+            response_plain = query.udp(q=exchange.query_last, where=self.host, port=self.port)
+            response = Response.from_message(response_plain)
+
+            exchange.response.message.answer += response.message.answer
 
 
 @dataclass
-class ClientDoH:
+class HTTP:
     """
 
-
+    DNS over HTTP
 
     """
 
     HEADERS = {"Content-Type": "application/dns-message"}
     CLIENT = HTTPClientDoH()
-    BOOTSTRAP =
+    BOOTSTRAP = Plain('8.8.8.8')
 
     host: str
     url: str
@@ -58,11 +67,10 @@ class ClientDoH:
 
     @cached_property
     def ip(self):
-        message =
-
-        exchange = Exchange(request=request, ip=None, port=None)
+        message = dnspython.message.make_query(self.host, dnspython.rdatatype.A, flags=0)
+        exchange = Exchange.from_wire(message.to_wire(), ip=None, port=None)
         self.BOOTSTRAP.resolve(exchange)
-        ip = next(iter(exchange.
+        ip = next(iter(exchange.response.answer.items.keys())).address
         return ip
 
     def resolve(self, exchange: Exchange):
@@ -71,10 +79,11 @@ class ClientDoH:
         Resolve via DoH
 
         """
-
+
         headers = self.HEADERS | dict(Host=self.host)
         url = self.url.format(host=self.ip)
-        response_doh = self.CLIENT.post(url, headers=headers, content=
+        response_doh = self.CLIENT.post(url, headers=headers, content=exchange.query_last.to_wire())
         response_doh.raise_for_status()
         response = Response.from_http(response_doh)
-
+
+        exchange.response.message.answer += response.message.answer
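As a rough usage sketch (not part of the package; the upstream resolver address, DoH host and URL template below are illustrative assumptions), both new resolvers act on an Exchange built from wire-format DNS data and append their answers to its response:

    # Sketch only: assumes fmtr.tools 1.3.3 with its dnspython[doh]/httpx dependencies installed.
    import dns.message

    from fmtr.tools.dns_tools import client
    from fmtr.tools.dns_tools.dm import Exchange

    # Build an Exchange from a wire-format query, as server.Plain does for incoming UDP packets.
    query = dns.message.make_query('example.com', 'A')
    exchange = Exchange.from_wire(query.to_wire(), ip=None, port=None)

    # Plain UDP resolution against a hypothetical upstream resolver.
    client.Plain('8.8.8.8').resolve(exchange)

    # DNS-over-HTTPS resolution; host/url here are illustrative, not defaults shipped by the package.
    doh = client.HTTP(host='dns.google', url='https://{host}/dns-query')
    doh.resolve(exchange)

    print(exchange.response.message.answer)
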
--- fmtr_tools-1.3.1/fmtr/tools/dns_tools/dm.py
+++ fmtr_tools-1.3.3/fmtr/tools/dns_tools/dm.py
@@ -1,10 +1,12 @@
-import dns
-import httpx
 from dataclasses import dataclass
-from dns.message import Message
 from functools import cached_property
 from typing import Self, Optional
 
+import dns
+import httpx
+from dns.message import Message, QueryMessage
+from dns.rrset import RRset
+
 
 @dataclass
 class BaseDNSData:
@@ -33,14 +35,17 @@ class Response(BaseDNSData):
     """
 
     http: Optional[httpx.Response] = None
+    is_complete: bool = False
 
     @classmethod
     def from_http(cls, response: httpx.Response) -> Self:
         self = cls(response.content, http=response)
         return self
 
-    @
-    def answer(self):
+    @property
+    def answer(self) -> Optional[RRset]:
+        if not self.message.answer:
+            return None
         return self.message.answer[-1]
 
 
@@ -54,7 +59,7 @@ class Request(BaseDNSData):
     wire: bytes
 
     @cached_property
-    def question(self):
+    def question(self) -> RRset:
         return self.message.question[0]
 
     @cached_property
@@ -77,10 +82,14 @@ class Request(BaseDNSData):
     def name_text(self):
         return self.name.to_text()
 
+    def get_response_template(self):
+        message = dns.message.make_response(self.message)
+        message.flags |= dns.flags.RA
+        return message
+
     @cached_property
     def blackhole(self) -> Response:
-        blackhole =
-        blackhole.flags |= dns.flags.RA
+        blackhole = self.get_response_template()
         blackhole.set_rcode(dns.rcode.NXDOMAIN)
         response = Response.from_message(blackhole)
         return response
@@ -98,13 +107,53 @@ class Exchange:
 
     request: Request
     response: Optional[Response] = None
-
+
 
     @classmethod
     def from_wire(cls, wire: bytes, ip: str, port: int) -> Self:
         request = Request(wire)
-
+        response = Response.from_message(request.get_response_template())
+
+        return cls(request=request, response=response, ip=ip, port=port)
 
     @cached_property
     def client(self):
         return f'{self.ip}:{self.port}'
+
+    @property
+    def question_last(self) -> RRset:
+        """
+
+        Contrive an RRset representing the latest/current question. This can be the original question - or a hybrid one if we've injected our own answers into the exchange.
+
+        """
+        if self.response.answer:
+            rrset = self.response.answer
+            ty = self.request.type
+            ttl = self.request.question.ttl
+            rdclass = self.request.question.rdclass
+            name = next(iter(rrset.items.keys())).to_text()
+
+            rrset_contrived = dns.rrset.from_text(
+                name=name,
+                ttl=ttl,
+                rdtype=ty,
+                rdclass=rdclass,
+
+            )
+
+            return rrset_contrived
+        else:
+            return self.request.question  # Solves the issue of digging out the name.
+
+    @property
+    def query_last(self) -> QueryMessage:
+        """
+
+        Create a query (e.g. for use by upstream) based on the last question.
+
+        """
+
+        question_last = self.question_last
+        query = dns.message.make_query(qname=question_last.name, rdclass=question_last.rdclass, rdtype=question_last.rdtype)
+        return query
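A small sketch of how the reworked Exchange data model appears to fit together (illustrative only; the class and property names are taken from the diff above, the query name is arbitrary):

    import dns.message

    from fmtr.tools.dns_tools.dm import Exchange

    wire = dns.message.make_query('example.com', 'A').to_wire()
    exchange = Exchange.from_wire(wire, ip='127.0.0.1', port=5353)

    # from_wire now pre-builds a response template (RA flag set) alongside the request.
    assert exchange.response is not None

    # With no injected answers yet, question_last falls back to the original question...
    print(exchange.question_last)

    # ...and query_last turns it into a fresh query suitable for an upstream resolver.
    print(exchange.query_last.question)
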
--- /dev/null
+++ fmtr_tools-1.3.3/fmtr/tools/dns_tools/proxy.py
@@ -0,0 +1,67 @@
+from dataclasses import dataclass
+
+from fmtr.tools import logger
+from fmtr.tools.dns_tools import server, client
+from fmtr.tools.dns_tools.dm import Exchange
+
+
+@dataclass
+class Proxy(server.Plain):
+    """
+
+    Base for a DNS Proxy server (plain server) TODO: Allow subclassing of any server type.
+
+    """
+
+    client: client.HTTP
+
+    def process_question(self, exchange: Exchange):
+        """
+
+        Modify exchange based on initial question.
+
+        """
+        return
+
+    def process_upstream(self, exchange: Exchange):
+        """
+
+        Modify exchange after upstream response.
+
+        """
+        return
+
+    def resolve(self, exchange: Exchange):
+        """
+
+        Resolve a request, processing each stage, initial question, upstream response etc.
+        Subclasses can override the relevant processing methods to implement custom behaviour.
+
+        """
+
+        request = exchange.request
+
+        with logger.span(f'Handling request {request.message.id=} {request.question=} {exchange.client=}...'):
+
+            if not request.is_valid:
+                raise ValueError(f'Only one question per request is supported. Got {len(request.question)} questions.')
+
+            with logger.span(f'Processing question...'):
+                self.process_question(exchange)
+                if exchange.response.is_complete:
+                    return
+
+            with logger.span(f'Making upstream request...'):
+                self.client.resolve(exchange)
+                if exchange.response.is_complete:
+                    return
+
+            with logger.span(f'Processing upstream response...'):
+                self.process_upstream(exchange)
+                if exchange.response.is_complete:
+                    return
+
+            exchange.response.is_complete = True
+
+            logger.info(f'Resolution complete {request.message.id=} {exchange.response.answer=}')
+            return
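The new Proxy base is designed to be subclassed, with process_question/process_upstream as the hook points. A hypothetical blocking proxy might look like the following sketch (the blocked name, listen address, port and upstream DoH settings are all assumptions, and the Response fields used are those visible in the dm.py diff above):

    from dataclasses import dataclass

    from fmtr.tools.dns_tools import client
    from fmtr.tools.dns_tools.dm import Exchange
    from fmtr.tools.dns_tools.proxy import Proxy


    @dataclass
    class BlockingProxy(Proxy):
        """Hypothetical subclass: answers NXDOMAIN for one blocked name, otherwise defers upstream."""

        def process_question(self, exchange: Exchange):
            if exchange.request.name_text.rstrip('.') == 'ads.example.com':
                # Reuse the blackhole (NXDOMAIN) response and mark the exchange complete
                # so resolve() short-circuits before the upstream request.
                exchange.response = exchange.request.blackhole
                exchange.response.is_complete = True


    upstream = client.HTTP(host='dns.google', url='https://{host}/dns-query')  # illustrative upstream
    proxy = BlockingProxy(host='0.0.0.0', port=5353, client=upstream)
    proxy.start()
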
--- /dev/null
+++ fmtr_tools-1.3.3/fmtr/tools/dns_tools/server.py
@@ -0,0 +1,38 @@
+import socket
+from dataclasses import dataclass
+
+from fmtr.tools.dns_tools.dm import Exchange
+
+
+@dataclass
+class Plain:
+    """
+
+    Base for starting a plain DNS server
+
+    """
+
+    host: str
+    port: int
+
+    def __post_init__(self):
+
+        self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+
+    def resolve(self, exchange: Exchange):
+        raise NotImplemented
+
+    def start(self):
+        """
+
+        Listen and resolve via overridden resolve method.
+
+        """
+        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+        sock.bind((self.host, self.port))
+        print(f"Listening on {self.host}:{self.port}")
+        while True:
+            data, (ip, port) = sock.recvfrom(512)
+            exchange = Exchange.from_wire(data, ip=ip, port=port)
+            self.resolve(exchange)
+            sock.sendto(exchange.response.message.to_wire(), (ip, port))
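Once a subclass implementing resolve() has been started, the UDP loop can be exercised with any stub resolver. A self-contained test client in Python (port 5353 is a hypothetical choice, assuming a Plain-based server is listening locally):

    import socket

    import dns.message

    # Send one wire-format query and decode whatever the server echoes back.
    query = dns.message.make_query('example.com', 'A')
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sock.sendto(query.to_wire(), ('127.0.0.1', 5353))
    wire, _ = sock.recvfrom(512)
    print(dns.message.from_wire(wire))
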
--- fmtr_tools-1.3.1/fmtr/tools/pattern_tools.py
+++ fmtr_tools-1.3.3/fmtr/tools/pattern_tools.py
@@ -1,8 +1,9 @@
-import regex as re
 from dataclasses import dataclass, asdict
 from functools import cached_property
 from typing import List, Any
 
+import regex as re
+
 from fmtr.tools.logging_tools import logger
 from fmtr.tools.string_tools import join
 
@@ -25,11 +26,6 @@ def alt(*patterns):
     pattern = MASK_GROUP.format(pattern=pattern)
     return pattern
 
-
-
-
-
-
 @dataclass
 class Key:
     RECORD_SEP = '␞'
@@ -93,17 +89,18 @@ class Item:
     Key-value pair
 
     """
-
-
+    source: Key
+    target: Key
 
 
 @dataclass
-class Mapper:
+class Transformer:
     """
 
     Pattern-based, dictionary-like mapper.
-    Compiles
-
-
+    Compiles an complex set of rules into single regex pattern, and determines which rule matched.
+    Inputs are then transformed according to the matching rule.
+    Works like a pattern-based dictionary when is_recursive==False.
+    Works something like an FSA/transducer when is_recursive=True.
 
     """
     PREFIX_GROUP = '__'
@@ -112,21 +109,21 @@ class Mapper:
     is_recursive: bool = False
 
     @cached_property
-    def pattern(self):
+    def pattern(self) -> str:
         """
 
-
+        Dynamically generated regex pattern based on the rules provided.
 
         """
         patterns = [
-            MASK_NAMED.format(key=f'{self.PREFIX_GROUP}{i}', pattern=item.
+            MASK_NAMED.format(key=f'{self.PREFIX_GROUP}{i}', pattern=item.source.pattern)
             for i, item in enumerate(self.items)
         ]
         pattern = alt(*patterns)
         return pattern
 
     @cached_property
-    def rx(self):
+    def rx(self) -> re.Pattern:
         """
 
         Regex object.
@@ -134,24 +131,26 @@ class Mapper:
         """
         return re.compile(self.pattern)
 
-    def get_default(self, key: Key):
+    def get_default(self, key: Key) -> Any:
         if self.is_recursive:
             return key
         else:
             return self.default
 
-    def get(self, key: Key) -> Key:
+    def get(self, key: Key) -> Key | Any:
         """
 
         Use recursive or single lookup pass, depending on whether recursive lookups have been specified.
 
         """
         if self.is_recursive:
-
+            with logger.span(f'Transforming recursively {key=}...'):
+                return self.get_recursive(key)
         else:
-
+            with logger.span(f'Transforming linearly {key=}...'):
+                return self.get_one(key)
 
-    def get_one(self, key: Key):
+    def get_one(self, key: Key) -> Key | Any:
         """
 
         Single lookup pass.
@@ -163,7 +162,7 @@ class Mapper:
 
         if not match:
             value = self.get_default(key)
-            logger.debug(f'No match for {key=}.')
+            logger.debug(f'No match for {key=}. Returning {self.default=}')
         else:
 
             match_ids = {name: v for name, v in match.groupdict().items() if v}
@@ -179,19 +178,21 @@ class Mapper:
         rule_id = next(iter(rule_ids))
         rule = self.items[rule_id]
 
-
-
+        logger.debug(f'Matched using {rule_id=}: {rule.source=}')
+
+        if isinstance(rule.target, Key):
+            value = rule.target.transform(match)
         else:
-            value = rule.
+            value = rule.target
 
-        logger.debug(f'
+        logger.debug(f'Transformed using {rule_id=}: {key=} → {value=}')
 
         return value
 
-    def get_recursive(self, key: Key) -> Key:
+    def get_recursive(self, key: Key) -> Key | Any:
         """
 
-        Lookup the provided
+        Lookup the provided key by continuously applying transforms until no changes are made
         or a circular loop is detected.
 
         """
@@ -201,29 +202,28 @@ class Mapper:
         def get_history_str():
             return join(history, sep=' → ')
 
-
-
-        if previous in history:
-            history.append(previous)
-            msg = f'Loop detected on node "{previous}": {get_history_str()}'
-            raise RewriteCircularLoopError(msg)
-
+        while True:
+            if previous in history:
                 history.append(previous)
+                msg = f'Loop detected on node "{previous}": {get_history_str()}'
+                raise RewriteCircularLoopError(msg)
 
-
+            history.append(previous)
+            new = previous
+            new = self.get_one(new)
+            if new == previous:
+                break
+            previous = new
 
-
-
-
-            break
-
-            previous = new
+            if not isinstance(new, Key):
+                history.append(previous)
+                break
 
         if len(history) == 1:
-            history_str = 'No
+            history_str = 'No transforms performed.'
         else:
             history_str = get_history_str()
-        logger.debug(f'Finished
+        logger.debug(f'Finished transforming: {history_str}')
 
         return previous
 
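For readers unfamiliar with the technique, the pattern/rx/get_one trio compiles every rule's source pattern into one alternation with named groups (__0, __1, …) and then dispatches on whichever group matched. A standalone illustration of that idea, independent of the fmtr.tools classes (the rule table and patterns below are invented for the example):

    import re

    # Standalone illustration of the compiled-alternation dispatch used by Transformer.get_one.
    rules = {0: (r'\d+', 'number'), 1: (r'[a-z]+', 'word')}

    # One pattern, one named group per rule: (?P<__0>\d+)|(?P<__1>[a-z]+)
    pattern = '|'.join(f'(?P<__{i}>{src})' for i, (src, _) in rules.items())
    rx = re.compile(pattern)

    def lookup(key: str):
        match = rx.fullmatch(key)
        if not match:
            return None  # the default
        rule_id = next(i for i in rules if match.group(f'__{i}'))
        return rules[rule_id][1]

    print(lookup('123'))  # number
    print(lookup('abc'))  # word
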
--- /dev/null
+++ fmtr_tools-1.3.3/fmtr/tools/version
@@ -0,0 +1 @@
+1.3.3
--- fmtr_tools-1.3.1/fmtr.tools.egg-info/PKG-INFO
+++ fmtr_tools-1.3.3/fmtr.tools.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: fmtr.tools
-Version: 1.3.1
+Version: 1.3.3
 Summary: Collection of high-level tools to simplify everyday development tasks, with a focus on AI/ML
 Home-page: https://github.com/fmtr/fmtr.tools
 Author: Frontmatter
@@ -130,60 +130,60 @@ Requires-Dist: logfire[httpx]; extra == "http"
 Provides-Extra: setup
 Requires-Dist: setuptools; extra == "setup"
 Provides-Extra: all
-Requires-Dist: deepmerge; extra == "all"
-Requires-Dist: bokeh; extra == "all"
-Requires-Dist: distributed; extra == "all"
-Requires-Dist: pydantic; extra == "all"
-Requires-Dist: appdirs; extra == "all"
-Requires-Dist: sre_yield; extra == "all"
-Requires-Dist: logfire; extra == "all"
-Requires-Dist: ollama; extra == "all"
-Requires-Dist: Unidecode; extra == "all"
-Requires-Dist: flet[all]; extra == "all"
-Requires-Dist: google-auth-httplib2; extra == "all"
-Requires-Dist: pydantic-ai[logfire,openai]; extra == "all"
-Requires-Dist: pydevd-pycharm; extra == "all"
-Requires-Dist: torchvision; extra == "all"
-Requires-Dist: pyyaml; extra == "all"
-Requires-Dist: tabulate; extra == "all"
-Requires-Dist: pymupdf; extra == "all"
-Requires-Dist: docker; extra == "all"
-Requires-Dist: openai; extra == "all"
-Requires-Dist: sentence_transformers; extra == "all"
-Requires-Dist: dnspython[doh]; extra == "all"
-Requires-Dist: regex; extra == "all"
-Requires-Dist: semver; extra == "all"
-Requires-Dist: setuptools; extra == "all"
-Requires-Dist: google-auth; extra == "all"
-Requires-Dist: openpyxl; extra == "all"
-Requires-Dist: google-api-python-client; extra == "all"
-Requires-Dist: torchaudio; extra == "all"
-Requires-Dist: tinynetrc; extra == "all"
-Requires-Dist: httpx_retries; extra == "all"
 Requires-Dist: pymupdf4llm; extra == "all"
 Requires-Dist: yamlscript; extra == "all"
-Requires-Dist: filetype; extra == "all"
 Requires-Dist: pydantic-settings; extra == "all"
 Requires-Dist: flet-webview; extra == "all"
+Requires-Dist: json_repair; extra == "all"
+Requires-Dist: openai; extra == "all"
+Requires-Dist: uvicorn[standard]; extra == "all"
+Requires-Dist: tinynetrc; extra == "all"
+Requires-Dist: pydantic; extra == "all"
+Requires-Dist: pydevd-pycharm; extra == "all"
+Requires-Dist: tokenizers; extra == "all"
+Requires-Dist: logfire[fastapi]; extra == "all"
+Requires-Dist: tabulate; extra == "all"
+Requires-Dist: google-auth; extra == "all"
 Requires-Dist: google-auth-oauthlib; extra == "all"
-Requires-Dist:
+Requires-Dist: logfire; extra == "all"
+Requires-Dist: torchaudio; extra == "all"
+Requires-Dist: setuptools; extra == "all"
+Requires-Dist: pyyaml; extra == "all"
 Requires-Dist: fastapi; extra == "all"
-Requires-Dist: logfire[fastapi]; extra == "all"
 Requires-Dist: pandas; extra == "all"
 Requires-Dist: html2text; extra == "all"
-Requires-Dist:
-Requires-Dist:
-Requires-Dist: json_repair; extra == "all"
+Requires-Dist: google-api-python-client; extra == "all"
+Requires-Dist: logfire[httpx]; extra == "all"
 Requires-Dist: flet-video; extra == "all"
-Requires-Dist: httpx; extra == "all"
-Requires-Dist: faker; extra == "all"
 Requires-Dist: peft; extra == "all"
-Requires-Dist:
+Requires-Dist: pydantic-ai[logfire,openai]; extra == "all"
+Requires-Dist: diskcache; extra == "all"
+Requires-Dist: flet[all]; extra == "all"
+Requires-Dist: filetype; extra == "all"
+Requires-Dist: semver; extra == "all"
 Requires-Dist: transformers[sentencepiece]; extra == "all"
-Requires-Dist:
+Requires-Dist: deepmerge; extra == "all"
+Requires-Dist: torchvision; extra == "all"
+Requires-Dist: ollama; extra == "all"
+Requires-Dist: faker; extra == "all"
+Requires-Dist: google-auth-httplib2; extra == "all"
 Requires-Dist: contexttimer; extra == "all"
+Requires-Dist: pymupdf; extra == "all"
+Requires-Dist: dask[bag]; extra == "all"
+Requires-Dist: pytest-cov; extra == "all"
+Requires-Dist: sre_yield; extra == "all"
+Requires-Dist: regex; extra == "all"
+Requires-Dist: dnspython[doh]; extra == "all"
+Requires-Dist: appdirs; extra == "all"
+Requires-Dist: bokeh; extra == "all"
+Requires-Dist: httpx_retries; extra == "all"
+Requires-Dist: Unidecode; extra == "all"
+Requires-Dist: docker; extra == "all"
+Requires-Dist: httpx; extra == "all"
 Requires-Dist: huggingface_hub; extra == "all"
-Requires-Dist:
+Requires-Dist: sentence_transformers; extra == "all"
+Requires-Dist: distributed; extra == "all"
+Requires-Dist: openpyxl; extra == "all"
 Dynamic: author
 Dynamic: author-email
 Dynamic: description
--- fmtr_tools-1.3.1/fmtr.tools.egg-info/SOURCES.txt
+++ fmtr_tools-1.3.3/fmtr.tools.egg-info/SOURCES.txt
@@ -57,6 +57,7 @@ setup.py
 ./fmtr/tools/dns_tools/__init__.py
 ./fmtr/tools/dns_tools/client.py
 ./fmtr/tools/dns_tools/dm.py
+./fmtr/tools/dns_tools/proxy.py
 ./fmtr/tools/dns_tools/server.py
 ./fmtr/tools/entrypoints/__init__.py
 ./fmtr/tools/entrypoints/cache_hfh.py
@@ -138,6 +139,7 @@ fmtr/tools/ai_tools/inference_tools.py
 fmtr/tools/dns_tools/__init__.py
 fmtr/tools/dns_tools/client.py
 fmtr/tools/dns_tools/dm.py
+fmtr/tools/dns_tools/proxy.py
 fmtr/tools/dns_tools/server.py
 fmtr/tools/entrypoints/__init__.py
 fmtr/tools/entrypoints/cache_hfh.py
--- fmtr_tools-1.3.1/fmtr/tools/dns_tools/server.py
+++ /dev/null
@@ -1,89 +0,0 @@
-import socket
-from dataclasses import dataclass
-
-from fmtr.tools import logger
-from fmtr.tools.dns_tools.client import ClientDoH
-from fmtr.tools.dns_tools.dm import Exchange
-
-
-@dataclass
-class ServerBasePlain:
-    """
-
-    Base for starting a plain DNS server
-
-    """
-
-    host: str
-    port: int
-
-    def __post_init__(self):
-
-        self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
-
-    def resolve(self, exchange: Exchange):
-        raise NotImplemented
-
-    def start(self):
-        """
-
-        Listen and resolve via overridden resolve method.
-
-        """
-        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
-        sock.bind((self.host, self.port))
-        print(f"Listening on {self.host}:{self.port}")
-        while True:
-            data, (ip, port) = sock.recvfrom(512)
-            exchange = Exchange.from_wire(data, ip=ip, port=port)
-            self.resolve(exchange)
-            sock.sendto(exchange.response.wire, (ip, port))
-
-
-@dataclass
-class ServerBaseDoHProxy(ServerBasePlain):
-    """
-
-    Base for a DNS Proxy server
-
-    """
-
-    client: ClientDoH
-
-    def process_question(self, exchange: Exchange):
-        return
-
-    def process_upstream(self, exchange: Exchange):
-        return
-
-    def resolve(self, exchange: Exchange):
-        """
-
-        Resolve a request, processing each stage, initial question, upstream response etc.
-        Subclasses can override the relevant processing methods to implement custom behaviour.
-
-        """
-
-        request = exchange.request
-
-        with logger.span(f'Handling request ID {request.message.id} for {request.name_text} from {exchange.client}...'):
-
-            if not request.is_valid:
-                raise ValueError(f'Only one question per request is supported. Got {len(request.question)} questions.')
-
-            with logger.span(f'Processing question...'):
-                self.process_question(exchange)
-                if exchange.response:
-                    return
-
-            with logger.span(f'Making upstream request for {request.name_text}...'):
-                self.client.resolve(exchange)
-
-            with logger.span(f'Processing upstream response...'):
-                self.process_upstream(exchange)
-
-            if exchange.response:
-                return
-
-            exchange.response = exchange.response_upstream
-            return
--- fmtr_tools-1.3.1/fmtr/tools/version
+++ /dev/null
@@ -1 +0,0 @@
-1.3.1
--- fmtr_tools-1.3.1/fmtr.tools.egg-info/requires.txt
+++ fmtr_tools-1.3.3/fmtr.tools.egg-info/requires.txt
@@ -15,60 +15,60 @@ pydantic-ai[logfire,openai]
 ollama
 
 [all]
-deepmerge
-bokeh
-distributed
-pydantic
-appdirs
-sre_yield
-logfire
-ollama
-Unidecode
-flet[all]
-google-auth-httplib2
-pydantic-ai[logfire,openai]
-pydevd-pycharm
-torchvision
-pyyaml
-tabulate
-pymupdf
-docker
-openai
-sentence_transformers
-dnspython[doh]
-regex
-semver
-setuptools
-google-auth
-openpyxl
-google-api-python-client
-torchaudio
-tinynetrc
-httpx_retries
 pymupdf4llm
 yamlscript
-filetype
 pydantic-settings
 flet-webview
+json_repair
+openai
+uvicorn[standard]
+tinynetrc
+pydantic
+pydevd-pycharm
+tokenizers
+logfire[fastapi]
+tabulate
+google-auth
 google-auth-oauthlib
-
+logfire
+torchaudio
+setuptools
+pyyaml
 fastapi
-logfire[fastapi]
 pandas
 html2text
-
-
-json_repair
+google-api-python-client
+logfire[httpx]
 flet-video
-httpx
-faker
 peft
-
+pydantic-ai[logfire,openai]
+diskcache
+flet[all]
+filetype
+semver
 transformers[sentencepiece]
-
+deepmerge
+torchvision
+ollama
+faker
+google-auth-httplib2
 contexttimer
+pymupdf
+dask[bag]
+pytest-cov
+sre_yield
+regex
+dnspython[doh]
+appdirs
+bokeh
+httpx_retries
+Unidecode
+docker
+httpx
 huggingface_hub
-
+sentence_transformers
+distributed
+openpyxl
 
 [api]
 fastapi