fmtr.tools 1.2.5__py3-none-any.whl → 1.2.7__py3-none-any.whl

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the registry.

Potentially problematic release.


This version of fmtr.tools might be problematic.

fmtr/tools/dns_tools/client.py CHANGED
@@ -1,4 +1,5 @@
  import dns
+ from dataclasses import dataclass
  from dns import query
  from functools import cached_property
  from httpx_retries import Retry, RetryTransport
@@ -39,6 +40,7 @@ class ClientBasePlain:
          exchange.response_upstream = Response.from_message(response)


+ @dataclass
  class ClientDoH:
      """

@@ -47,19 +49,19 @@ class ClientDoH:
      """

      HEADERS = {"Content-Type": "application/dns-message"}
-     client = HTTPClientDoH()
+     CLIENT = HTTPClientDoH()
+     BOOTSTRAP = ClientBasePlain('8.8.8.8')
+
+     host: str
+     url: str

-     def __init__(self, host, url):
-         self.host = host
-         self.url = url
-         self.bootstrap = ClientBasePlain('8.8.8.8')

      @cached_property
      def ip(self):
          message = dns.message.make_query(self.host, dns.rdatatype.A, flags=0)
          request = Request.from_message(message)
          exchange = Exchange(request=request, ip=None, port=None)
-         self.bootstrap.resolve(exchange)
+         self.BOOTSTRAP.resolve(exchange)
          ip = next(iter(exchange.response_upstream.answer.items.keys())).address
          return ip

@@ -72,7 +74,7 @@ class ClientDoH:
          request = exchange.request
          headers = self.HEADERS | dict(Host=self.host)
          url = self.url.format(host=self.ip)
-         response_doh = self.client.post(url, headers=headers, content=request.wire)
+         response_doh = self.CLIENT.post(url, headers=headers, content=request.wire)
          response_doh.raise_for_status()
          response = Response.from_http(response_doh)
          exchange.response_upstream = response
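
The change above turns ClientDoH into a dataclass: host and url become declared fields, while the shared HTTP transport and the plain bootstrap resolver move to the class-level CLIENT and BOOTSTRAP attributes. A minimal construction sketch based on the fields shown in the diff follows; the endpoint values are hypothetical, not defaults shipped by the package:

    from fmtr.tools.dns_tools.client import ClientDoH

    # Hypothetical DoH endpoint for illustration only.
    # The {host} placeholder in the URL is filled with the bootstrapped IP,
    # while the Host header carries the original hostname.
    client = ClientDoH(host='dns.google', url='https://{host}/dns-query')

    # client.ip is resolved once through the class-level BOOTSTRAP plain client ('8.8.8.8');
    # subsequent queries are POSTed through the shared CLIENT transport.
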
fmtr/tools/dns_tools/server.py CHANGED
@@ -1,10 +1,12 @@
  import socket
+ from dataclasses import dataclass

  from fmtr.tools import logger
  from fmtr.tools.dns_tools.client import ClientDoH
- from fmtr.tools.dns_tools.dm import Exchange, Response
+ from fmtr.tools.dns_tools.dm import Exchange


+ @dataclass
  class ServerBasePlain:
      """

@@ -12,9 +14,11 @@ class ServerBasePlain:

      """

-     def __init__(self, host, port):
-         self.host = host
-         self.port = port
+     host: str
+     port: int
+
+     def __post_init__(self):
+
          self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

      def resolve(self, exchange: Exchange):
@@ -36,6 +40,7 @@ class ServerBasePlain:
          sock.sendto(exchange.response.wire, (ip, port))


+ @dataclass
  class ServerBaseDoHProxy(ServerBasePlain):
      """

@@ -43,9 +48,7 @@ class ServerBaseDoHProxy(ServerBasePlain):

      """

-     def __init__(self, host, port, client: ClientDoH):
-         super().__init__(host, port)
-         self.client = client
+     client: ClientDoH

      def process_question(self, exchange: Exchange):
          return
@@ -53,16 +56,6 @@ class ServerBaseDoHProxy(ServerBasePlain):
      def process_upstream(self, exchange: Exchange):
          return

-     def from_upstream(self, exchange: Exchange) -> Exchange:
-
-         request = exchange.request
-         response_doh = self.client.post(self.URL, headers=self.HEADERS, content=request.wire)
-         response_doh.raise_for_status()
-         response = Response.from_http(response_doh)
-         exchange.response_upstream = response
-
-         return exchange
-
      def resolve(self, exchange: Exchange):
          """

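
With ServerBasePlain and ServerBaseDoHProxy now dataclasses, the proxy takes host, port and client as fields, and the UDP socket is opened in __post_init__ instead of __init__. A rough wiring sketch under the field order shown in the diff; the address and port values are illustrative only:

    from fmtr.tools.dns_tools.client import ClientDoH
    from fmtr.tools.dns_tools.server import ServerBaseDoHProxy

    # Illustrative values; ClientDoH fields follow the client.py diff above.
    doh = ClientDoH(host='dns.google', url='https://{host}/dns-query')
    proxy = ServerBaseDoHProxy(host='0.0.0.0', port=5053, client=doh)

    # __post_init__, inherited from ServerBasePlain, opens the UDP socket;
    # resolve() is expected to forward each Exchange upstream via the DoH client.
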
fmtr/tools/pattern_tools.py CHANGED
@@ -1,9 +1,10 @@
  import regex as re
- from dataclasses import dataclass
+ from dataclasses import dataclass, asdict
  from functools import cached_property
- from typing import List
+ from typing import List, Any

  from fmtr.tools.logging_tools import logger
+ from fmtr.tools.string_tools import join


  class RewriteCircularLoopError(Exception):
@@ -14,59 +15,101 @@ class RewriteCircularLoopError(Exception):
      """


+ MASK_GROUP = '(?:{pattern})'
+ MASK_NAMED = r"(?P<{key}>{pattern})"
+
+
+ def alt(*patterns):
+     patterns = sorted(patterns, key=len, reverse=True)
+     pattern = '|'.join(patterns)
+     pattern = MASK_GROUP.format(pattern=pattern)
+     return pattern
+
+
+
+
+
+
  @dataclass
- class Rewrite:
-     """
-     Represents a single rule for pattern matching and target string replacement.
+ class Key:
+     RECORD_SEP = '␞'

-     This class is used to define a rule with a pattern and a target string.
-     The `pattern` is a regular expression used to identify matches in input text.
-     The `target` allows rewriting the identified matches with a formatted string.
-     It provides properties for generating a unique identifier for use as a regex group name and compiling the provided pattern into a regular expression object.
+     def flatten(self, data):
+         """

-     """
-     pattern: str
-     target: str
+         Flatten/serialise dictionary data
+
+         """
+         pairs = [f'{value}' for key, value in data.items()]
+         string = self.RECORD_SEP.join(pairs)
+         return string

      @cached_property
-     def id(self):
+     def pattern(self):
          """
-
-         Regex group name.
-
+
+         Serialise to pattern
+
          """
-         return f'id{abs(hash(self.pattern))}'
+         data = {key: MASK_NAMED.format(key=key, pattern=value) for key, value in asdict(self).items()}
+         pattern = self.flatten(data)
+         return pattern

      @cached_property
      def rx(self):
          """

-         Regex object.
+         Compile to Regular Expression

          """
          return re.compile(self.pattern)

-     def apply(self, match: re.Match):
+     @cached_property
+     def string(self):
+         """
+
+         Serialise to string
+
+         """
+         string = self.flatten(asdict(self))
+         return string
+
+     def transform(self, match: re.Match):
          """

-         Rewrite using the target string and match groups.
+         Transform match object into a new object of the same type.

          """
-         target = self.target.format(**match.groupdict())
-         return target
+         groupdict = match.groupdict()
+         data = {key: value.format(**groupdict) for key, value in asdict(self).items()}
+         obj = self.__class__(**data)
+         return obj
+
+
+ @dataclass
+ class Item:
+     """
+
+     Key-value pair

+     """
+     key: Key
+     value: Key

  @dataclass
- class Rewriter:
+ class Mapper:
      """

-     Represents a Rewriter class that handles pattern matching, rule application, and text rewriting.
+     Pattern-based, dictionary-like mapper.
      Compiles a single regex pattern from a list of rules, and determines which rule matched.
-     It supports initialization from structured rule data, execution of a single rewrite pass, and
-     recursive rewriting until a stable state is reached.
+     It supports initialization from structured rule data, execution of a single lookup pass, and
+     recursive lookups until a stable state is reached.

      """
-     rules: List[Rewrite]
+     PREFIX_GROUP = '__'
+     items: List[Item]
+     default: Any = None
+     is_recursive: bool = False

      @cached_property
      def pattern(self):
@@ -75,71 +118,90 @@ class Rewriter:
          Provides a dynamically generated regex pattern based on the rules provided.

          """
-         patterns = [fr"(?P<{rule.id}>{rule.pattern})" for rule in self.rules]
-         sorted(patterns, key=len, reverse=True)
-         pattern = '|'.join(patterns)
+         patterns = [
+             MASK_NAMED.format(key=f'{self.PREFIX_GROUP}{i}', pattern=item.key.pattern)
+             for i, item in enumerate(self.items)
+         ]
+         pattern = alt(*patterns)
          return pattern

      @cached_property
-     def rule_lookup(self):
+     def rx(self):
          """

-         Dictionary mapping rule identifiers to their corresponding rules.
+         Regex object.
+
          """
+         return re.compile(self.pattern)

-         return {rule.id: rule for rule in self.rules}
+     def get_default(self, key: Key):
+         if self.is_recursive:
+             return key
+         else:
+             return self.default

-     @cached_property
-     def rx(self):
+     def get(self, key: Key) -> Key:
          """

-         Regex object.
+         Use recursive or single lookup pass, depending on whether recursive lookups have been specified.

          """
-         return re.compile(self.pattern)
+         if self.is_recursive:
+             return self.get_recursive(key)
+         else:
+             return self.get_one(key)

-     def rewrite_pass(self, source: str):
+     def get_one(self, key: Key):
          """

-         Single rewrite pass.
-         Rewrites the provided source string based on the matching rule.
+         Single lookup pass.
+         Lookup the source string based on the matching rule.

          """

-         match = self.rx.fullmatch(source)
+         match = self.rx.fullmatch(key.string)

          if not match:
-             return source
+             value = self.get_default(key)
+             logger.debug(f'No match for {key=}.')
+         else:
+
+             match_ids = {name: v for name, v in match.groupdict().items() if v}
+             rule_ids = {
+                 int(id.removeprefix(self.PREFIX_GROUP))
+                 for id in match_ids.keys() if id.startswith(self.PREFIX_GROUP)
+             }

-         match_ids = {k: v for k, v in match.groupdict().items() if v}
-         match_id = match_ids & self.rule_lookup.keys()
+             if len(rule_ids) != 1:
+                 msg = f'Multiple group matches: {rule_ids}'
+                 raise ValueError(msg)

-         if len(match_id) != 1:
-             msg = f'Multiple group matches: {match_id}'
-             raise ValueError(msg)
+             rule_id = next(iter(rule_ids))
+             rule = self.items[rule_id]

-         match_id = next(iter(match_id))
-         rule = self.rule_lookup[match_id]
-         target = rule.apply(match)
+             if isinstance(rule.value, Key):
+                 value = rule.value.transform(match)
+             else:
+                 value = rule.value

-         logger.debug(f'Rewrote using {match_id=}: {source=} -> {target=}')
+             logger.debug(f'Matched using {rule_id=}: {key=} {value=}')

-         return target
+         return value

-     def rewrite(self, source: str) -> str:
+     def get_recursive(self, key: Key) -> Key:
          """

-         Rewrites the provided text by continuously applying rewrite rules until no changes are made
+         Lookup the provided text by continuously applying lookup rules until no changes are made
          or a circular loop is detected.

          """
          history = []
-         previous = source
+         previous = key

          def get_history_str():
-             return ' -> '.join(history)
+             return join(history, sep=' ')

-         with logger.span(f'Rewriting "{source}"...'):
+         with logger.span(f'Matching {key=}...'):
              while True:
                  if previous in history:
                      history.append(previous)
@@ -150,7 +212,7 @@ class Rewriter:

                  new = previous

-                 new = self.rewrite_pass(new)
+                 new = self.get_one(new)

                  if new == previous:
                      break
@@ -158,18 +220,13 @@ class Rewriter:
                  previous = new

              if len(history) == 1:
-                 history_str = 'No rewrites performed.'
+                 history_str = 'No matching performed.'
              else:
                  history_str = get_history_str()
-             logger.debug(f'Finished rewriting: {history_str}')
+             logger.debug(f'Finished matching: {history_str}')

          return previous

-     @classmethod
-     def from_data(cls, data):
-         rules = [Rewrite(*pair) for pair in data.items()]
-         self = cls(rules=rules)
-         return self
-
-

+ if __name__ == '__main__':
+     ...
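
The Rewrite/Rewriter pair is reworked into a pattern-keyed mapping API: a Key subclass serialises its fields into named-group regex patterns, Item pairs a pattern Key with a value Key, and Mapper compiles a single alternation and dispatches on whichever __N group matched. A minimal usage sketch of that API; the Domain subclass and the example data are hypothetical and exist only to illustrate the lookup flow:

    from dataclasses import dataclass

    from fmtr.tools.pattern_tools import Item, Key, Mapper

    @dataclass
    class Domain(Key):
        # Hypothetical single-field Key: the field value acts as a regex pattern
        # on the key side and as a format template on the value side.
        name: str

    items = [
        Item(
            key=Domain(name=r'(?P<sub>\w+)\.example\.com'),
            value=Domain(name='{sub}.internal.example'),
        ),
    ]

    mapper = Mapper(items=items)
    mapper.get(Domain(name='www.example.com'))  # -> Domain(name='www.internal.example')
    mapper.get(Domain(name='other.net'))        # -> None (the default, since is_recursive is False)
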
fmtr/tools/version CHANGED
@@ -1 +1 @@
- 1.2.5
+ 1.2.7
fmtr_tools-1.2.5.dist-info/METADATA → fmtr_tools-1.2.7.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: fmtr.tools
- Version: 1.2.5
+ Version: 1.2.7
  Summary: Collection of high-level tools to simplify everyday development tasks, with a focus on AI/ML
  Home-page: https://github.com/fmtr/tools
  Author: Frontmatter
@@ -106,6 +106,8 @@ Requires-Dist: pydevd-pycharm; extra == "debug"
  Provides-Extra: sets
  Requires-Dist: pydantic-settings; extra == "sets"
  Requires-Dist: pydantic; extra == "sets"
+ Requires-Dist: yamlscript; extra == "sets"
+ Requires-Dist: pyyaml; extra == "sets"
  Provides-Extra: path-app
  Requires-Dist: appdirs; extra == "path-app"
  Provides-Extra: path-type
@@ -128,60 +130,60 @@ Requires-Dist: logfire[httpx]; extra == "http"
  Provides-Extra: setup
  Requires-Dist: setuptools; extra == "setup"
  Provides-Extra: all
- Requires-Dist: distributed; extra == "all"
+ Requires-Dist: torchvision; extra == "all"
+ Requires-Dist: docker; extra == "all"
+ Requires-Dist: yamlscript; extra == "all"
+ Requires-Dist: google-auth; extra == "all"
  Requires-Dist: pydevd-pycharm; extra == "all"
- Requires-Dist: ollama; extra == "all"
- Requires-Dist: sre_yield; extra == "all"
- Requires-Dist: flet-webview; extra == "all"
+ Requires-Dist: html2text; extra == "all"
  Requires-Dist: dask[bag]; extra == "all"
- Requires-Dist: semver; extra == "all"
- Requires-Dist: diskcache; extra == "all"
- Requires-Dist: tabulate; extra == "all"
- Requires-Dist: bokeh; extra == "all"
- Requires-Dist: openai; extra == "all"
- Requires-Dist: pytest-cov; extra == "all"
- Requires-Dist: uvicorn[standard]; extra == "all"
+ Requires-Dist: pydantic-ai[logfire,openai]; extra == "all"
+ Requires-Dist: json_repair; extra == "all"
  Requires-Dist: pyyaml; extra == "all"
+ Requires-Dist: dnspython[doh]; extra == "all"
+ Requires-Dist: contexttimer; extra == "all"
+ Requires-Dist: transformers[sentencepiece]; extra == "all"
+ Requires-Dist: filetype; extra == "all"
  Requires-Dist: Unidecode; extra == "all"
- Requires-Dist: json_repair; extra == "all"
- Requires-Dist: google-auth-oauthlib; extra == "all"
  Requires-Dist: pymupdf4llm; extra == "all"
- Requires-Dist: google-auth; extra == "all"
- Requires-Dist: fastapi; extra == "all"
- Requires-Dist: contexttimer; extra == "all"
- Requires-Dist: tinynetrc; extra == "all"
- Requires-Dist: flet-video; extra == "all"
- Requires-Dist: yamlscript; extra == "all"
+ Requires-Dist: flet-webview; extra == "all"
+ Requires-Dist: httpx_retries; extra == "all"
+ Requires-Dist: sentence_transformers; extra == "all"
+ Requires-Dist: tokenizers; extra == "all"
+ Requires-Dist: bokeh; extra == "all"
+ Requires-Dist: openpyxl; extra == "all"
  Requires-Dist: appdirs; extra == "all"
- Requires-Dist: logfire[httpx]; extra == "all"
+ Requires-Dist: openai; extra == "all"
+ Requires-Dist: sre_yield; extra == "all"
  Requires-Dist: huggingface_hub; extra == "all"
- Requires-Dist: transformers[sentencepiece]; extra == "all"
- Requires-Dist: torchvision; extra == "all"
- Requires-Dist: httpx_retries; extra == "all"
+ Requires-Dist: uvicorn[standard]; extra == "all"
+ Requires-Dist: fastapi; extra == "all"
+ Requires-Dist: deepmerge; extra == "all"
+ Requires-Dist: tabulate; extra == "all"
+ Requires-Dist: pydantic-settings; extra == "all"
  Requires-Dist: pymupdf; extra == "all"
- Requires-Dist: docker; extra == "all"
- Requires-Dist: regex; extra == "all"
- Requires-Dist: tokenizers; extra == "all"
- Requires-Dist: pydantic; extra == "all"
- Requires-Dist: logfire; extra == "all"
- Requires-Dist: filetype; extra == "all"
- Requires-Dist: logfire[fastapi]; extra == "all"
- Requires-Dist: html2text; extra == "all"
+ Requires-Dist: distributed; extra == "all"
+ Requires-Dist: faker; extra == "all"
+ Requires-Dist: peft; extra == "all"
  Requires-Dist: flet[all]; extra == "all"
- Requires-Dist: torchaudio; extra == "all"
- Requires-Dist: setuptools; extra == "all"
  Requires-Dist: httpx; extra == "all"
- Requires-Dist: dnspython[doh]; extra == "all"
- Requires-Dist: deepmerge; extra == "all"
+ Requires-Dist: semver; extra == "all"
+ Requires-Dist: tinynetrc; extra == "all"
  Requires-Dist: google-auth-httplib2; extra == "all"
- Requires-Dist: sentence_transformers; extra == "all"
- Requires-Dist: peft; extra == "all"
+ Requires-Dist: logfire; extra == "all"
  Requires-Dist: google-api-python-client; extra == "all"
- Requires-Dist: openpyxl; extra == "all"
- Requires-Dist: faker; extra == "all"
+ Requires-Dist: pydantic; extra == "all"
+ Requires-Dist: logfire[fastapi]; extra == "all"
+ Requires-Dist: ollama; extra == "all"
  Requires-Dist: pandas; extra == "all"
- Requires-Dist: pydantic-settings; extra == "all"
- Requires-Dist: pydantic-ai[logfire,openai]; extra == "all"
+ Requires-Dist: diskcache; extra == "all"
+ Requires-Dist: logfire[httpx]; extra == "all"
+ Requires-Dist: regex; extra == "all"
+ Requires-Dist: google-auth-oauthlib; extra == "all"
+ Requires-Dist: pytest-cov; extra == "all"
+ Requires-Dist: setuptools; extra == "all"
+ Requires-Dist: torchaudio; extra == "all"
+ Requires-Dist: flet-video; extra == "all"
  Dynamic: author
  Dynamic: author-email
  Dynamic: description
fmtr_tools-1.2.5.dist-info/RECORD → fmtr_tools-1.2.7.dist-info/RECORD CHANGED
@@ -30,7 +30,7 @@ fmtr/tools/netrc_tools.py,sha256=PpNpz_mWlQi6VHGromKwFfTyLpHUXsd4LY6-OKLCbeI,376
  fmtr/tools/openai_tools.py,sha256=6SUgejgzUzmlKKct2_ePXntvMegu3FJgfk9x7aqtqYc,742
  fmtr/tools/packaging_tools.py,sha256=FlgOTnDRHZWQL2iR-wucTsyGEHRE-MlddKL30MPmUqE,253
  fmtr/tools/parallel_tools.py,sha256=QEb_gN1StkxsqYaH4HSjiJX8Y3gpb2uKNsOzG4uFpaM,3071
- fmtr/tools/pattern_tools.py,sha256=T9f2wVi_0lPuj5npcxs0yBU91LAgWGHwtrdN6rUZKm8,4357
+ fmtr/tools/pattern_tools.py,sha256=GzdhKt-nIHuKCGI9y3aGUaD5k9thbqbA9UXJoPPIcIY,5328
  fmtr/tools/pdf_tools.py,sha256=xvv9B84uAF81rFJRnXhSsxYuP42vY9ZdPVFrSMVe8G8,4069
  fmtr/tools/platform_tools.py,sha256=7p69CmAHe_sF68Fx9uVhns1k5EewTHTWgUYzkl6ZQKA,308
  fmtr/tools/process_tools.py,sha256=Ysh5Dk2QFBhXQerArjKdt7xZd3JrN5Ho02AaOjH0Nnw,1425
@@ -44,16 +44,16 @@ fmtr/tools/tabular_tools.py,sha256=tpIpZzYku1HcJrHZJL6BC39LmN3WUWVhFbK2N7nDVmE,1
  fmtr/tools/tokenization_tools.py,sha256=me-IBzSLyNYejLybwjO9CNB6Mj2NYfKPaOVThXyaGNg,4268
  fmtr/tools/tools.py,sha256=CAsApa1YwVdNE6H66Vjivs_mXYvOas3rh7fPELAnTpk,795
  fmtr/tools/unicode_tools.py,sha256=yS_9wpu8ogNoiIL7s1G_8bETFFO_YQlo4LNPv1NLDeY,52
- fmtr/tools/version,sha256=Cl1xpTAk2dakEAYnido_4VW6RMyhUhB5H7iuCcdJQmw,5
+ fmtr/tools/version,sha256=5xFfItk8jmF4J6XXD2GJpvj_7M8dldsmM0Uqpe1aihs,5
  fmtr/tools/version_tools.py,sha256=yNs_CGqWpqE4jbK9wsPIi14peJVXYbhIcMqHAFOw3yE,1480
  fmtr/tools/yaml_tools.py,sha256=9kuYChqJelWQIjGlSnK4iDdOWWH06P0gp9jIcRrC3UI,1903
  fmtr/tools/ai_tools/__init__.py,sha256=JZrLuOFNV1A3wvJgonxOgz_4WS-7MfCuowGWA5uYCjs,372
  fmtr/tools/ai_tools/agentic_tools.py,sha256=acSEPFS-aguDXanWGs3fAAlRyJSYPZW7L-Kb2qDLm-I,4300
  fmtr/tools/ai_tools/inference_tools.py,sha256=2UP2gXEyOJUjyyV6zmFIYmIxUsh1rXkRH0IbFvr2bRs,11908
  fmtr/tools/dns_tools/__init__.py,sha256=PwHxnpiy6_isQfUmz_5V1hL0dcPaA6ItqvoGWW8MOfk,222
- fmtr/tools/dns_tools/client.py,sha256=zYMUc63iFRSPtCnhqx_nvsHHf-VQrLt3Vor0qxImfLs,2390
+ fmtr/tools/dns_tools/client.py,sha256=omHdk9bA_8u2-VMQhh0c9r9e-oG4mZ0MWA9lfbtSEIc,2371
  fmtr/tools/dns_tools/dm.py,sha256=mvXacq6QJ86G0S0tkzJFhU7bOaSJytvsMNlxs5X9hfE,2236
- fmtr/tools/dns_tools/server.py,sha256=8oGCJZ-xTJhJ0gq21ro8Z0ZpfRYIdoWy7bLm9LOQcU0,2764
+ fmtr/tools/dns_tools/server.py,sha256=hSZhK4EZD6Ox4uRI3ldbnyyZf6DYgMUcTfffbrZN5pk,2329
  fmtr/tools/entrypoints/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fmtr/tools/entrypoints/cache_hfh.py,sha256=fQNs4J9twQuZH_Yj98-oOvEX7-LrSUP3kO8nzw2HrHs,60
  fmtr/tools/entrypoints/ep_test.py,sha256=B8HfWISfSgw_xVX475CbJGh_QnpOe9MH65H8qGjTWbY,46
@@ -73,9 +73,9 @@ fmtr/tools/tests/test_environment.py,sha256=iHaiMQfECYZPkPKwfuIZV9uHuWe3aE-p_dN_
  fmtr/tools/tests/test_json.py,sha256=IeSP4ziPvRcmS8kq7k9tHonC9rN5YYq9GSNT2ul6Msk,287
  fmtr/tools/tests/test_path.py,sha256=AkZQa6_8BQ-VaCyL_J-iKmdf2ZaM-xFYR37Kun3k4_g,2188
  fmtr/tools/tests/test_yaml.py,sha256=jc0TwwKu9eC0LvFGNMERdgBue591xwLxYXFbtsRwXVM,287
- fmtr_tools-1.2.5.dist-info/licenses/LICENSE,sha256=FW9aa6vVN5IjRQWLT43hs4_koYSmpcbIovlKeAJ0_cI,10757
- fmtr_tools-1.2.5.dist-info/METADATA,sha256=XH7DX192P5cYVOFAn4DR6Yh1lkAniyNoe6GWJA4p_ug,15943
- fmtr_tools-1.2.5.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- fmtr_tools-1.2.5.dist-info/entry_points.txt,sha256=fSQrDGNctdQXbUxpMWYVfVQ0mhZMDyaEDG3D3a0zOSc,278
- fmtr_tools-1.2.5.dist-info/top_level.txt,sha256=LXem9xCgNOD72tE2gRKESdiQTL902mfFkwWb6-dlwEE,5
- fmtr_tools-1.2.5.dist-info/RECORD,,
+ fmtr_tools-1.2.7.dist-info/licenses/LICENSE,sha256=FW9aa6vVN5IjRQWLT43hs4_koYSmpcbIovlKeAJ0_cI,10757
+ fmtr_tools-1.2.7.dist-info/METADATA,sha256=3bmhaXPzz8Yt1zSVrOBrOU2i2kvwIwObkBpcP1rfRwE,16025
+ fmtr_tools-1.2.7.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ fmtr_tools-1.2.7.dist-info/entry_points.txt,sha256=fSQrDGNctdQXbUxpMWYVfVQ0mhZMDyaEDG3D3a0zOSc,278
+ fmtr_tools-1.2.7.dist-info/top_level.txt,sha256=LXem9xCgNOD72tE2gRKESdiQTL902mfFkwWb6-dlwEE,5
+ fmtr_tools-1.2.7.dist-info/RECORD,,