thds.mops 3.8.20250502184911__py3-none-any.whl → 3.8.20250602165457__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


thds/mops/_compat.py CHANGED
@@ -4,3 +4,8 @@ try:
     import tomllib  # type: ignore [import-not-found] # noqa: F401
 except ImportError:
     import tomli as tomllib  # noqa: F401
+
+try:
+    import importlib_metadata  # type: ignore [import-not-found] # noqa: F401
+except ImportError:
+    from importlib import metadata as importlib_metadata  # type: ignore[no-redef] # noqa: F401
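
For orientation: the shim prefers the `importlib_metadata` backport (pinned `>=3.6` in the new METADATA below) and falls back to the stdlib module, so callers can rely on the keyword-filtered `entry_points(group=...)` API even on Pythons older than 3.10. A minimal sketch of how a consumer might use the shim; `discover_plugins` and its return type are illustrative, not part of the package:

    from thds.mops._compat import importlib_metadata  # requires thds.mops installed

    def discover_plugins(group: str) -> list:
        # entry_points(group=...) is supported by importlib_metadata>=3.6 and by
        # the stdlib importlib.metadata from Python 3.10 onward.
        return [ep.load() for ep in importlib_metadata.entry_points(group=group)]

    plugins = discover_plugins("thds.mops.pure.blob_stores")  # group used by uris.py below
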
thds/mops/pure/core/metadata.py CHANGED
@@ -176,8 +176,11 @@ def parse_result_metadata(metadata_keyvals: ty.Sequence[str]) -> ResultMetadata:
     """

     def to_arg(kv: str) -> str:
-        key, value = kv.split("=", 1)
-        return f"--{key.replace('_', '-')}={value}"
+        try:
+            key, value = kv.split("=", 1)
+            return f"--{key.replace('_', '-')}={value}"
+        except ValueError:
+            raise ValueError(f"Unable to parse metadata key-value pair {kv}. Must be key=value.")

     metadata = result_metadata_parser().parse_args([to_arg(kv) for kv in metadata_keyvals if kv])
     return ResultMetadata(**vars(metadata))
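
The guard above only changes the failure mode: a malformed pair without an "=" used to surface as a bare unpacking ValueError, and now gets an explicit message. A quick standalone illustration of why the unpack fails (not code from the package):

    # split("=", 1) on a string with no "=" yields a single-element list,
    # so `key, value = ...` raises ValueError("not enough values to unpack").
    print("pipeline_id=demo".split("=", 1))  # ['pipeline_id', 'demo'] -> "--pipeline-id=demo"
    print("pipeline_id".split("=", 1))       # ['pipeline_id'] -> unpacking would fail
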
thds/mops/pure/core/uris.py CHANGED
@@ -1,4 +1,3 @@
-import importlib.metadata
 import io
 import typing as ty
 from pathlib import Path
@@ -6,6 +5,7 @@ from typing import Callable, Union

 from thds.adls import AdlsFqn, AdlsRoot
 from thds.core.stack_context import StackContext
+from thds.mops._compat import importlib_metadata

 from ..adls.blob_store import get_adls_blob_store
 from .file_blob_store import get_file_blob_store
@@ -29,7 +29,7 @@ def register_blob_store(get_store: GetBlobStoreForUri) -> None:


 def load_plugin_blobstores() -> None:
-    for entry_point in importlib.metadata.entry_points().get("thds.mops.pure.blob_stores", []):
+    for entry_point in importlib_metadata.entry_points(group="thds.mops.pure.blob_stores"):
         try:
             register_blob_store(entry_point.load())
         except Exception as e:
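
Context for the one-line change above: before Python 3.10, `importlib.metadata.entry_points()` returned a dict-like mapping of group name to entry points (hence the `.get(..., [])`), an interface that was deprecated in 3.10 and removed in 3.12. Selecting by the `group` keyword works uniformly on the backport (>=3.6) and on modern stdlib versions. A small sketch contrasting the two styles:

    import sys

    if sys.version_info >= (3, 10):
        from importlib.metadata import entry_points
    else:
        from importlib_metadata import entry_points  # backport, as _compat.py arranges

    # Old, dict-style access (pre-3.10 stdlib; removed in 3.12):
    #     entry_points().get("thds.mops.pure.blob_stores", [])
    # New, keyword selection (what load_plugin_blobstores now does):
    for ep in entry_points(group="thds.mops.pure.blob_stores"):
        print(ep.name, ep.value)  # prints nothing unless a plugin is installed
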
thds/mops/pure/pickling/_pickle.py CHANGED
@@ -80,7 +80,7 @@ def gimme_bytes(pickle_dump: ty.Callable[[object, ty.IO], None], obj: object) ->
     return bio.read()


-def read_partial_pickle(full_bytes: bytes) -> ty.Tuple[bytes, ty.Any]:
+def read_partial_pickle(full_bytes: bytes) -> ty.Tuple[bytes, bytes]:
     # in order to be forward-compatible with v3 of mops, we're introducing a new
     # wrinkle in the read. Instead of assuming that the data at the URI
     # _begins_ with a pickle, we are looking for the first possible pickle
@@ -89,10 +89,11 @@ def read_partial_pickle(full_bytes: bytes) -> ty.Tuple[bytes, ty.Any]:
     first_pickle_pos = full_bytes.find(b"\x80")
     if first_pickle_pos == -1:
         raise ValueError(f"Unable to find a pickle in bytes of length {len(full_bytes)}")
-    return (
-        full_bytes[:first_pickle_pos],
-        CallableUnpickler(io.BytesIO(full_bytes[first_pickle_pos:])).load(),
-    )
+    return full_bytes[:first_pickle_pos], full_bytes[first_pickle_pos:]
+
+
+def _unpickle_with_callable(pickle_bytes: bytes) -> ty.Any:
+    return CallableUnpickler(io.BytesIO(pickle_bytes)).load()


 H = ty.TypeVar("H")
@@ -105,8 +106,8 @@ def make_read_header_and_object(
         uri_bytes = get_bytes(uri, type_hint=type_hint)
         if not uri_bytes:
             raise ValueError(f"{uri} exists but is empty - something is very wrong.")
-        header, unpickled = read_partial_pickle(uri_bytes)
-        return (xf_header or (lambda h: h))(header), unpickled  # type: ignore
+        header, first_pickle = read_partial_pickle(uri_bytes)
+        return (xf_header or (lambda h: h))(header), _unpickle_with_callable(first_pickle)  # type: ignore

     return read_object

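The net effect of these hunks is to separate locating the pickle from unpickling it, so new tooling can inspect raw pickle bytes without running an unpickler. A standalone sketch of the same header/payload split on the `\x80` PROTO marker (the function name is mine, not the package's):

    import pickle
    from typing import Tuple

    def split_header_and_pickle(full_bytes: bytes) -> Tuple[bytes, bytes]:
        # Protocol-2+ pickles begin with the PROTO opcode b"\x80"; everything
        # before the first occurrence is treated as a plain-text header.
        pos = full_bytes.find(b"\x80")
        if pos == -1:
            raise ValueError(f"Unable to find a pickle in bytes of length {len(full_bytes)}")
        return full_bytes[:pos], full_bytes[pos:]

    blob = b"pipeline_id=demo\n" + pickle.dumps({"answer": 42}, protocol=4)
    header, payload = split_header_and_pickle(blob)
    print(header)                  # b'pipeline_id=demo\n'
    print(pickle.loads(payload))   # {'answer': 42}
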
thds/mops/pure/tools/_pickle_dis.py ADDED
@@ -0,0 +1,141 @@
+import ast
+import io
+import pickletools
+import re
+import typing as ty
+
+from thds.mops.pure.core import metadata, uris
+from thds.mops.pure.core.memo import results
+from thds.mops.pure.pickling._pickle import read_partial_pickle
+from thds.mops.pure.runner import strings
+
+
+def _dis(byts: bytes, indent: int = 4) -> str:
+    """Disassemble the bytes into a string."""
+    ios = io.StringIO()
+    pickletools.dis(byts, out=ios, indentlevel=indent)
+    ios.seek(0)
+    return ios.read()
+
+
+def replace_all_nested_pickles(
+    disassembly_text: str,
+) -> str:
+    """
+    Finds all BINBYTES opcodes whose payload starts with b'\\x80',
+    attempts to disassemble them as pickles, and replaces the BINBYTES
+    section with the nested disassembly if successful.
+    """
+    # Regex to find any BINBYTES line and capture indentation and start offset.
+    binbytes_pattern = re.compile(
+        r"^(?P<indent>[ \t]*)(?P<offset>\d+):\s+(?P<opcode>\S+)\s+BINBYTES\s+", re.MULTILINE
+    )
+
+    # Regex to find the start of the next opcode line after BINBYTES
+    next_opcode_pattern = re.compile(r"^[ \t]*\d+:\s+\S+", re.MULTILINE)
+
+    output_parts = []
+    last_end = 0
+
+    for match in binbytes_pattern.finditer(disassembly_text):
+        indent_str = match.group("indent")
+        binbytes_line_start = match.start()
+        binbytes_line_end = match.end()  # End of the matched BINBYTES prefix
+
+        # Find where the byte literal starts (b" or b') after the opcode
+        bytes_literal_start_index = -1
+        b_quote_match = re.search(r'b["\']', disassembly_text[binbytes_line_end:])
+        if b_quote_match:
+            bytes_literal_start_index = binbytes_line_end + b_quote_match.start()
+        else:
+            # Malformed BINBYTES line? Skip this match.
+            # Append text up to the start of this BINBYTES line and continue searching
+            output_parts.append(disassembly_text[last_end:binbytes_line_start])
+            last_end = binbytes_line_start  # Start next search from here
+            continue
+
+        # Find the start of the *next* opcode line to delimit the byte literal
+        next_opcode_match = next_opcode_pattern.search(disassembly_text, pos=binbytes_line_end)
+        end_of_binbytes_section = (
+            next_opcode_match.start() if next_opcode_match else len(disassembly_text)
+        )
+
+        # Extract the full string representation of the bytes literal
+        potential_bytes_str = disassembly_text[
+            bytes_literal_start_index:end_of_binbytes_section
+        ].rstrip()
+
+        nested_disassembly = None
+        try:
+            # Evaluate the string literal to get bytes
+            actual_bytes = ast.literal_eval(potential_bytes_str)
+            if not isinstance(actual_bytes, bytes):
+                raise ValueError("Literal did not evaluate to bytes")
+
+            # --- Key Check: Does it start with a pickle protocol marker? ---
+            if actual_bytes.startswith(b"\x80"):
+                # Attempt to disassemble these bytes
+                indent_level = len(indent_str)
+                # Use a deeper indent for the nested part
+                nested_disassembly = _dis(actual_bytes, indent=indent_level + 4)
+
+        except (SyntaxError, ValueError, TypeError):
+            # Failed to parse the bytes literal string itself. Keep original.
+            # print(f"Debug: Failed to eval bytes literal near offset {match.group('offset')}: {e_eval}")
+            nested_disassembly = None  # Ensure it stays None
+        except Exception:  # Catch errors from _dis (e.g., not valid pickle)
+            # Failed to disassemble. Keep original.
+            # print(f"Debug: Failed to disassemble potential pickle near offset {match.group('offset')}: {e_dis}")
+            nested_disassembly = None  # Ensure it stays None
+
+        # --- Construct the output ---
+        # Append text before this BINBYTES line
+        output_parts.append(disassembly_text[last_end:binbytes_line_start])
+
+        if nested_disassembly:
+            # Successfully disassembled, replace the BINBYTES section
+            # Append the original BINBYTES line itself (for context)
+            output_parts.append(
+                disassembly_text[binbytes_line_start:binbytes_line_end]
+            )  # Just the "XXX: B BINBYTES" part
+            output_parts.append(f"--- NESTED PICKLE ({len(actual_bytes)} bytes) START ---\n")
+            output_parts.append(nested_disassembly)
+            output_parts.append(f"{indent_str}--- NESTED PICKLE END ---\n")
+            # Update last_end to skip the original byte literal representation
+            last_end = end_of_binbytes_section
+        else:
+            # Did not replace, append the original BINBYTES section unchanged
+            output_parts.append(disassembly_text[binbytes_line_start:end_of_binbytes_section])
+            # Update last_end
+            last_end = end_of_binbytes_section
+
+    # Append any remaining text after the last match
+    output_parts.append(disassembly_text[last_end:])
+
+    return "".join(output_parts)
+
+
+def get_meta_and_pickle(uri: str) -> tuple[ty.Optional[metadata.ResultMetadata], str]:
+    """To be used when the issue is internal to the pickle itself."""
+
+    def _replace_all_dis_numbers(
+        disassembly_text: str,
+    ) -> str:
+        # Replace all line numbers with a placeholder
+        lines = disassembly_text.splitlines()
+        return "\n".join([re.sub(r"^(\s*)\d+:", r"\1 ", line) for line in lines])
+
+    if uri.endswith("/" + strings.INVOCATION):
+        _, invoc_raw = read_partial_pickle(uris.get_bytes(uri, type_hint=strings.INVOCATION))
+        # the raw invocation itself contains a nested pickle. we want to show the outer opcodes of the
+        # raw invocation, and then we _also_ want to pull out the inner args_kwargs_pickle and show
+        # the opcodes of that one, preferably without repeating ourselves too much.
+        invoc_dis = _dis(invoc_raw)
+        return None, _replace_all_dis_numbers(replace_all_nested_pickles(invoc_dis))

+    # TODO maybe handle exception type hinting here?
+    meta_bytes, first_pickle = read_partial_pickle(uris.get_bytes(uri, type_hint=results.RESULT))
+    return (
+        metadata.parse_result_metadata(meta_bytes.decode("utf-8").split("\n")),
+        _replace_all_dis_numbers(_dis(first_pickle)),
+    )
thds/mops/pure/tools/inspect.py CHANGED
@@ -8,9 +8,11 @@ but if you're reading this in the distant future - those are its limitations.

 import argparse
 import functools
+import io
 import os
 import re
 import subprocess
+import sys
 import typing as ty
 from dataclasses import dataclass
 from pathlib import Path
@@ -29,6 +31,8 @@ from thds.mops.pure.pickling._pickle import (
 from thds.mops.pure.pickling.pickles import Invocation
 from thds.mops.pure.runner import strings

+from . import _pickle_dis
+
 logger = log.getLogger(__name__)


@@ -59,6 +63,7 @@ def _unpickle_object_for_debugging(uri: str) -> ty.Any:
             invoc = ty.cast(Invocation, invoc_raw)
             args, kwargs = unfreeze_args_kwargs(invoc.args_kwargs_pickle, PartialViewingUnpickler)
             return Thunk(getattr(invoc, "f", None) or invoc.func, *args, **kwargs)
+
         header, obj = read_metadata_and_object("output", uri)
         return obj, header
     except ImportError as ie:
@@ -98,7 +103,7 @@ def _control_uri(uri: str) -> str:


 @scope.bound
-def get_control_file(uri: str) -> ty.Any:
+def get_control_file(uri: str, unpickle: bool = True) -> ty.Any:
     """Returns _NOTHING if 'normal' errors occur."""
     try:
         uri = _resolved_uri(uri)
@@ -109,18 +114,27 @@ def get_control_file(uri: str) -> ty.Any:
     if not _control_uri(uri):
         fs = uris.lookup_blob_store(uri)
         logger.debug(f"Attempting to fetch all control files for {uri}")
-        return IRE(**{cf: get_control_file(fs.join(uri, cf)) for cf in _KNOWN_CONTROL_FILES})
+        return IRE(
+            **{cf: get_control_file(fs.join(uri, cf), unpickle=unpickle) for cf in _KNOWN_CONTROL_FILES}
+        )

     has_storage_root = bool(uris.ACTIVE_STORAGE_ROOT())
     try:
         scope.enter(uris.ACTIVE_STORAGE_ROOT.set(uris.get_root(uri)))
-        return _unpickle_object_for_debugging(uri)
+        if unpickle:
+            return _unpickle_object_for_debugging(uri)
+        else:
+            return _pickle_dis.get_meta_and_pickle(uri)
+    except ImportError:
+        return None
     except Exception as e:
         if uris.lookup_blob_store(uri).is_blob_not_found(e):
             if has_storage_root or uri not in str(e):
                 logger.warning(str(e))
             return None
-        logger.exception("Unexpected error while unpickling the object.")
+        logger.exception(
+            f"Unexpected error {e} while {'unpickling' if unpickle else 'processing'} the object at {uri}"
+        )
         raise

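With the new keyword, the same entry point can return either unpickled objects (the default) or the pre-rendered opcode view from `_pickle_dis`. A hedged usage sketch, assuming `thds.mops` is installed and the URI points at real, reachable control files:

    from thds.mops.pure.tools.inspect import get_control_file

    memo_uri = "..."  # placeholder: a real mops memo/control-file URI is required
    rendered = get_control_file(memo_uri, unpickle=False)
    # unpickle=False returns pickle-opcode text (plus parsed result metadata where
    # available) instead of live objects; pickle_diff_two_uris (added below) writes
    # two such renderings to temp files and hands them to the configured diff tool.
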
@@ -136,22 +150,31 @@ def _embed(o: object) -> None:


 def _pprint(obj: object, file: ty.Any = None, uri: str = "") -> None:
+    final_out_stream = file or sys.stdout
+
     if uri:
-        print(uri, file=file)
+        print(uri, file=final_out_stream)
+
+    # Always capture the pretty-printed output to an in-memory buffer first
+    output_buffer = io.StringIO()

     try:
-        from rich import console, pretty  # type: ignore[import]
-
-        if file:
-            console.Console(file=file, color_system=None).print(
-                pretty.Pretty(
-                    obj,  # highlighter=lambda x: x if file else None
-                )
-            )
-        else:
-            pretty.pprint(obj)
+        # Attempt to use rich for pretty-printing into the buffer
+        from rich import console, pretty  # type: ignore[import-not-found]
+
+        console.Console(file=output_buffer, color_system=None).print(pretty.Pretty(obj), crop=False)
     except ModuleNotFoundError:
-        pprint(obj, indent=4, width=60, sort_dicts=False, stream=file)
+        pprint(obj, indent=4, width=60, sort_dicts=False, stream=output_buffer)
+
+    formatted_string = output_buffer.getvalue()
+    # Unescape the literal '\n' sequences into actual newlines
+    processed_string = re.sub(r"(?<!\\)\\n", "\n", formatted_string)
+
+    # Use print with end='' for stdout to avoid double newlines
+    if final_out_stream is sys.stdout:
+        print(processed_string, end="")
+    else:
+        final_out_stream.write(processed_string)


 def inspect(uri: str, embed: bool = False) -> ty.Any:
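
The rewritten `_pprint` funnels both the rich path and the `pprint` fallback through a single in-memory buffer so the text can be post-processed (unescaping literal `\n` sequences) before it reaches stdout or a file. A minimal standalone sketch of that capture pattern (`render` is my name for it, not the package's):

    import io
    import re
    from pprint import pprint

    def render(obj: object) -> str:
        buf = io.StringIO()
        try:
            from rich import console, pretty  # optional dependency
            console.Console(file=buf, color_system=None).print(pretty.Pretty(obj), crop=False)
        except ModuleNotFoundError:
            pprint(obj, indent=4, width=60, sort_dicts=False, stream=buf)
        text = buf.getvalue()
        # Turn literal "\n" escapes (but not "\\n") into real newlines, as _pprint does.
        return re.sub(r"(?<!\\)\\n", "\n", text)

    print(render({"nested": {"msg": "line1\\nline2"}}), end="")
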
@@ -255,6 +278,25 @@ def _write_ire_to_path(ire: IRE, path: Path, uri: str) -> None:
         _pprint(ire, file=wf, uri=uri)


+@scope.bound
+def pickle_diff_two_uris(uri1: str, uri2: str) -> None:
+    """Diff two pickled objects, using the diff tool specified in DIFF_TOOL."""
+    _check_diff_tool()
+    uri1 = _resolved_uri(uri1)
+    uri2 = _resolved_uri(uri2)
+
+    path1 = scope.enter(tmp.temppath_same_fs())
+    path2 = scope.enter(tmp.temppath_same_fs())
+
+    ire1 = get_control_file(uri1, unpickle=False)
+    ire2 = get_control_file(uri2, unpickle=False)
+
+    _write_ire_to_path(ire1, path1, uri1)
+    _write_ire_to_path(ire2, path2, uri2)
+
+    _run_diff_tool(path1, path2)
+
+
 def _diff_memospace(uri: str, new_control: IRE) -> None:
     """Diff all siblings in the memospace against the new invocation.

@@ -347,6 +389,11 @@ def main() -> None:
             " It is highly recommended that you `brew install difftastic` to get more precise diffs."
         ),
     )
+    parser.add_argument(
+        "--diff-pickle-ops",
+        "-p",
+        help="""Diff against the provided memo URI, but emit pickle opcodes rather than unpickling.""",
+    )
     parser.add_argument(
         "--loop",
         action="store_true",
@@ -355,16 +402,22 @@ def main() -> None:
     parser.add_argument("--embed", action="store_true", help="Embed an IPython shell after inspection.")
     args = parser.parse_args()
     args.uri = args.uri.rstrip("/")
-    if args.diff_memospace:
+    if args.diff_memospace or args.diff_pickle_ops:
         _check_diff_tool()

-    _inspect_uri(args.uri, args.diff_memospace, args.embed)
+    if args.diff_pickle_ops:
+        pickle_diff_two_uris(args.uri, args.diff_pickle_ops)
+    else:
+        _inspect_uri(args.uri, args.diff_memospace, args.embed)

     if args.loop:
         prompt = "\nEnter another URI to inspect, or empty string to exit: "
         uri = input(prompt)
         while uri:
-            _inspect_uri(uri, args.diff_memospace, args.embed)
+            if args.diff_pickle_ops:
+                pickle_diff_two_uris(args.uri, uri)
+            else:
+                _inspect_uri(uri, args.diff_memospace, args.embed)
             uri = input(prompt)

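Wired together, the new flag reuses the existing diff-tool machinery: both `--diff-memospace` and `--diff-pickle-ops` trigger `_check_diff_tool()`, and in `--loop` mode each URI you type is opcode-diffed against the original `args.uri`. The diff does not show the console-script name, so the closest safe illustration is calling the new function directly (a hedged sketch; it needs thds.mops installed, a configured diff tool, and two reachable memo URIs):

    from thds.mops.pure.tools.inspect import pickle_diff_two_uris

    # Equivalent to passing --diff-pickle-ops on the command line.
    pickle_diff_two_uris("...", "...")  # placeholders for two memo URIs
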
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: thds.mops
-Version: 3.8.20250502184911
+Version: 3.8.20250602165457
 Summary: ML Ops tools for Trilliant Health
 Author-email: Trilliant Health <info@trillianthealth.com>
 Project-URL: Repository, https://github.com/TrilliantHealth/ds-monorepo
@@ -11,6 +11,7 @@ Requires-Dist: azure-core
 Requires-Dist: azure-identity
 Requires-Dist: azure-storage-file-datalake
 Requires-Dist: cachetools
+Requires-Dist: importlib_metadata>=3.6; python_version < "3.10"
 Requires-Dist: tblib~=2.0
 Requires-Dist: thds-adls
 Requires-Dist: thds-core
@@ -1,6 +1,6 @@
 thds/mops/__about__.py,sha256=IW_3wy8wEdrVducoBdiVgD7oYOY4J8yO1ezBaPtrc6U,215
 thds/mops/__init__.py,sha256=dbujDxVVfHpWP7OyfjEdNVHLtKx99rsNQPYfjTKn5Lg,127
-thds/mops/_compat.py,sha256=nArultEBSfWeT8eJOETN-H3vd4miEhZBXwjc6cCCtTc,157
+thds/mops/_compat.py,sha256=fO1YYEu6LF1re-VXl4P_8RXXLeKt4BgI9NTlHTgNpLk,357
 thds/mops/config.py,sha256=T62YskXvzAfxNgpq2jMatHgoIHfRV_z4cvJ8Rl_TZ6E,2015
 thds/mops/parallel.py,sha256=FIWm53NJF1X910sTUGhqYj_XJWaSzgEDdsZq9siDvhk,926
 thds/mops/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -47,7 +47,7 @@ thds/mops/pure/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSu
 thds/mops/pure/core/content_addressed.py,sha256=OQvEzRl9RWUCPCISUEv6kIUEroPGKlsAuBqTrV2tkx4,1101
 thds/mops/pure/core/deferred_work.py,sha256=3vjfqWFlqLLMcmX4nHVaaiidrG5N5KyAYhw6R0hoMzI,3716
 thds/mops/pure/core/file_blob_store.py,sha256=N4m4LLrBZaqTJFR4D_eYl03a-n6yQBRsv0ID1bOS9TA,4298
-thds/mops/pure/core/metadata.py,sha256=1BFtRVESTpgDQs2mMvBIWJ4W5oPN19qxcgrILgE3_Tg,8130
+thds/mops/pure/core/metadata.py,sha256=xAL2iz0pXrcKapmYnNrqSZ8nH2GVakA167NSpAfwiCI,8276
 thds/mops/pure/core/output_naming.py,sha256=ntufOVNJiVPiUM-Azl9mFpDFhIxiB-V2je9dv9AUQhg,2283
 thds/mops/pure/core/partial.py,sha256=aeNQFNHj9epU6lvk6NNTV6hXkNqNHN_czBydt7nkHmg,463
 thds/mops/pure/core/pipeline_id.py,sha256=rQP6uhwP-2rqpOxQiEOfQbRdQL0q8zSGJrb7YywlO-A,2102
@@ -57,7 +57,7 @@ thds/mops/pure/core/serialize_big_objs.py,sha256=YcOS1ccs82ZWO7nTbeumErMzYVe4hgX
 thds/mops/pure/core/serialize_paths.py,sha256=bWI-AKNP_Tf29JGO7DKqshOh7b7gu51lfGryDXo3aMI,5787
 thds/mops/pure/core/source.py,sha256=f7qtgKE5q75_uq27mgtIDGMWSvakzzpB3kAsTKo4TWw,13549
 thds/mops/pure/core/types.py,sha256=w2g83miGhnjaWr2_4TW2Fc3BdIgoIHFbIr_wX1HC7A0,5452
-thds/mops/pure/core/uris.py,sha256=oRiBM92xFwhbFSf4OIjJYQMmDGTRglndb_n4lpilsbs,2675
+thds/mops/pure/core/uris.py,sha256=qO9_f-ro7kax6haNOPTPe81-_aUSRFELeeZH4PMTTU4,2694
 thds/mops/pure/core/use_runner.py,sha256=_YeKEjj6_9uc5UIjxcm-YKLUj4joApOdaTJCMaCLC2c,1547
 thds/mops/pure/core/entry/__init__.py,sha256=kiDcsj16CwjRSexOZW-4h4b4tDCYIS_eLS5wgu2yIlk,151
 thds/mops/pure/core/entry/main.py,sha256=H5NHl2WgKN-3czlDjJeJLhyXOZ38c2ixGTbh6T3SfgQ,1806
@@ -82,7 +82,7 @@ thds/mops/pure/joblib/__init__.py,sha256=-3hSs-GsNzE_eNnwrdZBHAR_eaub5Uyl5GPYqBw
 thds/mops/pure/joblib/backend.py,sha256=F__6lrdc1-VcX4n4Pw7Lz1bBgeefShtRy2DQh6Fp-eI,2671
 thds/mops/pure/joblib/batching.py,sha256=tPOATD28-YW7KcWa3IqKm-fhLaILzM792ApvU-_zfnM,2298
 thds/mops/pure/pickling/__init__.py,sha256=WNdG8PdJCk-kYaXkvvPa--hjYGoUlBXG3w2X86yuhGo,156
-thds/mops/pure/pickling/_pickle.py,sha256=oBt2LX3_Bm33lFmQiOdQq0zIdPjDmPY8je2ICWgUQbo,7514
+thds/mops/pure/pickling/_pickle.py,sha256=vn8f6uEsaAdLyxGNYb4ED6D1a6BXsZQxnV3c0Ya6WUk,7605
 thds/mops/pure/pickling/memoize_only.py,sha256=oI5CMy6IEJc46Gb_BGWNUuAe3fysS7HxRSTajN0WssI,837
 thds/mops/pure/pickling/mprunner.py,sha256=dVbwQA8hzEL7UiwYXmzoGwN3_jbEtGoHDPMkRmo_UtA,8378
 thds/mops/pure/pickling/pickles.py,sha256=nCg7L7CqReNWDF8FAdEmCcuXVC_kLT5zuyW3V8Vvvs4,4704
@@ -95,8 +95,9 @@ thds/mops/pure/runner/simple_shims.py,sha256=oJ8sC5EVD-JFZx8CYE3_QwaQTuFa5F3IYH5
 thds/mops/pure/runner/strings.py,sha256=PYAYMxZ2ehgahKIBXJilENNE6OrdNkueNBel8LPsoh8,26
 thds/mops/pure/runner/types.py,sha256=sdeGCig5a-tm4eHrpMCTFsrmh2CBrLfI3kCMdoYqZY0,1127
 thds/mops/pure/tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+thds/mops/pure/tools/_pickle_dis.py,sha256=EyLgWP_dRzz1HIabGRTEGZFT_LZV5gmn4asJyFUAt4Y,6312
 thds/mops/pure/tools/history.py,sha256=dB7C2jq-0P3Fnv5Q3nzEkLehXdX0kaZZrGl1U1ns9DU,1048
-thds/mops/pure/tools/inspect.py,sha256=nvEQqZwWvBAc3wvfYVUWPkT4tH_v0O2jUo8eAJPC9Fc,12190
+thds/mops/pure/tools/inspect.py,sha256=PYXmR9-ATB3UsJUDG5Up8B6mdfenOGx2nktjw3sxMX8,13957
 thds/mops/pure/tools/sha256_b64_addressed.py,sha256=SECAiw3xSqpsrBBZix0MgJRTQrbHiUk2oFHYa7ln3q4,1137
 thds/mops/pure/tools/stress.py,sha256=f7pL5n9BmVYSZrmDJxKnUC70AIfeHhU5B9E9UDs5GJ8,2544
 thds/mops/pure/tools/summarize/__init__.py,sha256=MSmt_5Xg84uHqzTN38JwgseJK8rsJn_11A8WD99VtEo,61
@@ -104,8 +105,8 @@ thds/mops/pure/tools/summarize/cli.py,sha256=gaechsJhRZsOxGJGG1dQsW5dMBlgSv2sUmE
 thds/mops/pure/tools/summarize/run_summary.py,sha256=ujJC24J0XsF5W5P-eHiIq-4gmedmFXk2g1uljuvqOvc,5373
 thds/mops/testing/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 thds/mops/testing/deferred_imports.py,sha256=f0ezCgQAtzTqW1yAOb0OWgsB9ZrlztLB894LtpWDaVw,3780
-thds_mops-3.8.20250502184911.dist-info/METADATA,sha256=bYjWQQeZfb_gt-VtXg2ngfEMijrCeMKlR35qFc4GW2c,2158
-thds_mops-3.8.20250502184911.dist-info/WHEEL,sha256=wXxTzcEDnjrTwFYjLPcsW_7_XihufBwmpiBeiXNBGEA,91
-thds_mops-3.8.20250502184911.dist-info/entry_points.txt,sha256=GShNqjcjbq0TAJuwpyeCI5XCltiwdZxnNHkBpmYbNkU,329
-thds_mops-3.8.20250502184911.dist-info/top_level.txt,sha256=LTZaE5SkWJwv9bwOlMbIhiS-JWQEEIcjVYnJrt-CriY,5
-thds_mops-3.8.20250502184911.dist-info/RECORD,,
+thds_mops-3.8.20250602165457.dist-info/METADATA,sha256=ywsEaxLyt2BegJ_UbgTVQUVXsWtrM5PnOFFpX0IOkhk,2222
+thds_mops-3.8.20250602165457.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+thds_mops-3.8.20250602165457.dist-info/entry_points.txt,sha256=GShNqjcjbq0TAJuwpyeCI5XCltiwdZxnNHkBpmYbNkU,329
+thds_mops-3.8.20250602165457.dist-info/top_level.txt,sha256=LTZaE5SkWJwv9bwOlMbIhiS-JWQEEIcjVYnJrt-CriY,5
+thds_mops-3.8.20250602165457.dist-info/RECORD,,
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (80.1.0)
+Generator: setuptools (80.9.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
