thds.core 1.46.20251009215019-py3-none-any.whl → 1.47.20251107140605-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- thds/core/inspect.py +26 -1
- thds/core/log/__init__.py +1 -1
- thds/core/log/kw_logger.py +10 -0
- thds/core/parallel.py +40 -24
- thds/core/sqlite/index.py +11 -7
- {thds_core-1.46.20251009215019.dist-info → thds_core-1.47.20251107140605.dist-info}/METADATA +1 -1
- {thds_core-1.46.20251009215019.dist-info → thds_core-1.47.20251107140605.dist-info}/RECORD +10 -10
- {thds_core-1.46.20251009215019.dist-info → thds_core-1.47.20251107140605.dist-info}/WHEEL +0 -0
- {thds_core-1.46.20251009215019.dist-info → thds_core-1.47.20251107140605.dist-info}/entry_points.txt +0 -0
- {thds_core-1.46.20251009215019.dist-info → thds_core-1.47.20251107140605.dist-info}/top_level.txt +0 -0

thds/core/inspect.py
CHANGED
@@ -1,6 +1,7 @@
 import inspect
 import typing as ty
 from dataclasses import dataclass
+from types import ModuleType


 @dataclass(frozen=True)
@@ -32,7 +33,7 @@ def get_caller_info(skip: int = 2) -> CallerInfo:
     start = 0 + skip
     if len(stack) < start + 1:
         raise RuntimeError(f"The stack has less than f{skip} + 1 frames in it.")
-    parentframe = stack[start]
+    parentframe = stack[start].frame

     # full dotted name of caller module
     module_info = inspect.getmodule(parentframe)
@@ -68,3 +69,27 @@ def bind_arguments(

 def get_argument(arg_name: str, bound_arguments: inspect.BoundArguments) -> ty.Any:
     return bound_arguments.arguments[arg_name]
+
+
+def yield_caller_modules_and_frames(*skip: str) -> ty.Iterator[tuple[ModuleType, inspect.FrameInfo]]:
+    """Yields caller modules and their frame info, skipping any modules in the skip list."""
+    stack = inspect.stack()
+    skip = set(skip) | {__name__}  # type: ignore
+    for frame_info in stack[1:]:  # don't bother with the current frame, obviously
+        module = inspect.getmodule(frame_info.frame)
+        if module:
+            module_name = module.__name__
+            if module_name not in skip:
+                yield module, frame_info
+
+
+def caller_module_name(*skip: str) -> str:
+    """
+    Find the first caller module that is not in the skip list.
+    :param skip: module names to skip
+    :return: the first caller module name not in skip, or empty string if no module can be found
+    """
+    for module, _frame in yield_caller_modules_and_frames(*skip):
+        return module.__name__
+
+    return ""  # this is trivially distinguishable from a module name, so no need to force people to handle None
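
The new `caller_module_name` helper walks the call stack and returns the first module not in the skip list (the `thds.core.inspect` module itself is always skipped). A minimal usage sketch, with a hypothetical wrapper module:

```python
# some_library/wrapper.py -- hypothetical module that wants to know who called it
from thds.core import inspect as core_inspect


def who_called_me() -> str:
    # Skip this wrapper module so the first *external* caller is reported;
    # an empty string comes back if no caller module can be determined.
    return core_inspect.caller_module_name(__name__)
```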

thds/core/log/__init__.py
CHANGED
@@ -26,4 +26,4 @@ logger.info("testing 5")

 from .basic_config import DuplicateFilter, set_logger_to_console_level  # noqa: F401
 from .kw_formatter import ThdsCompactFormatter  # noqa: F401
-from .kw_logger import KwLogger, getLogger, logger_context, make_th_formatters_safe  # noqa: F401
+from .kw_logger import KwLogger, auto, getLogger, logger_context, make_th_formatters_safe  # noqa: F401

thds/core/log/kw_logger.py
CHANGED
@@ -80,6 +80,16 @@ def getLogger(name: Optional[str] = None) -> logging.LoggerAdapter:
     return KwLogger(logging.getLogger(name), dict())


+def auto(*skip: str) -> logging.LoggerAdapter:
+    from .. import inspect
+
+    module_name = inspect.caller_module_name(__name__, *skip)
+    if not module_name:
+        raise ValueError("Cannot automatically determine caller module name for logger.")
+
+    return getLogger(module_name)
+
+
 def make_th_formatters_safe(logger: logging.Logger):
     """Non-adapted loggers may still run into our root format string,
     which expects _TH_REC_CTXT to be present on every LogRecord.
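
Together with the re-export in `log/__init__.py`, the new `auto()` helper lets a module obtain a logger named after itself without spelling out `__name__`, raising `ValueError` if the caller module cannot be determined. A small sketch of the intended call site (the module name here is hypothetical):

```python
# my_package/job.py -- hypothetical module using the new log.auto()
from thds.core import log

logger = log.auto()  # behaves like log.getLogger(__name__) for this module


def run() -> None:
    logger.info("starting job")
```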

thds/core/parallel.py
CHANGED
@@ -8,7 +8,7 @@ from collections import defaultdict
 from dataclasses import dataclass
 from uuid import uuid4

-from thds.core import concurrency, config, files, log
+from thds.core import concurrency, config, files, inspect, log

 PARALLEL_OFF = config.item("off", default=False, parse=config.tobool)
 # if you want to simplify a stack trace, this may be your friend
@@ -17,9 +17,6 @@ R = ty.TypeVar("R")
 T_co = ty.TypeVar("T_co", covariant=True)


-logger = log.getLogger(__name__)
-
-
 class IterableWithLen(ty.Protocol[T_co]):
     def __iter__(self) -> ty.Iterator[T_co]: ...  # pragma: no cover

@@ -69,13 +66,21 @@ class Error:
 H = ty.TypeVar("H", bound=ty.Hashable)


+def _get_caller_logger(named: str) -> ty.Callable[[str], ty.Any]:
+    module_name = inspect.caller_module_name(__name__)
+    if module_name:
+        return log.getLogger(module_name).info if named else log.getLogger(module_name).debug
+    return log.getLogger(__name__).debug  # if not named, we default to debug level
+
+
 def yield_all(
     thunks: ty.Iterable[ty.Tuple[H, ty.Callable[[], R]]],
     *,
     executor_cm: ty.Optional[ty.ContextManager[concurrent.futures.Executor]] = None,
     fmt: ty.Callable[[str], str] = lambda x: x,
+    error_fmt: ty.Callable[[str], str] = lambda x: x,
     named: str = "",
-    progress_logger: ty.Callable[[str], ty.Any] =
+    progress_logger: ty.Optional[ty.Callable[[str], ty.Any]] = None,
 ) -> ty.Iterator[ty.Tuple[H, ty.Union[R, Error]]]:
     """Stream your results so that you don't have to load them all into memory at the same
     time (necessarily). Also, yield (rather than raise) Exceptions, wrapped as Errors.
@@ -94,7 +99,6 @@ def yield_all(
     len_or_none = try_len(thunks)

     num_tasks_log = "" if not len_or_none else f" of {len_or_none}"
-    named = f" {named} " if named else " result "

     if PARALLEL_OFF() or (len_or_none == 1 and not executor_cm):
         # don't actually transfer this to an executor we only have one task.
@@ -105,6 +109,8 @@ def yield_all(
                 yield key, Error(e)
         return  # we're done here

+    progress_logger = progress_logger or _get_caller_logger(named)
+
     executor_cm = executor_cm or concurrent.futures.ThreadPoolExecutor(
         max_workers=len_or_none or None, **concurrency.initcontext()
     )  # if len_or_none turns out to be zero, swap in a None which won't kill the executor
@@ -114,15 +120,21 @@ def yield_all(
         # While concurrent.futures.as_completed accepts an iterable as input, it
         # does not yield any completed futures until the input iterable is
         # exhausted.
+        num_exceptions = 0
         for i, future in enumerate(concurrent.futures.as_completed(keys_onto_futures.values()), start=1):
             thunk_key = future_ids_onto_keys[id(future)]
+            error_suffix = (
+                error_fmt(f"; {num_exceptions} tasks have raised exceptions") if num_exceptions else ""
+            )
             try:
                 result = future.result()
                 yielder: tuple[H, ty.Union[R, Error]] = thunk_key, ty.cast(R, result)
+                name = named or result.__class__.__name__
             except Exception as e:
                 yielder = thunk_key, Error(e)
+                name = named or e.__class__.__name__
             finally:
-                progress_logger(fmt(f"Yielding{
+                progress_logger(fmt(f"Yielding {name} {i}{num_tasks_log}") + error_suffix)
                 yield yielder


@@ -136,8 +148,10 @@ def failfast(results: ty.Iterable[ty.Tuple[H, ty.Union[R, Error]]]) -> ty.Iterat
         yield key, res


-def xf_mapping(
-
+def xf_mapping(
+    thunks: ty.Mapping[H, ty.Callable[[], R]], named: str = ""
+) -> ty.Iterator[ty.Tuple[H, R]]:
+    return failfast(yield_all(IteratorWithLen(len(thunks), thunks.items()), named=named))


 def create_keys(iterable: ty.Iterable[R]) -> ty.Iterator[ty.Tuple[str, R]]:
@@ -152,6 +166,9 @@ def create_keys(iterable: ty.Iterable[R]) -> ty.Iterator[ty.Tuple[str, R]]:
     return iter(with_keys)


+ERROR_LOGGER = log.getLogger(__name__)
+
+
 def yield_results(
     thunks: ty.Iterable[ty.Callable[[], R]],
     *,
@@ -159,9 +176,9 @@ def yield_results(
     error_fmt: ty.Callable[[str], str] = lambda x: x,
     success_fmt: ty.Callable[[str], str] = lambda x: x,
     named: str = "",
-    progress_logger: ty.Callable[[str], ty.Any] =
+    progress_logger: ty.Optional[ty.Callable[[str], ty.Any]] = None,
 ) -> ty.Iterator[R]:
-    """Yield only the successful results of your Callables/Thunks.
+    """Yield only the successful results of your Callables/Thunks. Continue despite errors.

     If your iterable has a length, we will be able to log progress
     information. In most cases, this will be advantageous for you.
@@ -176,27 +193,26 @@ def yield_results(

     exceptions: ty.List[Exception] = list()

-    num_tasks = try_len(thunks)
-    num_tasks_log = "" if not num_tasks else f" of {num_tasks}"
-    named = f" {named} " if named else " result "
-
     for i, (_key, res) in enumerate(
-        yield_all(
+        yield_all(
+            create_keys(thunks),
+            executor_cm=executor_cm,
+            named=named,
+            progress_logger=progress_logger,
+            fmt=success_fmt,
+            error_fmt=error_fmt,
+        ),
         start=1,
     ):
         if not isinstance(res, Error):
-            errors = error_fmt(f"; {len(exceptions)} tasks have raised exceptions") if exceptions else ""
-            progress_logger(success_fmt(f"Yielding{named}{i}{num_tasks_log} {errors}"))
             yield res
         else:
             exceptions.append(res.error)
             # print tracebacks as we go, so as not to defer potentially-helpful
             # debugging information while a long run is ongoing.
             traceback.print_exception(type(res.error), res.error, res.error.__traceback__)
-
-            error_fmt(
-                f"Task {i}{num_tasks_log} errored with {type(res.error).__name__}({res.error})"
-            )
+            ERROR_LOGGER.error(  # should only use logger.exception from an except block
+                error_fmt(f"Task {i} errored with {type(res.error).__name__}({res.error})")
             )

     summarize_exceptions(error_fmt, exceptions)
@@ -217,10 +233,10 @@ def summarize_exceptions(
     most_common_type = None
     max_count = 0
     for _type, excs in by_type.items():
-
+        ERROR_LOGGER.error(error_fmt(f"{len(excs)} tasks failed with exception: " + _type.__name__))
         if len(excs) > max_count:
             max_count = len(excs)
             most_common_type = _type

-
+    ERROR_LOGGER.info("Raising one of the most common exception type.")
     raise by_type[most_common_type][0]  # type: ignore
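
With these changes, `yield_all`/`yield_results` pick a progress logger from the calling module when none is supplied, and task failures are reported through the module-level `ERROR_LOGGER`. A usage sketch for `yield_results` (the thunks below are purely illustrative):

```python
# hypothetical caller of thds.core.parallel.yield_results
from thds.core import parallel


def slow_square(i: int) -> int:
    return i * i


# Thunks are zero-argument callables; results stream back as tasks complete.
# Failed thunks are logged as they occur (and a representative exception is
# raised at the end if any failed). Because no progress_logger is passed,
# progress lines are emitted under this module's own logger.
squares = list(
    parallel.yield_results(
        [lambda i=i: slow_square(i) for i in range(10)],
        named="square",
    )
)
```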

thds/core/sqlite/index.py
CHANGED
@@ -4,19 +4,23 @@ from .connect import Connectable, autoconn_scope, autoconnect


 @autoconn_scope.bound
-def create(
-
-
-    """Create an index on a table in a SQLite database using only sqlite3 and SQL.
+def create(
+    connectable: Connectable, table_name: str, columns: ty.Collection[str], unique: bool = False
+) -> str:
+    """Create an index on a table in a SQLite database using only sqlite3 and SQL.
+
+    Is idempotent, but does not verify that your index DDL matches what you're asking for."""
     colnames = "_".join(colname for colname in columns).replace("-", "_")
+    idx_name = f"idx_{table_name}__{colnames}"

     sql_create_index = (
         f"CREATE {'UNIQUE' if unique else ''} INDEX IF NOT EXISTS "
-        f"[{
+        f"[{idx_name}] ON [{table_name}] ({', '.join(columns)})"
     )
     try:
-
+        autoconnect(connectable).execute(sql_create_index)
         # do not commit - let the caller decide when to commit, or allow autoconnect to do its job
+        return idx_name
     except Exception:
-        print(sql_create_index)
+        print("FAILURE TO CREATE INDEX: " + sql_create_index)
         raise
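
`create` now derives the index name from the table and columns and returns it, while `CREATE ... INDEX IF NOT EXISTS` keeps the call idempotent. A minimal sketch; whether a plain `sqlite3` connection is acceptable as a `Connectable` here is an assumption:

```python
# a minimal sketch, assuming a raw sqlite3.Connection satisfies Connectable
import sqlite3

from thds.core.sqlite import index

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE users (id INTEGER, email TEXT)")

idx_name = index.create(conn, "users", ["email"], unique=True)
# Per the new code, idx_name should be "idx_users__email";
# calling create() again is a no-op thanks to IF NOT EXISTS.
```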

{thds_core-1.46.20251009215019.dist-info → thds_core-1.47.20251107140605.dist-info}/RECORD
RENAMED
@@ -22,14 +22,14 @@ thds/core/hashing.py,sha256=dR4HEWcAdU8O-9ASGkl8naKs6I1Sd7aps4EcVefvVLQ,4246
 thds/core/home.py,sha256=tTClL_AarIKeri1aNCpuIC6evD7qr83ESGD173B81hU,470
 thds/core/hostname.py,sha256=canFGr-JaaG7nUfsQlyL0JT-2tnZoT1BvXzyaOMK1vA,208
 thds/core/imports.py,sha256=0LVegY8I8_XKZPcqiIp2OVVzEDtyqYA3JETf9OAKNKs,568
-thds/core/inspect.py,sha256=
+thds/core/inspect.py,sha256=vBuVJ9aKR_WT0W8SZ59UfZ3NXVS330WCeef1uUe546w,3426
 thds/core/iterators.py,sha256=h0JBu2-rYhKMfJTDlZWfyHQWzgtIO8vp_Sp0gENFo7g,645
 thds/core/lazy.py,sha256=e1WvG4LsbEydV0igEr_Vl1cq05zlQNIE8MFYT90yglE,3289
 thds/core/link.py,sha256=4-9d22l_oSkKoSzlYEO-rwxO1hvvj6VETY7LwvGcX6M,5534
 thds/core/logical_root.py,sha256=gWkIYRv9kNQfzbpxJaYiwNXVz1neZ2NvnvProtOn9d8,1399
 thds/core/merge_args.py,sha256=7oj7dtO1-XVkfTM3aBlq3QlZbo8tb6X7E3EVIR-60t8,5781
 thds/core/meta.py,sha256=Df0DxV5UzHcEsu5UCYaE1BWipMPTEXycn9Ug4cdquMk,12114
-thds/core/parallel.py,sha256=
+thds/core/parallel.py,sha256=is12mjELQwueUfrpTyu9d4Ow20641nGozw0Qib5bmWA,9108
 thds/core/pickle_visit.py,sha256=QNMWIi5buvk2zsvx1-D-FKL7tkrFUFDs387vxgGebgU,833
 thds/core/prof.py,sha256=5ViolfPsAPwUTHuhAe-bon7IArPGXydpGoB5uZmObDk,8264
 thds/core/progress.py,sha256=tY8tc_6CMnu_O8DVisnsRoDpFJOw5vqyYzLhQDxsLn8,4361
@@ -46,11 +46,11 @@ thds/core/thunks.py,sha256=LxwqUsu3YPVDleGbNk5JWZIncDYwvM8wUBNOS2L09zs,1056
 thds/core/timer.py,sha256=aOpNP-wHKaKs6ONK5fOtIOgx00FChVZquG4PeaEYH_k,5376
 thds/core/tmp.py,sha256=jA8FwDbXo3hx8o4kRjAlkwpcI77X86GY4Sktkps29ho,3166
 thds/core/types.py,sha256=sFqI_8BsB1u85PSizjBZw8PBtplC7U54E19wZZWCEvI,152
-thds/core/log/__init__.py,sha256=
+thds/core/log/__init__.py,sha256=wbHgzqHln74wfqM7gLzRURgOn9V_-2dOnoOBi37xczU,1354
 thds/core/log/basic_config.py,sha256=2Y9U_c4PTrIsCmaN7Ps6Xr90AhJPzdYjeUzUMqO7oFU,6704
 thds/core/log/json_formatter.py,sha256=C5bRsSbAqaQqfTm88jc3mYe3vwKZZLAxET8s7_u7aN0,1757
 thds/core/log/kw_formatter.py,sha256=9-MVOd2r5NEkYNne9qWyFMeR5lac3w7mjHXsDa681i0,3379
-thds/core/log/kw_logger.py,sha256=
+thds/core/log/kw_logger.py,sha256=bXajphFJXOtiIOix7a4uOs_gzqTiPu-76uVGIZK7A5E,4351
 thds/core/log/logfmt.py,sha256=i66zoG2oERnE1P_0TVXdlfJ1YgUmvtMjqRtdV5u2SvU,10366
 thds/core/source/__init__.py,sha256=e-cRoLl1HKY3YrDjpV5p_i7zvr1L4q51-t1ISTxdig4,543
 thds/core/source/_construct.py,sha256=jtsh0Du67TslWjCLASZ3pAMeaiowfgm7Bt50zIhwx7k,4330
@@ -64,7 +64,7 @@ thds/core/sqlite/connect.py,sha256=l4QaSAI8RjP7Qh2FjmJ3EwRgfGf65Z3-LjtC9ocHM_U,9
 thds/core/sqlite/copy.py,sha256=y3IRQTBrWDfKuVIfW7fYuEgwRCRKHjN0rxVFkIb9VrQ,1155
 thds/core/sqlite/ddl.py,sha256=k9BvmDzb0rrlhmEpXkB6ESaZAUWtbL58x-70sPyoFk4,201
 thds/core/sqlite/functions.py,sha256=AOIRzb7lNxmFm1J5JS6R8Nl-dSv3Dy47UNZVVjl1rvk,2158
-thds/core/sqlite/index.py,sha256=
+thds/core/sqlite/index.py,sha256=cd-pxhb2lN2kG3ypp4hRnWSYbdLGM8Gb95PvHIjI41U,979
 thds/core/sqlite/insert_utils.py,sha256=BNI3VUdqwBdaqa0xqiJrhE6XyzPsTF8N4KKKdb4Vfes,884
 thds/core/sqlite/merge.py,sha256=NxettDMJ_mcrWfteQn_ERY7MUB5ETR-yJLKg7uvF6zA,3779
 thds/core/sqlite/meta.py,sha256=8Gh4FhTzU86FK8oWosoyPfT0EVd-kfieThEQBrD-l30,7299
@@ -74,8 +74,8 @@ thds/core/sqlite/structured.py,sha256=8t1B6XbM5NnudKEeBLsdjRVbSXXSr6iHOW0HwEAqtX
 thds/core/sqlite/types.py,sha256=oq8m0UrvSn1IqWWcQ4FPptfAhdj6DllnCe7puVqSHlY,1297
 thds/core/sqlite/upsert.py,sha256=BmKK6fsGVedt43iY-Lp7dnAu8aJ1e9CYlPVEQR2pMj4,5827
 thds/core/sqlite/write.py,sha256=z0219vDkQDCnsV0WLvsj94keItr7H4j7Y_evbcoBrWU,3458
-thds_core-1.
-thds_core-1.
-thds_core-1.
-thds_core-1.
-thds_core-1.
+thds_core-1.47.20251107140605.dist-info/METADATA,sha256=pWgf_iEpaoY8sKrnc0qDjE4Sb9bP67fcDFjBNhLEeag,2216
+thds_core-1.47.20251107140605.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+thds_core-1.47.20251107140605.dist-info/entry_points.txt,sha256=bOCOVhKZv7azF3FvaWX6uxE6yrjK6FcjqhtxXvLiFY8,161
+thds_core-1.47.20251107140605.dist-info/top_level.txt,sha256=LTZaE5SkWJwv9bwOlMbIhiS-JWQEEIcjVYnJrt-CriY,5
+thds_core-1.47.20251107140605.dist-info/RECORD,,

{thds_core-1.46.20251009215019.dist-info → thds_core-1.47.20251107140605.dist-info}/WHEEL
RENAMED

File without changes
{thds_core-1.46.20251009215019.dist-info → thds_core-1.47.20251107140605.dist-info}/entry_points.txt
RENAMED
|
File without changes
|
{thds_core-1.46.20251009215019.dist-info → thds_core-1.47.20251107140605.dist-info}/top_level.txt
RENAMED
|
File without changes
|