wisent 0.5.2__py3-none-any.whl → 0.5.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- wisent/__init__.py +1 -1
- wisent/core/__init__.py +0 -15
- wisent/core/utils/__init__.py +21 -0
- wisent/core/utils/device.py +56 -0
- {wisent-0.5.2.dist-info → wisent-0.5.4.dist-info}/METADATA +1 -1
- {wisent-0.5.2.dist-info → wisent-0.5.4.dist-info}/RECORD +9 -7
- {wisent-0.5.2.dist-info → wisent-0.5.4.dist-info}/WHEEL +0 -0
- {wisent-0.5.2.dist-info → wisent-0.5.4.dist-info}/licenses/LICENSE +0 -0
- {wisent-0.5.2.dist-info → wisent-0.5.4.dist-info}/top_level.txt +0 -0
wisent/__init__.py
CHANGED
@@ -1 +1 @@
-__version__ = "0.5.2"
+__version__ = "0.5.4"
wisent/core/__init__.py
CHANGED
@@ -1,24 +1,9 @@
-from wisent.core.activations import Activations
-from wisent.core.classifier.classifier import ActivationClassifier, Classifier
-
 from .utils.device import empty_device_cache, preferred_dtype, resolve_default_device, resolve_device, resolve_torch_device
 from .steering import SteeringMethod, SteeringType
 
 __all__ = [
-    "ActivationClassifier",
-    "ActivationHooks",
-    "Activations",
-    "Classifier",
-    "ContrastivePairSet",
-    "Layer",
-    "Model",
-    "ModelParameterOptimizer",
-    "PromptFormat",
-    "SecureCodeEvaluator",
     "SteeringMethod",
     "SteeringType",
-    "TokenScore",
-    "enforce_secure_execution",
     "empty_device_cache",
     "preferred_dtype",
     "resolve_default_device",
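The trimmed __all__ above narrows the wisent.core surface in 0.5.4 to the steering types plus the device helpers re-exported from the new utils subpackage. A minimal usage sketch of imports this __init__.py should still satisfy, assuming the 0.5.4 wheel is installed (not verified against the release):

# Hypothetical sketch; names are taken from the __all__ shown in the diff above.
from wisent.core import (
    SteeringMethod,
    SteeringType,
    empty_device_cache,
    preferred_dtype,
    resolve_default_device,
)

print(resolve_default_device())  # "cuda", "mps", or "cpu" depending on the host
print(preferred_dtype())         # torch.float16 on cuda/mps, torch.float32 on cpu
empty_device_cache()             # no-op when no accelerator cache is available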
wisent/core/utils/__init__.py
ADDED
@@ -0,0 +1,21 @@
+from .device import (
+    DeviceKind,
+    empty_device_cache,
+    ensure_tensor_on_device,
+    move_module_to_preferred_device,
+    preferred_dtype,
+    resolve_default_device,
+    resolve_device,
+    resolve_torch_device,
+)
+
+__all__ = [
+    "DeviceKind",
+    "empty_device_cache",
+    "ensure_tensor_on_device",
+    "move_module_to_preferred_device",
+    "preferred_dtype",
+    "resolve_default_device",
+    "resolve_device",
+    "resolve_torch_device",
+]
wisent/core/utils/device.py
ADDED
@@ -0,0 +1,56 @@
+"""Centralized torch device selection helpers."""
+
+from __future__ import annotations
+
+from functools import lru_cache
+from typing import Literal
+
+import torch
+
+DeviceKind = Literal["cuda", "mps", "cpu"]
+
+
+def _mps_available() -> bool:
+    return getattr(torch.backends, "mps", None) is not None and torch.backends.mps.is_available()
+
+
+@lru_cache(maxsize=1)
+def resolve_default_device() -> DeviceKind:
+    if torch.cuda.is_available():
+        return "cuda"
+    if _mps_available():
+        return "mps"
+    return "cpu"
+
+
+def resolve_torch_device() -> torch.device:
+    return torch.device(resolve_default_device())
+
+
+def resolve_device(kind: DeviceKind | None = None) -> torch.device:
+    return torch.device(kind or resolve_default_device())
+
+
+def preferred_dtype(kind: DeviceKind | None = None) -> torch.dtype:
+    chosen = kind or resolve_default_device()
+    return torch.float16 if chosen in {"cuda", "mps"} else torch.float32
+
+
+def empty_device_cache(kind: DeviceKind | None = None) -> None:
+    chosen = kind or resolve_default_device()
+    if chosen == "cuda" and torch.cuda.is_available():
+        torch.cuda.empty_cache()
+    elif chosen == "mps" and _mps_available():
+        try:
+            torch.mps.empty_cache()  # type: ignore[attr-defined]
+        except AttributeError:
+            pass
+
+
+def move_module_to_preferred_device(module: torch.nn.Module) -> torch.nn.Module:
+    return module.to(resolve_torch_device())
+
+
+def ensure_tensor_on_device(tensor: torch.Tensor) -> torch.Tensor:
+    target = resolve_torch_device()
+    return tensor.to(target) if tensor.device != target else tensor
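device.py centralizes the cuda, then mps, then cpu preference behind a cached resolve_default_device(), with preferred_dtype() pairing float16 with the accelerators and float32 with the CPU. A short usage sketch against the helpers added above; it assumes torch and the 0.5.4 wheel are installed, and the Linear layer and tensor shapes are illustrative only:

# Illustrative sketch; only functions shown in the diff above are used.
import torch

from wisent.core.utils.device import (
    ensure_tensor_on_device,
    move_module_to_preferred_device,
    preferred_dtype,
    resolve_torch_device,
)

dtype = preferred_dtype()                                 # float16 on cuda/mps, float32 on cpu
model = move_module_to_preferred_device(torch.nn.Linear(8, 2)).to(dtype)
x = ensure_tensor_on_device(torch.randn(4, 8).to(dtype))  # moved only if not already on the target
with torch.no_grad():
    y = model(x)                                          # runs on the resolved device
print(resolve_torch_device(), y.shape)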
{wisent-0.5.2.dist-info → wisent-0.5.4.dist-info}/RECORD
CHANGED
@@ -1,4 +1,4 @@
-wisent/__init__.py,sha256=
+wisent/__init__.py,sha256=DITpct-LrdIsTgwx2NgH5Ghx5y8Xgz1YMimy1ZV5RTY,22
 wisent/benchmarks/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 wisent/benchmarks/coding/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 wisent/benchmarks/coding/metrics/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -52,7 +52,7 @@ wisent/cli/wisent_cli/commands/train_cmd.py,sha256=Pp_DY6P-zk8PvCuz6ds9JxH7BWpqQ
 wisent/cli/wisent_cli/util/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 wisent/cli/wisent_cli/util/aggregations.py,sha256=RW2f-B18k4R0gFlIFACQmrhFKSwfxREUqqu5PaI951Y,1415
 wisent/cli/wisent_cli/util/parsing.py,sha256=DvBTcoBItDGJQI-AE4bs0otBJ7ElLynNhqRDzgeQjzs,4067
-wisent/core/__init__.py,sha256=
+wisent/core/__init__.py,sha256=n2ytkljR_PHZDqXl6Q4O2kwoNbtCiP5l0JBHfkXkE2s,361
 wisent/core/autonomous_agent.py,sha256=2k1PLWm2DZ6C2fmsjMDKmQ1_wfN7KhpilHrjkTw8nMw,52489
 wisent/core/bigcode_integration.py,sha256=TIaPQDbPRDPdnCq8U-Gwl4lgayPfhOabOVQddqxotY4,19927
 wisent/core/detection_handling.py,sha256=iiuKpzAbJfx_KFn2SFABQHOeeWblDJMXjzGwGDeKqcs,11127
@@ -180,6 +180,8 @@ wisent/core/trainers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hS
 wisent/core/trainers/steering_trainer.py,sha256=JAdYwPhiakqN70WLqpZFZBsGHu2yed5v-TSM7ppjKJc,10557
 wisent/core/trainers/core/__init__.py,sha256=D0JX0-XCHdtLrCXhVDHNQafvyWCvJ4-o4UKtkH1lI1k,1257
 wisent/core/trainers/core/atoms.py,sha256=ycWk0G-7EIAEOnQL-o5_V5B8KTQ7CQUilGF4ibjighM,1536
+wisent/core/utils/__init__.py,sha256=NavuBkpDSz6q1dN6m34-I-l8Aps1Sgcnx82FHxFufzY,457
+wisent/core/utils/device.py,sha256=5brw9tclTU77NNAokzLXYrKUnjjYBzFfA7wNJPM9ytM,1609
 wisent/opti/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 wisent/opti/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 wisent/opti/core/atoms.py,sha256=9UZeb_SOdDxQ6FBhdAf9qXaEXElImKUsoAMsV0c4yZg,5266
@@ -211,8 +213,8 @@ wisent/synthetic/generators/diversities/core/__init__.py,sha256=47DEQpj8HBSa-_TI
 wisent/synthetic/generators/diversities/core/core.py,sha256=TjSj5T7NE5kRH-ABcFqb1Hz_j3Z6F_TcV-95uHD5Xw8,2201
 wisent/synthetic/generators/diversities/methods/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 wisent/synthetic/generators/diversities/methods/fast_diversity.py,sha256=Z2UzTbzyJFM_ToxCoXM_LQQQ1Jc6BZknrbpikTG1MRw,8522
-wisent-0.5.
-wisent-0.5.
-wisent-0.5.
-wisent-0.5.
-wisent-0.5.
+wisent-0.5.4.dist-info/licenses/LICENSE,sha256=wy0iaw8b2tyqZAfKHib3lP3PJ9o88FDCg92oUHh3sDQ,1073
+wisent-0.5.4.dist-info/METADATA,sha256=WO1PZ7JzoAyWKxfGNY66bIqyV6lpNk25teYSOhm49hE,2424
+wisent-0.5.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+wisent-0.5.4.dist-info/top_level.txt,sha256=2Ts9Iyldnb3auIN2HBBaHPknRy7nSRDm2f6RGzYgr8A,7
+wisent-0.5.4.dist-info/RECORD,,
{wisent-0.5.2.dist-info → wisent-0.5.4.dist-info}/WHEEL
File without changes

{wisent-0.5.2.dist-info → wisent-0.5.4.dist-info}/licenses/LICENSE
File without changes

{wisent-0.5.2.dist-info → wisent-0.5.4.dist-info}/top_level.txt
File without changes