sae-lens 6.14.1__py3-none-any.whl → 6.22.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
sae_lens/training/sae_trainer.py CHANGED
@@ -1,4 +1,5 @@
  import contextlib
+ import math
  from pathlib import Path
  from typing import Any, Callable, Generic, Protocol
 
@@ -10,7 +11,11 @@ from tqdm.auto import tqdm
 
  from sae_lens import __version__
  from sae_lens.config import SAETrainerConfig
- from sae_lens.constants import ACTIVATION_SCALER_CFG_FILENAME, SPARSITY_FILENAME
+ from sae_lens.constants import (
+     ACTIVATION_SCALER_CFG_FILENAME,
+     SPARSITY_FILENAME,
+     TRAINER_STATE_FILENAME,
+ )
  from sae_lens.saes.sae import (
      T_TRAINING_SAE,
      T_TRAINING_SAE_CONFIG,
@@ -56,6 +61,7 @@ class SAETrainer(Generic[T_TRAINING_SAE, T_TRAINING_SAE_CONFIG]):
      data_provider: DataProvider
      activation_scaler: ActivationScaler
      evaluator: Evaluator[T_TRAINING_SAE] | None
+     coefficient_schedulers: dict[str, CoefficientScheduler]
 
      def __init__(
          self,
@@ -84,7 +90,9 @@ class SAETrainer(Generic[T_TRAINING_SAE, T_TRAINING_SAE_CONFIG]):
              range(
                  0,
                  cfg.total_training_samples,
-                 cfg.total_training_samples // self.cfg.n_checkpoints,
+                 math.ceil(
+                     cfg.total_training_samples / (self.cfg.n_checkpoints + 1)
+                 ),
              )
          )[1:]
 
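The new step size fixes an off-by-one in checkpoint spacing: stepping by `total // n_checkpoints` and then dropping the leading zero yields only `n_checkpoints - 1` thresholds (and none at all when `n_checkpoints == 1`), whereas `math.ceil(total / (n_checkpoints + 1))` yields exactly `n_checkpoints` evenly spaced thresholds. A minimal standalone sketch with illustrative sample counts (the helper name is ours, not the library's):

```python
import math

def checkpoint_thresholds(total: int, n_checkpoints: int, *, fixed: bool) -> list[int]:
    """Reproduce the old vs. new step-size computation from this hunk."""
    if fixed:
        step = math.ceil(total / (n_checkpoints + 1))  # new behaviour
    else:
        step = total // n_checkpoints  # old behaviour
    return list(range(0, total, step))[1:]  # drop the threshold at sample 0

print(checkpoint_thresholds(1_000_000, 4, fixed=False))
# [250000, 500000, 750000] -> only 3 of the 4 requested checkpoints
print(checkpoint_thresholds(1_000_000, 4, fixed=True))
# [200000, 400000, 600000, 800000] -> all 4

print(checkpoint_thresholds(1_000_000, 1, fixed=False))  # [] -> the checkpoint is lost
print(checkpoint_thresholds(1_000_000, 1, fixed=True))   # [500000]
```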
@@ -93,11 +101,6 @@ class SAETrainer(Generic[T_TRAINING_SAE, T_TRAINING_SAE_CONFIG]):
              sae.cfg.d_sae, device=cfg.device
          )
          self.n_frac_active_samples = 0
-         # we don't train the scaling factor (initially)
-         # set requires grad to false for the scaling factor
-         for name, param in self.sae.named_parameters():
-             if "scaling_factor" in name:
-                 param.requires_grad = False
 
          self.optimizer = Adam(
              sae.parameters(),
@@ -210,10 +213,7 @@ class SAETrainer(Generic[T_TRAINING_SAE, T_TRAINING_SAE_CONFIG]):
          sparsity_path = checkpoint_path / SPARSITY_FILENAME
          save_file({"sparsity": self.log_feature_sparsity}, sparsity_path)
 
-         activation_scaler_path = (
-             checkpoint_path / ACTIVATION_SCALER_CFG_FILENAME
-         )
-         self.activation_scaler.save(str(activation_scaler_path))
+         self.save_trainer_state(checkpoint_path)
 
          if self.cfg.logger.log_to_wandb:
              self.cfg.logger.log(
@@ -227,6 +227,44 @@ class SAETrainer(Generic[T_TRAINING_SAE, T_TRAINING_SAE_CONFIG]):
          if self.save_checkpoint_fn is not None:
              self.save_checkpoint_fn(checkpoint_path=checkpoint_path)
 
+     def save_trainer_state(self, checkpoint_path: Path) -> None:
+         checkpoint_path.mkdir(exist_ok=True, parents=True)
+         scheduler_state_dicts = {
+             name: scheduler.state_dict()
+             for name, scheduler in self.coefficient_schedulers.items()
+         }
+         torch.save(
+             {
+                 "optimizer": self.optimizer.state_dict(),
+                 "lr_scheduler": self.lr_scheduler.state_dict(),
+                 "n_training_samples": self.n_training_samples,
+                 "n_training_steps": self.n_training_steps,
+                 "act_freq_scores": self.act_freq_scores,
+                 "n_forward_passes_since_fired": self.n_forward_passes_since_fired,
+                 "n_frac_active_samples": self.n_frac_active_samples,
+                 "started_fine_tuning": self.started_fine_tuning,
+                 "coefficient_schedulers": scheduler_state_dicts,
+             },
+             str(checkpoint_path / TRAINER_STATE_FILENAME),
+         )
+         activation_scaler_path = checkpoint_path / ACTIVATION_SCALER_CFG_FILENAME
+         self.activation_scaler.save(str(activation_scaler_path))
+
+     def load_trainer_state(self, checkpoint_path: Path | str) -> None:
+         checkpoint_path = Path(checkpoint_path)
+         self.activation_scaler.load(checkpoint_path / ACTIVATION_SCALER_CFG_FILENAME)
+         state_dict = torch.load(checkpoint_path / TRAINER_STATE_FILENAME)
+         self.optimizer.load_state_dict(state_dict["optimizer"])
+         self.lr_scheduler.load_state_dict(state_dict["lr_scheduler"])
+         self.n_training_samples = state_dict["n_training_samples"]
+         self.n_training_steps = state_dict["n_training_steps"]
+         self.act_freq_scores = state_dict["act_freq_scores"]
+         self.n_forward_passes_since_fired = state_dict["n_forward_passes_since_fired"]
+         self.n_frac_active_samples = state_dict["n_frac_active_samples"]
+         self.started_fine_tuning = state_dict["started_fine_tuning"]
+         for name, scheduler_state_dict in state_dict["coefficient_schedulers"].items():
+             self.coefficient_schedulers[name].load_state_dict(scheduler_state_dict)
+
      def _train_step(
          self,
          sae: T_TRAINING_SAE,
@@ -249,6 +287,7 @@ class SAETrainer(Generic[T_TRAINING_SAE, T_TRAINING_SAE_CONFIG]):
                  sae_in=sae_in,
                  dead_neuron_mask=self.dead_neurons,
                  coefficients=self.get_coefficients(),
+                 n_training_steps=self.n_training_steps,
              ),
          )
 
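Together, the new `save_trainer_state` / `load_trainer_state` pair makes checkpoints resumable: optimizer moments, LR and coefficient scheduler positions, step/sample counters, and feature-firing statistics all round-trip through `TRAINER_STATE_FILENAME` (the activation scaler is still saved alongside, as before, and the loss call now also receives `n_training_steps`). A sketch of what resuming could look like, assuming `trainer` is an already-constructed `SAETrainer` whose config matches the checkpoint; the directory name is illustrative:

```python
from pathlib import Path

# Hypothetical checkpoint directory written by an earlier run's
# save_trainer_state; any directory containing TRAINER_STATE_FILENAME
# and the activation-scaler config works.
checkpoint_dir = Path("checkpoints/latest")

trainer.load_trainer_state(checkpoint_dir)
# n_training_samples / n_training_steps are restored, so the training loop
# continues from where the checkpoint left off rather than from sample zero.
trainer.fit()
```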
sae_lens/util.py CHANGED
@@ -5,6 +5,8 @@ from dataclasses import asdict, fields, is_dataclass
  from pathlib import Path
  from typing import Sequence, TypeVar
 
+ from transformers import PreTrainedTokenizerBase
+
  K = TypeVar("K")
  V = TypeVar("V")
 
@@ -63,3 +65,28 @@ def path_or_tmp_dir(path: str | Path | None):
          yield Path(td)
      else:
          yield Path(path)
+
+
+ def get_special_token_ids(tokenizer: PreTrainedTokenizerBase) -> list[int]:
+     """Get all special token IDs from a tokenizer."""
+     special_tokens = set()
+
+     # Get special tokens from tokenizer attributes
+     for attr in dir(tokenizer):
+         if attr.endswith("_token_id"):
+             token_id = getattr(tokenizer, attr)
+             if token_id is not None:
+                 special_tokens.add(token_id)
+
+     # Get any additional special tokens from the tokenizer's special tokens map
+     if hasattr(tokenizer, "special_tokens_map"):
+         for token in tokenizer.special_tokens_map.values():
+             if isinstance(token, str):
+                 token_id = tokenizer.convert_tokens_to_ids(token)  # type: ignore
+                 special_tokens.add(token_id)
+             elif isinstance(token, list):
+                 for t in token:
+                     token_id = tokenizer.convert_tokens_to_ids(t)  # type: ignore
+                     special_tokens.add(token_id)
+
+     return list(special_tokens)
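A quick usage sketch for the new helper. The model choice below is just an example; for GPT-2 the bos/eos/unk attributes all point at `<|endoftext|>`, so the returned list collapses to a single ID:

```python
from transformers import AutoTokenizer

from sae_lens.util import get_special_token_ids

tokenizer = AutoTokenizer.from_pretrained("gpt2")
print(get_special_token_ids(tokenizer))  # [50256], i.e. <|endoftext|>
```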
sae_lens-6.14.1.dist-info/METADATA → sae_lens-6.22.1.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: sae-lens
- Version: 6.14.1
+ Version: 6.22.1
  Summary: Training and Analyzing Sparse Autoencoders (SAEs)
  License: MIT
  License-File: LICENSE
@@ -30,19 +30,19 @@ Requires-Dist: tenacity (>=9.0.0)
  Requires-Dist: transformer-lens (>=2.16.1,<3.0.0)
  Requires-Dist: transformers (>=4.38.1,<5.0.0)
  Requires-Dist: typing-extensions (>=4.10.0,<5.0.0)
- Project-URL: Homepage, https://jbloomaus.github.io/SAELens
- Project-URL: Repository, https://github.com/jbloomAus/SAELens
+ Project-URL: Homepage, https://decoderesearch.github.io/SAELens
+ Project-URL: Repository, https://github.com/decoderesearch/SAELens
  Description-Content-Type: text/markdown
 
- <img width="1308" alt="Screenshot 2024-03-21 at 3 08 28 pm" src="https://github.com/jbloomAus/mats_sae_training/assets/69127271/209012ec-a779-4036-b4be-7b7739ea87f6">
+ <img width="1308" height="532" alt="saes_pic" src="https://github.com/user-attachments/assets/2a5d752f-b261-4ee4-ad5d-ebf282321371" />
 
  # SAE Lens
 
  [![PyPI](https://img.shields.io/pypi/v/sae-lens?color=blue)](https://pypi.org/project/sae-lens/)
  [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
- [![build](https://github.com/jbloomAus/SAELens/actions/workflows/build.yml/badge.svg)](https://github.com/jbloomAus/SAELens/actions/workflows/build.yml)
- [![Deploy Docs](https://github.com/jbloomAus/SAELens/actions/workflows/deploy_docs.yml/badge.svg)](https://github.com/jbloomAus/SAELens/actions/workflows/deploy_docs.yml)
- [![codecov](https://codecov.io/gh/jbloomAus/SAELens/graph/badge.svg?token=N83NGH8CGE)](https://codecov.io/gh/jbloomAus/SAELens)
+ [![build](https://github.com/decoderesearch/SAELens/actions/workflows/build.yml/badge.svg)](https://github.com/decoderesearch/SAELens/actions/workflows/build.yml)
+ [![Deploy Docs](https://github.com/decoderesearch/SAELens/actions/workflows/deploy_docs.yml/badge.svg)](https://github.com/decoderesearch/SAELens/actions/workflows/deploy_docs.yml)
+ [![codecov](https://codecov.io/gh/decoderesearch/SAELens/graph/badge.svg?token=N83NGH8CGE)](https://codecov.io/gh/decoderesearch/SAELens)
 
  SAELens exists to help researchers:
 
@@ -50,7 +50,7 @@ SAELens exists to help researchers:
  - Analyse sparse autoencoders / research mechanistic interpretability.
  - Generate insights which make it easier to create safe and aligned AI systems.
 
- Please refer to the [documentation](https://jbloomaus.github.io/SAELens/) for information on how to:
+ Please refer to the [documentation](https://decoderesearch.github.io/SAELens/) for information on how to:
 
  - Download and Analyse pre-trained sparse autoencoders.
  - Train your own sparse autoencoders.
@@ -58,25 +58,25 @@ Please refer to the [documentation](https://jbloomaus.github.io/SAELens/) for in
 
  SAE Lens is the result of many contributors working collectively to improve humanity's understanding of neural networks, many of whom are motivated by a desire to [safeguard humanity from risks posed by artificial intelligence](https://80000hours.org/problem-profiles/artificial-intelligence/).
 
- This library is maintained by [Joseph Bloom](https://www.jbloomaus.com/), [Curt Tigges](https://curttigges.com/), [Anthony Duong](https://github.com/anthonyduong9) and [David Chanin](https://github.com/chanind).
+ This library is maintained by [Joseph Bloom](https://www.decoderesearch.com/), [Curt Tigges](https://curttigges.com/), [Anthony Duong](https://github.com/anthonyduong9) and [David Chanin](https://github.com/chanind).
 
  ## Loading Pre-trained SAEs.
 
- Pre-trained SAEs for various models can be imported via SAE Lens. See this [page](https://jbloomaus.github.io/SAELens/sae_table/) in the readme for a list of all SAEs.
+ Pre-trained SAEs for various models can be imported via SAE Lens. See this [page](https://decoderesearch.github.io/SAELens/sae_table/) in the readme for a list of all SAEs.
 
  ## Migrating to SAELens v6
 
- The new v6 update is a major refactor to SAELens and changes the way training code is structured. Check out the [migration guide](https://jbloomaus.github.io/SAELens/latest/migrating/) for more details.
+ The new v6 update is a major refactor to SAELens and changes the way training code is structured. Check out the [migration guide](https://decoderesearch.github.io/SAELens/latest/migrating/) for more details.
 
  ## Tutorials
 
- - [SAE Lens + Neuronpedia](tutorials/tutorial_2_0.ipynb)[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://githubtocolab.com/jbloomAus/SAELens/blob/main/tutorials/tutorial_2_0.ipynb)
+ - [SAE Lens + Neuronpedia](tutorials/tutorial_2_0.ipynb)[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://githubtocolab.com/decoderesearch/SAELens/blob/main/tutorials/tutorial_2_0.ipynb)
  - [Loading and Analysing Pre-Trained Sparse Autoencoders](tutorials/basic_loading_and_analysing.ipynb)
-   [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://githubtocolab.com/jbloomAus/SAELens/blob/main/tutorials/basic_loading_and_analysing.ipynb)
+   [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://githubtocolab.com/decoderesearch/SAELens/blob/main/tutorials/basic_loading_and_analysing.ipynb)
  - [Understanding SAE Features with the Logit Lens](tutorials/logits_lens_with_features.ipynb)
-   [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://githubtocolab.com/jbloomAus/SAELens/blob/main/tutorials/logits_lens_with_features.ipynb)
+   [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://githubtocolab.com/decoderesearch/SAELens/blob/main/tutorials/logits_lens_with_features.ipynb)
  - [Training a Sparse Autoencoder](tutorials/training_a_sparse_autoencoder.ipynb)
-   [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://githubtocolab.com/jbloomAus/SAELens/blob/main/tutorials/training_a_sparse_autoencoder.ipynb)
+   [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://githubtocolab.com/decoderesearch/SAELens/blob/main/tutorials/training_a_sparse_autoencoder.ipynb)
 
  ## Join the Slack!
 
@@ -91,7 +91,7 @@ Please cite the package as follows:
     title = {SAELens},
     author = {Bloom, Joseph and Tigges, Curt and Duong, Anthony and Chanin, David},
     year = {2024},
-    howpublished = {\url{https://github.com/jbloomAus/SAELens}},
+    howpublished = {\url{https://github.com/decoderesearch/SAELens}},
  }
  ```
 
sae_lens-6.22.1.dist-info/RECORD ADDED
@@ -0,0 +1,41 @@
+ sae_lens/__init__.py,sha256=v-2uKiNW5UNVCRt7vyBrvI0olJsXIxaPp9TJvo-m9wg,4033
+ sae_lens/analysis/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ sae_lens/analysis/hooked_sae_transformer.py,sha256=dQRgGVwce8XwylL2AzJE7l9elhtMRFCs2hdUj-Qyy4g,14038
+ sae_lens/analysis/neuronpedia_integration.py,sha256=Gx1W7hUBEuMoasNcnOnZ1wmqbXDd1pSZ1nqKEya1HQc,4962
+ sae_lens/cache_activations_runner.py,sha256=RNN_nDQkH0lqEIxTAIDx3g1cgAzRxQWBSBEXA6nbWh0,12565
+ sae_lens/config.py,sha256=fxvpQxFfPOVUkryiHD19q9O1AJDSkIguWeYlbJuTxmY,30329
+ sae_lens/constants.py,sha256=qX12uAE_xkha6hjss_0MGTbakI7gEkJzHABkZaHWQFU,683
+ sae_lens/evals.py,sha256=P0NUsJeGzYxFBiVKhbPzd72IFKY4gH40HHlEZ3jEAmg,39598
+ sae_lens/llm_sae_training_runner.py,sha256=M7BK55gSFYu2qFQKABHX3c8i46P1LfODCeyHFzGGuqU,15196
+ sae_lens/load_model.py,sha256=C8AMykctj6H7tz_xRwB06-EXj6TfW64PtSJZR5Jxn1Y,8649
+ sae_lens/loading/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ sae_lens/loading/pretrained_sae_loaders.py,sha256=X-gVZ4A74E85lSMFMsZ_rEQhHlR9AYFwhxvoA_vt2CQ,56051
+ sae_lens/loading/pretrained_saes_directory.py,sha256=hejNfLUepYCSGPalRfQwxxCEUqMMUPsn1tufwvwct5k,3820
+ sae_lens/pretokenize_runner.py,sha256=x-reJzVPFDS9iRFbZtrFYSzNguJYki9gd0pbHjYJ3r4,7085
+ sae_lens/pretrained_saes.yaml,sha256=VzgJ_t-IEWpO2MabgQY6CAcg8FFsqZWiOVXjqvqfgeE,604973
+ sae_lens/registry.py,sha256=nhy7BPSudSATqW4lo9H_k3Na7sfGHmAf9v-3wpnLL_o,1490
+ sae_lens/saes/__init__.py,sha256=nTNPnJ7edyfedo1MX96xwn9WOG8504yHbT9LFw9od_0,1778
+ sae_lens/saes/batchtopk_sae.py,sha256=x4EbgZl0GUickRPcCmtKNGS2Ra3Uy1Z1OtF2FnrSabQ,5422
+ sae_lens/saes/gated_sae.py,sha256=Jq74JGtqpO6tW3XdJGbURTTWN_fAoAMKu9T7O-MZTeE,8793
+ sae_lens/saes/jumprelu_sae.py,sha256=zUGHWOFXbeDBS3mjkOE3ikxlEniq2EX9rCAizLMOpp4,13206
+ sae_lens/saes/matryoshka_batchtopk_sae.py,sha256=Qr6htt1HHOuO9FXI9hyaPSnGFIiJG-v7y1t1CEmkFzM,5995
+ sae_lens/saes/sae.py,sha256=q8ylAdqtkNAms7X-3y1QIBfHOZ-FvKHvCap7Tw_cnzE,37733
+ sae_lens/saes/standard_sae.py,sha256=nEVETwAmRD2tyX7ESIic1fij48gAq1Dh7s_GQ2fqCZ4,5747
+ sae_lens/saes/temporal_sae.py,sha256=DsecivcHWId-MTuJpQbz8OhqtmGhZACxJauYZGHo0Ok,13272
+ sae_lens/saes/topk_sae.py,sha256=D1N4LHGOeV8dhHW0i3HqBT1cqA-E1Plq11uMJtVfNBo,21057
+ sae_lens/saes/transcoder.py,sha256=BfLSbTYVNZh-ruGxseZiZJ_acEL6_7QyTdfqUr0lDOg,12156
+ sae_lens/tokenization_and_batching.py,sha256=D_o7cXvRqhT89H3wNzoRymNALNE6eHojBWLdXOUwUGE,5438
+ sae_lens/training/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ sae_lens/training/activation_scaler.py,sha256=FzNfgBplLWmyiSlZ6TUvE-nur3lOiGTrlvC97ys8S24,1973
+ sae_lens/training/activations_store.py,sha256=1ozCANGXO8Vx9d_l-heb-MsSpUoYcHagcve5JLGwZYY,33762
+ sae_lens/training/mixing_buffer.py,sha256=vDpYG5ZE70szDvBsRKcNHEES3h_WTKJ16qDYk5jPOVA,2015
+ sae_lens/training/optim.py,sha256=bJpqqcK4enkcPvQAJkeH4Ci1LUOlfjIMTv6-IlaAbRA,5588
+ sae_lens/training/sae_trainer.py,sha256=zhkabyIKxI_tZTV3_kwz6zMrHZ95Ecr97krmwc-9ffs,17600
+ sae_lens/training/types.py,sha256=1FpLx_Doda9vZpmfm-x1e8wGBYpyhe9Kpb_JuM5nIFM,90
+ sae_lens/training/upload_saes_to_huggingface.py,sha256=r_WzI1zLtGZ5TzAxuG3xa_8T09j3zXJrWd_vzPsPGkQ,4469
+ sae_lens/tutorial/tsea.py,sha256=fd1am_XXsf2KMbByDapJo-2qlxduKaa62Z2qcQZ3QKU,18145
+ sae_lens/util.py,sha256=tCovQ-eZa1L7thPpNDL6PGOJrIMML2yLI5e0EHCOpS8,3309
+ sae_lens-6.22.1.dist-info/METADATA,sha256=QoCu9iHTvA66XSkU2aR_4VxP7wGFr_NQPJUZwxvaOak,5369
+ sae_lens-6.22.1.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
+ sae_lens-6.22.1.dist-info/licenses/LICENSE,sha256=DW6e-hDosiu4CfW0-imI57sV1I5f9UEslpviNQcOAKs,1069
+ sae_lens-6.22.1.dist-info/RECORD,,
sae_lens-6.14.1.dist-info/RECORD DELETED
@@ -1,39 +0,0 @@
- sae_lens/__init__.py,sha256=bh_CgiUTwniwjnBsHPO170zHd10hLM5fCeAgMZc-8n4,3589
- sae_lens/analysis/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- sae_lens/analysis/hooked_sae_transformer.py,sha256=vRu6JseH1lZaEeILD5bEkQEQ1wYHHDcxD-f2olKmE9Y,14275
- sae_lens/analysis/neuronpedia_integration.py,sha256=Gx1W7hUBEuMoasNcnOnZ1wmqbXDd1pSZ1nqKEya1HQc,4962
- sae_lens/cache_activations_runner.py,sha256=cNeAtp2JQ_vKbeddZVM-tcPLYyyfTWL8NDna5KQpkLI,12583
- sae_lens/config.py,sha256=IdRXSKPfYY3hwUovj-u83eep8z52gkJHII0mY0KseYY,28739
- sae_lens/constants.py,sha256=CSjmiZ-bhjQeVLyRvWxAjBokCgkfM8mnvd7-vxLIWTY,639
- sae_lens/evals.py,sha256=p4AOueeemhJXyfLx2TxOva8LXxXj63JSKe9Lnib3mHs,39623
- sae_lens/llm_sae_training_runner.py,sha256=sJTcDX1bUJJ_jZLUT88-8KUYIAPeUGoXktX68PsBqw0,15137
- sae_lens/load_model.py,sha256=C8AMykctj6H7tz_xRwB06-EXj6TfW64PtSJZR5Jxn1Y,8649
- sae_lens/loading/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- sae_lens/loading/pretrained_sae_loaders.py,sha256=SM4aT8NM6ezYix5c2u7p72Fz2RfvTtf7gw5RdOSKXhc,49846
- sae_lens/loading/pretrained_saes_directory.py,sha256=4Vn-Jex6SveD7EbxcSOBv8cx1gkPfUMLU1QOP-ww1ZE,3752
- sae_lens/pretokenize_runner.py,sha256=x-reJzVPFDS9iRFbZtrFYSzNguJYki9gd0pbHjYJ3r4,7085
- sae_lens/pretrained_saes.yaml,sha256=6ca3geEB6NyhULUrmdtPDK8ea0YdpLp8_au78vIFC5w,602553
- sae_lens/registry.py,sha256=nhy7BPSudSATqW4lo9H_k3Na7sfGHmAf9v-3wpnLL_o,1490
- sae_lens/saes/__init__.py,sha256=jVwazK8Q6dW5J6_zFXPoNAuBvSxgziQ8eMOjGM3t-X8,1475
- sae_lens/saes/batchtopk_sae.py,sha256=zxIke8lOBKkQEMVFk6sSW6q_s6F9RKhysLqfqG9ecwI,5300
- sae_lens/saes/gated_sae.py,sha256=qcmM9JwBA8aZR8z_IRHV1_gQX-q_63tKewWXRnhdXuo,8986
- sae_lens/saes/jumprelu_sae.py,sha256=HHBF1sJ95lZvxwP5vwLSQFKdnJN2KKYK0WAEaLTrta0,13399
- sae_lens/saes/sae.py,sha256=nuII6ZmaVtJWhPjyhasHQyiv_Wj-zdAtRQqJRYbVBQs,38274
- sae_lens/saes/standard_sae.py,sha256=9UqYyYtQuThYxXKNaDjYcyowpOx2-7cShG-TeUP6JCQ,5940
- sae_lens/saes/topk_sae.py,sha256=tzQM5eQFifMe--8_8NUBYWY7hpjQa6A_olNe6U71FE8,21275
- sae_lens/saes/transcoder.py,sha256=BfLSbTYVNZh-ruGxseZiZJ_acEL6_7QyTdfqUr0lDOg,12156
- sae_lens/tokenization_and_batching.py,sha256=D_o7cXvRqhT89H3wNzoRymNALNE6eHojBWLdXOUwUGE,5438
- sae_lens/training/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- sae_lens/training/activation_scaler.py,sha256=seEE-2Qd2JMHxqgnsNWPt-DGtYGZxWPnOwCGuVNSOtI,1719
- sae_lens/training/activations_store.py,sha256=2EUY2abqpT5El3T95sypM_JRDgiKL3VeT73U9SQIFGY,32903
- sae_lens/training/mixing_buffer.py,sha256=vDpYG5ZE70szDvBsRKcNHEES3h_WTKJ16qDYk5jPOVA,2015
- sae_lens/training/optim.py,sha256=TiI9nbffzXNsI8WjcIsqa2uheW6suxqL_KDDmWXobWI,5312
- sae_lens/training/sae_trainer.py,sha256=il4Evf-c4F3Uf2n_v-AOItCasX-uPxYTzn_sZLvLkl0,15633
- sae_lens/training/types.py,sha256=1FpLx_Doda9vZpmfm-x1e8wGBYpyhe9Kpb_JuM5nIFM,90
- sae_lens/training/upload_saes_to_huggingface.py,sha256=r_WzI1zLtGZ5TzAxuG3xa_8T09j3zXJrWd_vzPsPGkQ,4469
- sae_lens/tutorial/tsea.py,sha256=fd1am_XXsf2KMbByDapJo-2qlxduKaa62Z2qcQZ3QKU,18145
- sae_lens/util.py,sha256=lW7fBn_b8quvRYlen9PUmB7km60YhKyjmuelB1f6KzQ,2253
- sae_lens-6.14.1.dist-info/METADATA,sha256=ZE2ppvNRrI_CAr7jQ2TdcPQmfEdhLoo-UMW83KVbtvU,5318
- sae_lens-6.14.1.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
- sae_lens-6.14.1.dist-info/licenses/LICENSE,sha256=DW6e-hDosiu4CfW0-imI57sV1I5f9UEslpviNQcOAKs,1069
- sae_lens-6.14.1.dist-info/RECORD,,