dl-backtrace 0.0.14__py3-none-any.whl → 0.0.16.dev4__py3-none-any.whl

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.

Potentially problematic release.

This version of dl-backtrace might be problematic.

Files changed (27)
  1. dl_backtrace/pytorch_backtrace/backtrace/backtrace.py +173 -44
  2. dl_backtrace/pytorch_backtrace/backtrace/utils/__init__.py +3 -0
  3. dl_backtrace/pytorch_backtrace/backtrace/utils/encoder.py +183 -0
  4. dl_backtrace/pytorch_backtrace/backtrace/utils/encoder_decoder.py +489 -0
  5. dl_backtrace/pytorch_backtrace/backtrace/utils/helper.py +95 -0
  6. dl_backtrace/pytorch_backtrace/backtrace/utils/prop.py +481 -0
  7. dl_backtrace/tf_backtrace/backtrace/__init__.py +1 -2
  8. dl_backtrace/tf_backtrace/backtrace/activation_info.py +33 -0
  9. dl_backtrace/tf_backtrace/backtrace/backtrace.py +506 -279
  10. dl_backtrace/tf_backtrace/backtrace/models.py +25 -0
  11. dl_backtrace/tf_backtrace/backtrace/server.py +27 -0
  12. dl_backtrace/tf_backtrace/backtrace/utils/__init__.py +5 -2
  13. dl_backtrace/tf_backtrace/backtrace/utils/encoder.py +206 -0
  14. dl_backtrace/tf_backtrace/backtrace/utils/encoder_decoder.py +501 -0
  15. dl_backtrace/tf_backtrace/backtrace/utils/helper.py +99 -0
  16. dl_backtrace/tf_backtrace/backtrace/utils/utils_contrast.py +1132 -0
  17. dl_backtrace/tf_backtrace/backtrace/utils/utils_prop.py +1582 -0
  18. dl_backtrace/version.py +2 -2
  19. {dl_backtrace-0.0.14.dist-info → dl_backtrace-0.0.16.dev4.dist-info}/METADATA +2 -2
  20. dl_backtrace-0.0.16.dev4.dist-info/RECORD +29 -0
  21. {dl_backtrace-0.0.14.dist-info → dl_backtrace-0.0.16.dev4.dist-info}/WHEEL +1 -1
  22. dl_backtrace/tf_backtrace/backtrace/config.py +0 -41
  23. dl_backtrace/tf_backtrace/backtrace/utils/contrast.py +0 -834
  24. dl_backtrace/tf_backtrace/backtrace/utils/prop.py +0 -725
  25. dl_backtrace-0.0.14.dist-info/RECORD +0 -21
  26. {dl_backtrace-0.0.14.dist-info → dl_backtrace-0.0.16.dev4.dist-info}/LICENSE +0 -0
  27. {dl_backtrace-0.0.14.dist-info → dl_backtrace-0.0.16.dev4.dist-info}/top_level.txt +0 -0
dl_backtrace/tf_backtrace/backtrace/models.py
@@ -0,0 +1,25 @@
+ from typing import List, Dict, Optional
+ from pydantic import BaseModel
+
+ class InitModel(BaseModel):
+     model_link: str
+     model_type: str
+     token: str
+
+ class DelModel(BaseModel):
+     token: str
+
+ class Datapoint(BaseModel):
+     data: list
+     is_multi_input: bool
+     mode: str
+     token: str
+
+ # class Sample(BaseModel):
+ #     model_code: str
+ #     data: List[Datapoint]
+ #     structure: str
+
+ # class BTresponse(BaseModel):
+ #     relevance: dict
+ #     status: str
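These Pydantic models define the JSON bodies accepted by the new FastAPI server below. As a rough illustration (the field values are invented; the package does not document the accepted model_type strings or token format), the /init and /eval request bodies would parse as:

    from models import InitModel, Datapoint  # the file added above

    # Invented example values, for shape only.
    init = InitModel(model_link="https://example.com/model.h5",
                     model_type="tensorflow",
                     token="abc123")

    sample = Datapoint(data=[[0.1, 0.2, 0.3]],
                       is_multi_input=False,
                       mode="default",
                       token="abc123")

    print(init.dict(), sample.dict())  # pydantic v1-style serialization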
dl_backtrace/tf_backtrace/backtrace/server.py
@@ -0,0 +1,27 @@
+ from fastapi import FastAPI
+ from models import *
+ from utils import load_and_store, compute, delete_model
+
+ app = FastAPI()
+
+ @app.get("/")
+ async def read_root():
+     return {"message": "Operational"}
+
+
+ @app.post("/init")
+ async def init_model(init_model: InitModel):
+     resp = load_and_store(init_model)
+     return resp
+
+ @app.post("/eval")
+ async def process(sample: Datapoint):
+     resp = compute(sample)
+     return resp
+
+ @app.post("/delete")
+ async def remove_model(del_model: DelModel):
+     resp = delete_model(del_model)
+     return resp
+
+ # uvicorn server:app --reload
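The server is a thin wrapper: each POST route forwards its validated body to a helper (load_and_store, compute, delete_model) and returns whatever it produces. A minimal client sketch against a locally running instance (payload values are invented, as above):

    import requests

    BASE = "http://127.0.0.1:8000"  # uvicorn's default bind address

    # Register a model; fields follow InitModel in models.py.
    print(requests.post(f"{BASE}/init", json={
        "model_link": "https://example.com/model.h5",
        "model_type": "tensorflow",
        "token": "abc123",
    }).json())

    # Request a relevance computation for one datapoint.
    print(requests.post(f"{BASE}/eval", json={
        "data": [[0.1, 0.2, 0.3]],
        "is_multi_input": False,
        "mode": "default",
        "token": "abc123",
    }).json())

    # Release the stored model.
    requests.post(f"{BASE}/delete", json={"token": "abc123"})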
dl_backtrace/tf_backtrace/backtrace/utils/__init__.py
@@ -1,2 +1,5 @@
- from .contrast import *
- from .prop import *
+ from .utils_contrast import *
+ from .utils_prop import *
+ from .encoder import *
+ from .encoder_decoder import *
+ from .helper import *
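The old contrast and prop modules are removed (files 23 and 24 in the list above); their renamed replacements, plus the three new modules, are wildcard-re-exported, so code importing names through the package namespace keeps working. For example (module path assumed from the file list):

    from dl_backtrace.tf_backtrace.backtrace import utils

    # encoder helpers such as build_encoder_tree are now reachable here too
    print(hasattr(utils, "build_encoder_tree"))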
dl_backtrace/tf_backtrace/backtrace/utils/encoder.py
@@ -0,0 +1,206 @@
+ import tensorflow as tf
+
+
+ def build_encoder_tree(model, root='bert'):
+     # Initialize the tree structure
+     ltree = {}
+     layer_tree = {}
+     inputs = []
+     outputs = []
+     intermediates = []
+
+     # Base component setup
+     def add_component(tree, name, component, child=None):
+         tree[name] = {
+             'name': name,
+             'class': component if type(component).__name__ == 'str' else type(component).__name__,
+             'type': str(type(component)),
+             'parent': None,
+             'child': None
+         }
+
+         if isinstance(child, list):
+             tree[name]['child'] = child
+         elif isinstance(child, str):
+             tree[name]['child'] = [child]
+
+         if tree[name]['class'] == 'list':
+             tree[name]['class'] = [type(item).__name__ for item in component]
+             tree[name]['type'] = [str(type(item)) for item in component]
+
+         # Keep track of component type in a separate dictionary
+         layer_tree[name] = component if type(component).__name__ == 'str' else tree[name]['type']
+
+         # Link the parent to its children
+         if isinstance(child, list):
+             for ch in child:
+                 if ch in tree:
+                     tree[ch]['parent'] = [name]
+
+         elif isinstance(child, str):
+             if child in tree:
+                 tree[child]['parent'] = [name]
+
+         return tree[name]
+
+     # Add root and embeddings component
+     embeddings = add_component(ltree, 'embeddings', 'Embeddings', child=None)
+
+     # Add encoder layers dynamically
+     current_child = 'embeddings'
+     for i, layer in enumerate(model.bert.encoder.layer):
+         attention = add_component(ltree, f'attention_{i}', 'Self_Attention', child=current_child)
+         add_and_layer_norm_0 = add_component(ltree, f'add_and_layer_norm_{i}_0', 'Residual', child=[f'attention_{i}', current_child])
+         feed_forward = add_component(ltree, f'feed_forward_{i}', 'Feed_Forward', child=f'add_and_layer_norm_{i}_0')
+         add_and_layer_norm_1 = add_component(ltree, f'add_and_layer_norm_{i}_1', 'Residual', child=[f'feed_forward_{i}', f'add_and_layer_norm_{i}_0'])
+         current_child = f'add_and_layer_norm_{i}_1'  # Update current_child to link this layer's output to the next layer's input
+
+     # Optionally add pooler layer if present
+     if hasattr(model.bert, 'pooler'):
+         pooler = add_component(ltree, 'pooler', 'Pooler', child=current_child)
+         current_child = 'pooler'
+
+     if hasattr(model, 'classifier'):
+         classifier = add_component(ltree, 'classifier', 'Classifier', child=current_child)
+         current_child = 'classifier'
+
+     # Classify components
+     for name, component in ltree.items():
+         if component['parent'] is None:
+             outputs.append(component['name'])
+         elif component['child'] is None:
+             inputs.append(component['name'])
+         else:
+             intermediates.append(component['name'])
+
+     model_resource = {
+         "layers": layer_tree,
+         "graph": ltree,
+         "outputs": outputs,
+         "inputs": inputs
+     }
+
+     return model_resource
+
+
+ def extract_encoder_weights(model):
+     # Initialize a dictionary to hold the weights
+     weights_dict = {
+         'embeddings': {},
+         'pooler': {},
+         'dropout': {},
+         'classifier': {}
+     }
+
+     for i in range(model.config.num_hidden_layers):
+         weights_dict[f'attention_{i}'] = {}
+         weights_dict[f'add_and_layer_norm_{i}_0'] = {}
+         weights_dict[f'feed_forward_{i}'] = {}
+         weights_dict[f'add_and_layer_norm_{i}_1'] = {}
+
+
+     for weight in model.weights:
+         name = weight.name
+         value = weight.numpy()
+
+         if 'embeddings' in name:
+             weights_dict['embeddings'][name] = value
+
+         elif 'bert/encoder' in name:
+             # code here
+             layer = name.split('/')[3].split('.')[1][1:]
+             submodule = name.split('/')[4]
+
+             if 'attention' in submodule and 'LayerNorm' not in name:
+                 weights_dict[f'attention_{layer}'][name] = value
+
+             elif 'attention/output/LayerNorm' in name:
+                 weights_dict[f'add_and_layer_norm_{layer}_0'][name] = value
+
+             elif 'intermediate/dense' in name:
+                 # code
+                 weights_dict[f'feed_forward_{layer}'][name] = value
+
+             elif 'output/dense' in name:
+                 # code
+                 weights_dict[f'feed_forward_{layer}'][name] = value
+
+             elif 'output/LayerNorm' in name:
+                 # code
+                 weights_dict[f'add_and_layer_norm_{layer}_1'][name] = value
+
+         elif 'bert/pooler' in name:
+             weights_dict['pooler'][name] = value
+
+         elif 'classifier' in name:
+             weights_dict['classifier'][name] = value
+
+     return weights_dict
+
+
+ def create_encoder_output(model, input_ids=None, attention_mask=None, token_type_ids=None):
+     all_layer_outputs = {}
+
+     # Embeddings
+     embedding_output = model.bert.embeddings(input_ids=input_ids, token_type_ids=token_type_ids)
+     all_layer_outputs['embeddings'] = embedding_output
+
+     # iterate over each layer
+     hidden_states = embedding_output
+
+     # Cast attention mask to float32
+     if attention_mask is not None:
+         attention_mask = tf.cast(attention_mask, tf.float32)
+
+     for i, layer_module in enumerate(model.bert.encoder.layer):
+         # Self-Attention and attention output
+         attention_output = layer_module.attention.self_attention(
+             hidden_states,
+             attention_mask=attention_mask,
+             head_mask=None,
+             encoder_hidden_states=None,
+             encoder_attention_mask=None,
+             past_key_value=None,
+             output_attentions=False,
+         )[0]
+
+         # Add + Layer Norm after attention
+         attention_output = layer_module.attention.dense_output.dense(attention_output)
+         attention_output = layer_module.attention.dense_output.dropout(attention_output)
+         residual_attention_output = attention_output + hidden_states
+         attention_output_norm = layer_module.attention.dense_output.LayerNorm(residual_attention_output)
+
+         # Feed Forward (Intermediate)
+         intermediate_output = layer_module.intermediate(attention_output_norm)
+
+         # Feed Forward Output
+         feed_forward_output = layer_module.bert_output.dense(intermediate_output)
+         feed_forward_output = layer_module.bert_output.dropout(feed_forward_output)
+         residual_feed_forward_output = feed_forward_output + attention_output_norm
+         feed_forward_output_norm = layer_module.bert_output.LayerNorm(residual_feed_forward_output)
+
+
+         # Save outputs add_and_layer_norm_0_0
+         all_layer_outputs[f'attention_{i}'] = attention_output
+         all_layer_outputs[f'add_and_layer_norm_{i}_0'] = attention_output_norm
+         all_layer_outputs[f'feed_forward_{i}'] = feed_forward_output
+         all_layer_outputs[f'add_and_layer_norm_{i}_1'] = feed_forward_output_norm
+
+         # Update hidden states for the next layer
+         hidden_states = feed_forward_output_norm
+
+     # Pooler
+     if hasattr(model.bert, 'pooler'):
+         pooled_output = model.bert.pooler(hidden_states)
+         all_layer_outputs['pooler'] = pooled_output
+
+     if hasattr(model, 'dropout'):
+         dropout_output = model.dropout(pooled_output)
+         all_layer_outputs['dropout'] = dropout_output
+
+     if hasattr(model, 'classifier'):
+         classifier = model.classifier(dropout_output)
+         softmax_output = tf.nn.softmax(classifier)
+         all_layer_outputs['classifier'] = softmax_output
+
+     return all_layer_outputs
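The three new helpers map a HuggingFace-style TF BERT classifier onto the backtrace graph: build_encoder_tree records the component DAG, extract_encoder_weights buckets every weight tensor by component name, and create_encoder_output re-runs the forward pass layer by layer, caching each intermediate activation under the matching component name. A rough usage sketch, assuming a transformers TFBertForSequenceClassification checkpoint (the model and tokenizer names are illustrative; any model without a .bert attribute will not match these access paths):

    from transformers import BertTokenizer, TFBertForSequenceClassification

    model = TFBertForSequenceClassification.from_pretrained("bert-base-uncased")
    tokenizer = BertTokenizer.from_pretrained("bert-base-uncased")
    enc = tokenizer("a short example sentence", return_tensors="tf")

    resource = build_encoder_tree(model)      # component graph keyed by name
    weights = extract_encoder_weights(model)  # weight tensors bucketed per component
    outputs = create_encoder_output(
        model,
        input_ids=enc["input_ids"],
        attention_mask=enc["attention_mask"],
        token_type_ids=enc["token_type_ids"],
    )

    print(resource["inputs"])                 # ['embeddings']
    print(outputs["classifier"].shape)        # (1, num_labels) softmax scores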