dl-backtrace 0.0.16.dev4__tar.gz → 0.0.18__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of dl-backtrace might be problematic. Click here for more details.
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/.github/scripts/release.py +1 -1
- dl_backtrace-0.0.18/PKG-INFO +173 -0
- dl_backtrace-0.0.18/README.md +156 -0
- dl_backtrace-0.0.18/dl_backtrace/old_backtrace/__init__.py +1 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/tf_backtrace/backtrace/backtrace.py +5 -3
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/tf_backtrace/backtrace/utils/utils_prop.py +53 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/version.py +2 -2
- dl_backtrace-0.0.18/dl_backtrace.egg-info/PKG-INFO +173 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace.egg-info/SOURCES.txt +1 -0
- dl_backtrace-0.0.16.dev4/PKG-INFO +0 -102
- dl_backtrace-0.0.16.dev4/README.md +0 -85
- dl_backtrace-0.0.16.dev4/dl_backtrace.egg-info/PKG-INFO +0 -102
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/.github/workflows/publish.yml +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/.github/workflows/release.yml +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/.gitignore +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/LICENSE +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/MANIFEST.in +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/__init__.py +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/old_backtrace/pytorch_backtrace/__init__.py +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/old_backtrace/pytorch_backtrace/backtrace/__init__.py +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/old_backtrace/pytorch_backtrace/backtrace/backtrace.py +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/old_backtrace/pytorch_backtrace/backtrace/config.py +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/old_backtrace/pytorch_backtrace/backtrace/utils/__init__.py +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/old_backtrace/pytorch_backtrace/backtrace/utils/contrast.py +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/old_backtrace/pytorch_backtrace/backtrace/utils/prop.py +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/old_backtrace/tf_backtrace/__init__.py +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/old_backtrace/tf_backtrace/backtrace/__init__.py +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/old_backtrace/tf_backtrace/backtrace/backtrace.py +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/old_backtrace/tf_backtrace/backtrace/config.py +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/old_backtrace/tf_backtrace/backtrace/utils/__init__.py +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/old_backtrace/tf_backtrace/backtrace/utils/contrast.py +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/old_backtrace/tf_backtrace/backtrace/utils/prop.py +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/pytorch_backtrace/__init__.py +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/pytorch_backtrace/backtrace/__init__.py +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/pytorch_backtrace/backtrace/backtrace.py +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/pytorch_backtrace/backtrace/config.py +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/pytorch_backtrace/backtrace/utils/__init__.py +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/pytorch_backtrace/backtrace/utils/contrast.py +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/pytorch_backtrace/backtrace/utils/encoder.py +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/pytorch_backtrace/backtrace/utils/encoder_decoder.py +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/pytorch_backtrace/backtrace/utils/helper.py +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/pytorch_backtrace/backtrace/utils/prop.py +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/tf_backtrace/__init__.py +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/tf_backtrace/backtrace/__init__.py +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/tf_backtrace/backtrace/activation_info.py +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/tf_backtrace/backtrace/models.py +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/tf_backtrace/backtrace/server.py +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/tf_backtrace/backtrace/utils/__init__.py +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/tf_backtrace/backtrace/utils/encoder.py +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/tf_backtrace/backtrace/utils/encoder_decoder.py +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/tf_backtrace/backtrace/utils/helper.py +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/tf_backtrace/backtrace/utils/utils_contrast.py +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace.egg-info/dependency_links.txt +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace.egg-info/requires.txt +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace.egg-info/top_level.txt +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/pyproject.toml +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/requirements.txt +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/setup.cfg +0 -0
- {dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/setup.py +0 -0
|
@@ -41,7 +41,7 @@ def create_new_patch_release():
|
|
|
41
41
|
new_version_number = bump_patch_number(last_version_number)
|
|
42
42
|
|
|
43
43
|
subprocess.run(
|
|
44
|
-
["gh", "release", "create", "--generate-notes", "0.0.
|
|
44
|
+
["gh", "release", "create", "--generate-notes", "0.0.18"],
|
|
45
45
|
check=True,
|
|
46
46
|
)
|
|
47
47
|
|
|
@@ -0,0 +1,173 @@
|
|
|
1
|
+
Metadata-Version: 2.1
|
|
2
|
+
Name: dl_backtrace
|
|
3
|
+
Version: 0.0.18
|
|
4
|
+
Summary: A python SDK for Deep Learning Backtrace
|
|
5
|
+
Home-page: https://xai.arya.ai/docs/introduction
|
|
6
|
+
License: MIT
|
|
7
|
+
Keywords: aryaxai deep learning backtrace,ML observability
|
|
8
|
+
Classifier: Intended Audience :: Developers
|
|
9
|
+
Classifier: Intended Audience :: Science/Research
|
|
10
|
+
Classifier: Programming Language :: Python :: 3
|
|
11
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
12
|
+
Classifier: Operating System :: OS Independent
|
|
13
|
+
Requires-Python: >=3.0
|
|
14
|
+
Description-Content-Type: text/markdown
|
|
15
|
+
License-File: LICENSE
|
|
16
|
+
Requires-Dist: tensorflow==2.14.0
|
|
17
|
+
|
|
18
|
+
# AryaXai-Backtrace
|
|
19
|
+
Backtrace module for Generating Explainability on Deep learning models using TensorFlow / Pytorch
|
|
20
|
+
|
|
21
|
+
# Backtrace Module
|
|
22
|
+
[](LICENSE)
|
|
23
|
+
|
|
24
|
+
## Overview
|
|
25
|
+
|
|
26
|
+
The Backtrace Module is a powerful and patent-pending algorithm developed by AryaXAI for enhancing the explainability of AI models, particularly in the context of complex techniques like deep learning.
|
|
27
|
+
|
|
28
|
+
## Features
|
|
29
|
+
|
|
30
|
+
- **Explainability:** Gain deep insights into your AI models by using the Backtrace algorithm, providing multiple explanations for their decisions.
|
|
31
|
+
|
|
32
|
+
- **Consistency:** Ensure consistent and accurate explanations across different scenarios and use cases.
|
|
33
|
+
|
|
34
|
+
- **Mission-Critical Support:** Tailored for mission-critical AI use cases where transparency is paramount.
|
|
35
|
+
|
|
36
|
+
## Installation
|
|
37
|
+
|
|
38
|
+
To integrate the Backtrace Module into your project, follow these simple steps:
|
|
39
|
+
|
|
40
|
+
```bash
|
|
41
|
+
pip install dl-backtrace
|
|
42
|
+
```
|
|
43
|
+
|
|
44
|
+
## Usage
|
|
45
|
+
|
|
46
|
+
### TensorFlow-Keras based models
|
|
47
|
+
|
|
48
|
+
```python
|
|
49
|
+
from dl_backtrace.tf_backtrace import Backtrace as B
|
|
50
|
+
```
|
|
51
|
+
|
|
52
|
+
### Pytorch based models
|
|
53
|
+
|
|
54
|
+
```python
|
|
55
|
+
from dl_backtrace.pytorch_backtrace import Backtrace as B
|
|
56
|
+
```
|
|
57
|
+
|
|
58
|
+
### Evaluating using Backtrace:
|
|
59
|
+
|
|
60
|
+
1. Step - 1: Initialize a Backtrace Object using your Model
|
|
61
|
+
```python
|
|
62
|
+
backtrace = B(model=model)
|
|
63
|
+
```
|
|
64
|
+
|
|
65
|
+
2. Step - 2: Calculate layer-wise output using a data instance
|
|
66
|
+
|
|
67
|
+
```python
|
|
68
|
+
layer_outputs = backtrace.predict(test_data[0])
|
|
69
|
+
```
|
|
70
|
+
|
|
71
|
+
3. Step - 3: Calculate layer-wise Relevance using Evaluation
|
|
72
|
+
```python
|
|
73
|
+
relevance = backtrace.eval(layer_outputs,mode='default',scaler=1,thresholding=0.5,task="binary-classification")
|
|
74
|
+
```
|
|
75
|
+
|
|
76
|
+
#### Depending on Task we have several attributes for Relevance Calculation in Evaluation:
|
|
77
|
+
|
|
78
|
+
| Attribute | Description | Values |
|
|
79
|
+
|--------------|-------------|--------|
|
|
80
|
+
| mode | evaluation mode of algorithm | { default, contrastive}|
|
|
81
|
+
| scaler | Total / Starting Relevance at the Last Layer | Integer ( Default: None, Preferred: 1)|
|
|
82
|
+
| thresholding | Thresholding Model Prediction in Segmentation Task to select Pixels predicting the actual class. (Only works in Segmentation Tasks) | Default:0.5 |
|
|
83
|
+
| task | The task of the Model | { binary-classification, multi-class classification, bbox-regression, binary-segmentation} |
|
|
84
|
+
| model-type | Type of the Model | {Encoder/ Encoder_Decoder} |
|
|
85
|
+
|
|
86
|
+
## Example Notebooks :
|
|
87
|
+
|
|
88
|
+
### Tensorflow-Keras :
|
|
89
|
+
|
|
90
|
+
| Name | Task | Link |
|
|
91
|
+
|-------------|-------------|-------------------------------|
|
|
92
|
+
| Backtrace Loan Classification Tabular Dataset | Binary Classification | [Colab Link](https://colab.research.google.com/drive/1H5jaryVPEAQuqk9XPP71UIL4cemli98K?usp=sharing) |
|
|
93
|
+
| Backtrace Image FMNIST Dataset | Multi-Class Classification | [Colab Link](https://colab.research.google.com/drive/1BZsdo7IWYGhdy0Pg_m8r7c3COczuW_tG?usp=sharing) |
|
|
94
|
+
| Backtrace CUB Bounding Box Regression Image Dataset | Single Object Detection | [Colab Link](https://colab.research.google.com/drive/15mmJ2aGt-_Ho7RdPWjNEEoFXE9mu9HLV?usp=sharing) |
|
|
95
|
+
| Backtrace Next Word Generation Textual Dataset | Next Word Generation | [Colab Link](https://colab.research.google.com/drive/1iRMMcEm4iMVuk236vDtEB6rKsirny8cB?usp=sharing) |
|
|
96
|
+
| Backtrace ImDB Sentiment Classification Textual Dataset | Sentiment Classification | [Colab Link](https://colab.research.google.com/drive/1L5nEMO6H8pbGo1Opd9S4mbYPMmq5Vl-M?usp=sharing)|
|
|
97
|
+
| Backtrace Binary Classification Textual Dataset | Binary Classification | [Colab Link](https://colab.research.google.com/drive/1PxFY4hEhcIr4nTVyfwLE_29CzQTCD3dA?usp=sharing) |
|
|
98
|
+
| Backtrace Multi-Class NewsGroup20 Classification Textual Dataset | Multi-Class Classification | [Colab Link](https://colab.research.google.com/drive/1u3B18TZwfTdYJeYBGcHQ0T3fzfM2USGT?usp=sharing) |
|
|
99
|
+
| Backtrace CVC-ClinicDB Colonoscopy Binary Segmentation | Organ Segmentation | [Colab Link](https://colab.research.google.com/drive/1cUNUao7fahDgndVI-cpn2iSByTiWaB4j?usp=sharing) |
|
|
100
|
+
| Backtrace CamVid Road Car Binary Segmentation | Binary Segmentation | [Colab Link](https://colab.research.google.com/drive/1OAY7aAraKq_ucyVt5AYPBD8LkQOIuy1C?usp=sharing) |
|
|
101
|
+
| Backtrace Transformer Encoder for Sentiment Analysis | Binary Classification | [Colab Link](https://colab.research.google.com/drive/1H7-4ox3YWMtoH0vptYGXaN63PRJFbTrX?usp=sharing) |
|
|
102
|
+
| Backtrace Transformer Encoder-Decoder Model for Neural Machine Translation | Neural Machine Translation | [Colab Link](https://colab.research.google.com/drive/1NApbrd11TEqlrqGCBYPmgMvBbZBJhpWD?usp=sharing) |
|
|
103
|
+
| Backtrace Transformer Encoder-Decoder Model for Text Summarization | Text Summarization | [Colab Link](https://colab.research.google.com/drive/18CPNnEJzGlCPJ2sSXX4mArAzK1NLe9Lj?usp=sharing) |
|
|
104
|
+
|
|
105
|
+
### Pytorch :
|
|
106
|
+
| Name | Task | Link |
|
|
107
|
+
|-------------|-------------|-------------------------------|
|
|
108
|
+
| Backtrace Tabular Dataset | Binary Classification | [Colab Link](https://colab.research.google.com/drive/1_r-IS7aIuATSvGNRLk8VDVVLkDSaKCpD?usp=sharing)|
|
|
109
|
+
| Backtrace Image Dataset | Multi-Class Classification | [Colab Link](https://colab.research.google.com/drive/1v2XajWtIbf7Vt31Z1fnKnAjyiDzPxwnU?usp=sharing) |
|
|
110
|
+
|
|
111
|
+
For more detailed examples and use cases, check out our documentation.
|
|
112
|
+
|
|
113
|
+
## Supported Layers and Future Work :
|
|
114
|
+
|
|
115
|
+
### Tensorflow-Keras:
|
|
116
|
+
|
|
117
|
+
- [x] Dense (Fully Connected) Layer
|
|
118
|
+
- [x] Convolutional Layer (Conv2D,Conv1D)
|
|
119
|
+
- [x] Transpose Convolutional Layer (Conv2DTranspose,Conv1DTranspose)
|
|
120
|
+
- [x] Reshape Layer
|
|
121
|
+
- [x] Flatten Layer
|
|
122
|
+
- [x] Global Max Pooling (2D & 1D) Layer
|
|
123
|
+
- [x] Global Average Pooling (2D & 1D) Layer
|
|
124
|
+
- [x] Max Pooling (2D & 1D) Layer
|
|
125
|
+
- [x] Average Pooling (2D & 1D) Layer
|
|
126
|
+
- [x] Concatenate Layer
|
|
127
|
+
- [x] Add Layer
|
|
128
|
+
- [x] Long Short-Term Memory (LSTM) Layer
|
|
129
|
+
- [x] Dropout Layer
|
|
130
|
+
- [x] Embedding Layer
|
|
131
|
+
- [x] TextVectorization Layer
|
|
132
|
+
- [x] Self-Attention Layer
|
|
133
|
+
- [x] Cross-Attention Layer
|
|
134
|
+
- [x] Feed-Forward Layer
|
|
135
|
+
- [x] Pooler Layer
|
|
136
|
+
- [x] Decoder LM (Language Model) Head
|
|
137
|
+
- [ ] Other Custom Layers
|
|
138
|
+
|
|
139
|
+
### Pytorch :
|
|
140
|
+
|
|
141
|
+
(Note: Currently we only Support Binary and Multi-Class Classification in Pytorch, Segmentation and Single Object Detection will be supported in the next release.)
|
|
142
|
+
|
|
143
|
+
- [x] Linear (Fully Connected) Layer
|
|
144
|
+
- [x] Convolutional Layer (Conv2D)
|
|
145
|
+
- [x] Reshape Layer
|
|
146
|
+
- [x] Flatten Layer
|
|
147
|
+
- [x] Global Average Pooling 2D Layer (AdaptiveAvgPool2d)
|
|
148
|
+
- [x] Max Pooling 2D Layer (MaxPool2d)
|
|
149
|
+
- [x] Average Pooling 2D Layer (AvgPool2d)
|
|
150
|
+
- [x] Concatenate Layer
|
|
151
|
+
- [x] Add Layer
|
|
152
|
+
- [x] Long Short-Term Memory (LSTM) Layer
|
|
153
|
+
- [ ] Dropout Layer
|
|
154
|
+
- [ ] Embedding Layer
|
|
155
|
+
- [ ] EmbeddingBag Layer
|
|
156
|
+
- [ ] 1d Convolution Layer (Conv1d)
|
|
157
|
+
- [ ] 1d Pooling Layers (AvgPool1d,MaxPool1d,AdaptiveAvgPool1d,AdaptiveMaxPool1d)
|
|
158
|
+
- [ ] Transpose Convolution Layers (ConvTranspose2d,ConvTranspose1d)
|
|
159
|
+
- [ ] Global Max Pooling 2D Layer (AdaptiveMaxPool2d)
|
|
160
|
+
- [ ] Other Custom Layers
|
|
161
|
+
|
|
162
|
+
|
|
163
|
+
## Getting Started
|
|
164
|
+
If you are new to Backtrace, head over to our Getting Started Guide to quickly set up and use the module in your projects.
|
|
165
|
+
|
|
166
|
+
## Contributing
|
|
167
|
+
We welcome contributions from the community. To contribute, please follow our Contribution Guidelines.
|
|
168
|
+
|
|
169
|
+
## License
|
|
170
|
+
This project is licensed under the MIT License - see the LICENSE file for details.
|
|
171
|
+
|
|
172
|
+
## Contact
|
|
173
|
+
For any inquiries or support, please contact AryaXAI Support.
|
|
@@ -0,0 +1,156 @@
|
|
|
1
|
+
# AryaXai-Backtrace
|
|
2
|
+
Backtrace module for Generating Explainability on Deep learning models using TensorFlow / Pytorch
|
|
3
|
+
|
|
4
|
+
# Backtrace Module
|
|
5
|
+
[](LICENSE)
|
|
6
|
+
|
|
7
|
+
## Overview
|
|
8
|
+
|
|
9
|
+
The Backtrace Module is a powerful and patent-pending algorithm developed by AryaXAI for enhancing the explainability of AI models, particularly in the context of complex techniques like deep learning.
|
|
10
|
+
|
|
11
|
+
## Features
|
|
12
|
+
|
|
13
|
+
- **Explainability:** Gain deep insights into your AI models by using the Backtrace algorithm, providing multiple explanations for their decisions.
|
|
14
|
+
|
|
15
|
+
- **Consistency:** Ensure consistent and accurate explanations across different scenarios and use cases.
|
|
16
|
+
|
|
17
|
+
- **Mission-Critical Support:** Tailored for mission-critical AI use cases where transparency is paramount.
|
|
18
|
+
|
|
19
|
+
## Installation
|
|
20
|
+
|
|
21
|
+
To integrate the Backtrace Module into your project, follow these simple steps:
|
|
22
|
+
|
|
23
|
+
```bash
|
|
24
|
+
pip install dl-backtrace
|
|
25
|
+
```
|
|
26
|
+
|
|
27
|
+
## Usage
|
|
28
|
+
|
|
29
|
+
### TensorFlow-Keras based models
|
|
30
|
+
|
|
31
|
+
```python
|
|
32
|
+
from dl_backtrace.tf_backtrace import Backtrace as B
|
|
33
|
+
```
|
|
34
|
+
|
|
35
|
+
### Pytorch based models
|
|
36
|
+
|
|
37
|
+
```python
|
|
38
|
+
from dl_backtrace.pytorch_backtrace import Backtrace as B
|
|
39
|
+
```
|
|
40
|
+
|
|
41
|
+
### Evaluating using Backtrace:
|
|
42
|
+
|
|
43
|
+
1. Step - 1: Initialize a Backtrace Object using your Model
|
|
44
|
+
```python
|
|
45
|
+
backtrace = B(model=model)
|
|
46
|
+
```
|
|
47
|
+
|
|
48
|
+
2. Step - 2: Calculate layer-wise output using a data instance
|
|
49
|
+
|
|
50
|
+
```python
|
|
51
|
+
layer_outputs = backtrace.predict(test_data[0])
|
|
52
|
+
```
|
|
53
|
+
|
|
54
|
+
3. Step - 3: Calculate layer-wise Relevance using Evaluation
|
|
55
|
+
```python
|
|
56
|
+
relevance = backtrace.eval(layer_outputs,mode='default',scaler=1,thresholding=0.5,task="binary-classification")
|
|
57
|
+
```
|
|
58
|
+
|
|
59
|
+
#### Depending on Task we have several attributes for Relevance Calculation in Evaluation:
|
|
60
|
+
|
|
61
|
+
| Attribute | Description | Values |
|
|
62
|
+
|--------------|-------------|--------|
|
|
63
|
+
| mode | evaluation mode of algorithm | { default, contrastive}|
|
|
64
|
+
| scaler | Total / Starting Relevance at the Last Layer | Integer ( Default: None, Preferred: 1)|
|
|
65
|
+
| thresholding | Thresholding Model Prediction in Segmentation Task to select Pixels predicting the actual class. (Only works in Segmentation Tasks) | Default:0.5 |
|
|
66
|
+
| task | The task of the Model | { binary-classification, multi-class classification, bbox-regression, binary-segmentation} |
|
|
67
|
+
| model-type | Type of the Model | {Encoder/ Encoder_Decoder} |
|
|
68
|
+
|
|
69
|
+
## Example Notebooks :
|
|
70
|
+
|
|
71
|
+
### Tensorflow-Keras :
|
|
72
|
+
|
|
73
|
+
| Name | Task | Link |
|
|
74
|
+
|-------------|-------------|-------------------------------|
|
|
75
|
+
| Backtrace Loan Classification Tabular Dataset | Binary Classification | [Colab Link](https://colab.research.google.com/drive/1H5jaryVPEAQuqk9XPP71UIL4cemli98K?usp=sharing) |
|
|
76
|
+
| Backtrace Image FMNIST Dataset | Multi-Class Classification | [Colab Link](https://colab.research.google.com/drive/1BZsdo7IWYGhdy0Pg_m8r7c3COczuW_tG?usp=sharing) |
|
|
77
|
+
| Backtrace CUB Bounding Box Regression Image Dataset | Single Object Detection | [Colab Link](https://colab.research.google.com/drive/15mmJ2aGt-_Ho7RdPWjNEEoFXE9mu9HLV?usp=sharing) |
|
|
78
|
+
| Backtrace Next Word Generation Textual Dataset | Next Word Generation | [Colab Link](https://colab.research.google.com/drive/1iRMMcEm4iMVuk236vDtEB6rKsirny8cB?usp=sharing) |
|
|
79
|
+
| Backtrace ImDB Sentiment Classification Textual Dataset | Sentiment Classification | [Colab Link](https://colab.research.google.com/drive/1L5nEMO6H8pbGo1Opd9S4mbYPMmq5Vl-M?usp=sharing)|
|
|
80
|
+
| Backtrace Binary Classification Textual Dataset | Binary Classification | [Colab Link](https://colab.research.google.com/drive/1PxFY4hEhcIr4nTVyfwLE_29CzQTCD3dA?usp=sharing) |
|
|
81
|
+
| Backtrace Multi-Class NewsGroup20 Classification Textual Dataset | Multi-Class Classification | [Colab Link](https://colab.research.google.com/drive/1u3B18TZwfTdYJeYBGcHQ0T3fzfM2USGT?usp=sharing) |
|
|
82
|
+
| Backtrace CVC-ClinicDB Colonoscopy Binary Segmentation | Organ Segmentation | [Colab Link](https://colab.research.google.com/drive/1cUNUao7fahDgndVI-cpn2iSByTiWaB4j?usp=sharing) |
|
|
83
|
+
| Backtrace CamVid Road Car Binary Segmentation | Binary Segmentation | [Colab Link](https://colab.research.google.com/drive/1OAY7aAraKq_ucyVt5AYPBD8LkQOIuy1C?usp=sharing) |
|
|
84
|
+
| Backtrace Transformer Encoder for Sentiment Analysis | Binary Classification | [Colab Link](https://colab.research.google.com/drive/1H7-4ox3YWMtoH0vptYGXaN63PRJFbTrX?usp=sharing) |
|
|
85
|
+
| Backtrace Transformer Encoder-Decoder Model for Neural Machine Translation | Neural Machine Translation | [Colab Link](https://colab.research.google.com/drive/1NApbrd11TEqlrqGCBYPmgMvBbZBJhpWD?usp=sharing) |
|
|
86
|
+
| Backtrace Transformer Encoder-Decoder Model for Text Summarization | Text Summarization | [Colab Link](https://colab.research.google.com/drive/18CPNnEJzGlCPJ2sSXX4mArAzK1NLe9Lj?usp=sharing) |
|
|
87
|
+
|
|
88
|
+
### Pytorch :
|
|
89
|
+
| Name | Task | Link |
|
|
90
|
+
|-------------|-------------|-------------------------------|
|
|
91
|
+
| Backtrace Tabular Dataset | Binary Classification | [Colab Link](https://colab.research.google.com/drive/1_r-IS7aIuATSvGNRLk8VDVVLkDSaKCpD?usp=sharing)|
|
|
92
|
+
| Backtrace Image Dataset | Multi-Class Classification | [Colab Link](https://colab.research.google.com/drive/1v2XajWtIbf7Vt31Z1fnKnAjyiDzPxwnU?usp=sharing) |
|
|
93
|
+
|
|
94
|
+
For more detailed examples and use cases, check out our documentation.
|
|
95
|
+
|
|
96
|
+
## Supported Layers and Future Work :
|
|
97
|
+
|
|
98
|
+
### Tensorflow-Keras:
|
|
99
|
+
|
|
100
|
+
- [x] Dense (Fully Connected) Layer
|
|
101
|
+
- [x] Convolutional Layer (Conv2D,Conv1D)
|
|
102
|
+
- [x] Transpose Convolutional Layer (Conv2DTranspose,Conv1DTranspose)
|
|
103
|
+
- [x] Reshape Layer
|
|
104
|
+
- [x] Flatten Layer
|
|
105
|
+
- [x] Global Max Pooling (2D & 1D) Layer
|
|
106
|
+
- [x] Global Average Pooling (2D & 1D) Layer
|
|
107
|
+
- [x] Max Pooling (2D & 1D) Layer
|
|
108
|
+
- [x] Average Pooling (2D & 1D) Layer
|
|
109
|
+
- [x] Concatenate Layer
|
|
110
|
+
- [x] Add Layer
|
|
111
|
+
- [x] Long Short-Term Memory (LSTM) Layer
|
|
112
|
+
- [x] Dropout Layer
|
|
113
|
+
- [x] Embedding Layer
|
|
114
|
+
- [x] TextVectorization Layer
|
|
115
|
+
- [x] Self-Attention Layer
|
|
116
|
+
- [x] Cross-Attention Layer
|
|
117
|
+
- [x] Feed-Forward Layer
|
|
118
|
+
- [x] Pooler Layer
|
|
119
|
+
- [x] Decoder LM (Language Model) Head
|
|
120
|
+
- [ ] Other Custom Layers
|
|
121
|
+
|
|
122
|
+
### Pytorch :
|
|
123
|
+
|
|
124
|
+
(Note: Currently we only Support Binary and Multi-Class Classification in Pytorch, Segmentation and Single Object Detection will be supported in the next release.)
|
|
125
|
+
|
|
126
|
+
- [x] Linear (Fully Connected) Layer
|
|
127
|
+
- [x] Convolutional Layer (Conv2D)
|
|
128
|
+
- [x] Reshape Layer
|
|
129
|
+
- [x] Flatten Layer
|
|
130
|
+
- [x] Global Average Pooling 2D Layer (AdaptiveAvgPool2d)
|
|
131
|
+
- [x] Max Pooling 2D Layer (MaxPool2d)
|
|
132
|
+
- [x] Average Pooling 2D Layer (AvgPool2d)
|
|
133
|
+
- [x] Concatenate Layer
|
|
134
|
+
- [x] Add Layer
|
|
135
|
+
- [x] Long Short-Term Memory (LSTM) Layer
|
|
136
|
+
- [ ] Dropout Layer
|
|
137
|
+
- [ ] Embedding Layer
|
|
138
|
+
- [ ] EmbeddingBag Layer
|
|
139
|
+
- [ ] 1d Convolution Layer (Conv1d)
|
|
140
|
+
- [ ] 1d Pooling Layers (AvgPool1d,MaxPool1d,AdaptiveAvgPool1d,AdaptiveMaxPool1d)
|
|
141
|
+
- [ ] Transpose Convolution Layers (ConvTranspose2d,ConvTranspose1d)
|
|
142
|
+
- [ ] Global Max Pooling 2D Layer (AdaptiveMaxPool2d)
|
|
143
|
+
- [ ] Other Custom Layers
|
|
144
|
+
|
|
145
|
+
|
|
146
|
+
## Getting Started
|
|
147
|
+
If you are new to Backtrace, head over to our Getting Started Guide to quickly set up and use the module in your projects.
|
|
148
|
+
|
|
149
|
+
## Contributing
|
|
150
|
+
We welcome contributions from the community. To contribute, please follow our Contribution Guidelines.
|
|
151
|
+
|
|
152
|
+
## License
|
|
153
|
+
This project is licensed under the MIT License - see the LICENSE file for details.
|
|
154
|
+
|
|
155
|
+
## Contact
|
|
156
|
+
For any inquiries or support, please contact AryaXAI Support.
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
|
{dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/tf_backtrace/backtrace/backtrace.py
RENAMED
|
@@ -51,6 +51,7 @@ class Backtrace(object):
|
|
|
51
51
|
self.model_weights = EN.extract_encoder_weights(model)
|
|
52
52
|
# # calculate the output of each submodule of the encoder model
|
|
53
53
|
# self.all_out_model = EN.create_encoder_output(model)
|
|
54
|
+
self.activation_dict = None
|
|
54
55
|
elif model_type == 'encoder_decoder':
|
|
55
56
|
# create a tree-like structure and layer_stack for encoder-decoder model
|
|
56
57
|
self.model_resource, self.layer_stack = ED.build_enc_dec_tree(model)
|
|
@@ -58,6 +59,7 @@ class Backtrace(object):
|
|
|
58
59
|
self.model_weights = ED.extract_encoder_decoder_weights(model)
|
|
59
60
|
# # calculate the output of each submodule of the encoder-decoder model
|
|
60
61
|
# self.all_out_model = ED.calculate_encoder_decoder_output(model)
|
|
62
|
+
self.activation_dict = None
|
|
61
63
|
|
|
62
64
|
else:
|
|
63
65
|
self.create_tree(model.layers)
|
|
@@ -227,7 +229,7 @@ class Backtrace(object):
|
|
|
227
229
|
all_wt = {}
|
|
228
230
|
if len(start_wt) == 0:
|
|
229
231
|
if self.model_type == 'encoder':
|
|
230
|
-
start_wt = UP.
|
|
232
|
+
start_wt = UP.calculate_start_wt(all_out[out_layer])
|
|
231
233
|
all_wt[out_layer] = start_wt * multiplier
|
|
232
234
|
layer_stack = self.layer_stack
|
|
233
235
|
all_wts = self.model_weights
|
|
@@ -439,7 +441,7 @@ class Backtrace(object):
|
|
|
439
441
|
all_wt[child_nodes[0]] = all_wt[child_nodes[0]] + temp_wt.sum()
|
|
440
442
|
elif model_resource["graph"][start_layer]["class"] == "Self_Attention":
|
|
441
443
|
weights = all_wts[start_layer]
|
|
442
|
-
self_attention_weights = HP.
|
|
444
|
+
self_attention_weights = HP.rename_self_attention_keys(weights)
|
|
443
445
|
temp_wt = UP.calculate_wt_self_attention(
|
|
444
446
|
all_wt[start_layer],
|
|
445
447
|
all_out[child_nodes[0]][0],
|
|
@@ -447,7 +449,7 @@ class Backtrace(object):
|
|
|
447
449
|
)
|
|
448
450
|
all_wt[child_nodes[0]] += temp_wt
|
|
449
451
|
elif model_resource["graph"][start_layer]["class"] == 'Residual':
|
|
450
|
-
temp_wt = UP.
|
|
452
|
+
temp_wt = UP.calculate_wt_residual(
|
|
451
453
|
all_wt[start_layer],
|
|
452
454
|
[all_out[ch] for ch in child_nodes],
|
|
453
455
|
)
|
|
@@ -1108,6 +1108,59 @@ def stabilize(matrix, epsilon=1e-6):
|
|
|
1108
1108
|
return matrix + epsilon * np.sign(matrix)
|
|
1109
1109
|
|
|
1110
1110
|
|
|
1111
|
+
def calculate_wt_residual(wts, inp=None):
    """Redistribute the relevance ``wts`` of a residual/Add output onto its inputs.

    For every flattened output position, the relevance is split between the
    positive and the negative contributions across the inputs, proportional to
    each contribution's share of the total magnitude at that position.

    Args:
        wts: relevance of the layer output; a numpy array or any object
            exposing a ``.numpy()`` method (e.g. an eager ``tf.Tensor``).
        inp: iterable of the layer's input arrays/tensors, each the same shape
            as ``wts``. Effectively required; the ``None`` default is kept only
            for interface compatibility and would raise ``TypeError``.

    Returns:
        list of numpy arrays, one per input, each shaped like ``wts``, holding
        the relevance assigned to that input.
    """
    # Accept framework tensors without importing the framework in this block:
    # anything with a .numpy() method is converted (covers eager tf.Tensor).
    if hasattr(wts, "numpy"):
        wts = wts.numpy()
    inp = [i.numpy() if hasattr(i, "numpy") else i for i in inp]

    # Flatten wts and every input into read-only 1-D views so each flattened
    # position can be processed independently.
    expanded_wts = as_strided(
        wts,
        shape=(np.prod(wts.shape),),
        strides=(wts.strides[-1],),
        writeable=False,  # read-only view: never write through the stride trick
    )
    inp_list = []
    wt_mat = []
    for x in inp:
        expanded_input = as_strided(
            x,
            shape=(np.prod(x.shape),),
            strides=(x.strides[-1],),
            writeable=False,
        )
        inp_list.append(expanded_input)
        # NOTE(review): relevance is accumulated into zeros_like(input), so an
        # integer-typed input would truncate fractional relevance — confirm
        # inputs are float upstream.
        wt_mat.append(np.zeros_like(expanded_input))

    wt_mat = np.array(wt_mat)      # (n_inputs, n_elements)
    inp_list = np.array(inp_list)  # (n_inputs, n_elements)

    for i in range(wt_mat.shape[1]):
        wt_ind1 = wt_mat[:, i]   # view into column i of wt_mat
        wt = expanded_wts[i]
        l1_ind1 = inp_list[:, i]
        p_ind = l1_ind1 > 0
        n_ind = l1_ind1 < 0
        p_sum = np.sum(l1_ind1[p_ind])
        n_sum = np.sum(l1_ind1[n_ind]) * -1  # magnitude of negative mass
        p_agg_wt = 0
        n_agg_wt = 0
        if p_sum + n_sum > 0:
            p_agg_wt = p_sum / (p_sum + n_sum)
            n_agg_wt = n_sum / (p_sum + n_sum)
        # Guard against division by zero when one side has no contribution.
        if p_sum == 0:
            p_sum = 1
        if n_sum == 0:
            n_sum = 1
        wt_ind1[p_ind] = (l1_ind1[p_ind] / p_sum) * wt * p_agg_wt
        wt_ind1[n_ind] = (l1_ind1[n_ind] / n_sum) * wt * n_agg_wt * -1.0
        wt_mat[:, i] = wt_ind1

    # Restore each input's relevance to the original tensor shape.
    return [m.reshape(wts.shape) for m in wt_mat]
|
|
1162
|
+
|
|
1163
|
+
|
|
1111
1164
|
def calculate_relevance_V(wts, value_output):
|
|
1112
1165
|
# Initialize wt_mat with zeros
|
|
1113
1166
|
wt_mat_V = np.zeros((wts.shape[0], wts.shape[1], *value_output.shape))
|
|
@@ -12,5 +12,5 @@ __version__: str
|
|
|
12
12
|
__version_tuple__: VERSION_TUPLE
|
|
13
13
|
version_tuple: VERSION_TUPLE
|
|
14
14
|
|
|
15
|
-
__version__ = version = '0.0.
|
|
16
|
-
__version_tuple__ = version_tuple = (0, 0,
|
|
15
|
+
__version__ = version = '0.0.18'
|
|
16
|
+
__version_tuple__ = version_tuple = (0, 0, 18)
|
|
@@ -0,0 +1,173 @@
|
|
|
1
|
+
Metadata-Version: 2.1
|
|
2
|
+
Name: dl_backtrace
|
|
3
|
+
Version: 0.0.18
|
|
4
|
+
Summary: A python SDK for Deep Learning Backtrace
|
|
5
|
+
Home-page: https://xai.arya.ai/docs/introduction
|
|
6
|
+
License: MIT
|
|
7
|
+
Keywords: aryaxai deep learning backtrace,ML observability
|
|
8
|
+
Classifier: Intended Audience :: Developers
|
|
9
|
+
Classifier: Intended Audience :: Science/Research
|
|
10
|
+
Classifier: Programming Language :: Python :: 3
|
|
11
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
12
|
+
Classifier: Operating System :: OS Independent
|
|
13
|
+
Requires-Python: >=3.0
|
|
14
|
+
Description-Content-Type: text/markdown
|
|
15
|
+
License-File: LICENSE
|
|
16
|
+
Requires-Dist: tensorflow==2.14.0
|
|
17
|
+
|
|
18
|
+
# AryaXai-Backtrace
|
|
19
|
+
Backtrace module for Generating Explainability on Deep learning models using TensorFlow / Pytorch
|
|
20
|
+
|
|
21
|
+
# Backtrace Module
|
|
22
|
+
[](LICENSE)
|
|
23
|
+
|
|
24
|
+
## Overview
|
|
25
|
+
|
|
26
|
+
The Backtrace Module is a powerful and patent-pending algorithm developed by AryaXAI for enhancing the explainability of AI models, particularly in the context of complex techniques like deep learning.
|
|
27
|
+
|
|
28
|
+
## Features
|
|
29
|
+
|
|
30
|
+
- **Explainability:** Gain deep insights into your AI models by using the Backtrace algorithm, providing multiple explanations for their decisions.
|
|
31
|
+
|
|
32
|
+
- **Consistency:** Ensure consistent and accurate explanations across different scenarios and use cases.
|
|
33
|
+
|
|
34
|
+
- **Mission-Critical Support:** Tailored for mission-critical AI use cases where transparency is paramount.
|
|
35
|
+
|
|
36
|
+
## Installation
|
|
37
|
+
|
|
38
|
+
To integrate the Backtrace Module into your project, follow these simple steps:
|
|
39
|
+
|
|
40
|
+
```bash
|
|
41
|
+
pip install dl-backtrace
|
|
42
|
+
```
|
|
43
|
+
|
|
44
|
+
## Usage
|
|
45
|
+
|
|
46
|
+
### TensorFlow-Keras based models
|
|
47
|
+
|
|
48
|
+
```python
|
|
49
|
+
from dl_backtrace.tf_backtrace import Backtrace as B
|
|
50
|
+
```
|
|
51
|
+
|
|
52
|
+
### Pytorch based models
|
|
53
|
+
|
|
54
|
+
```python
|
|
55
|
+
from dl_backtrace.pytorch_backtrace import Backtrace as B
|
|
56
|
+
```
|
|
57
|
+
|
|
58
|
+
### Evaluating using Backtrace:
|
|
59
|
+
|
|
60
|
+
1. Step - 1: Initialize a Backtrace Object using your Model
|
|
61
|
+
```python
|
|
62
|
+
backtrace = B(model=model)
|
|
63
|
+
```
|
|
64
|
+
|
|
65
|
+
2. Step - 2: Calculate layer-wise output using a data instance
|
|
66
|
+
|
|
67
|
+
```python
|
|
68
|
+
layer_outputs = backtrace.predict(test_data[0])
|
|
69
|
+
```
|
|
70
|
+
|
|
71
|
+
3. Step - 3: Calculate layer-wise Relevance using Evaluation
|
|
72
|
+
```python
|
|
73
|
+
relevance = backtrace.eval(layer_outputs,mode='default',scaler=1,thresholding=0.5,task="binary-classification")
|
|
74
|
+
```
|
|
75
|
+
|
|
76
|
+
#### Depending on Task we have several attributes for Relevance Calculation in Evaluation:
|
|
77
|
+
|
|
78
|
+
| Attribute | Description | Values |
|
|
79
|
+
|--------------|-------------|--------|
|
|
80
|
+
| mode | evaluation mode of algorithm | { default, contrastive}|
|
|
81
|
+
| scaler | Total / Starting Relevance at the Last Layer | Integer ( Default: None, Preferred: 1)|
|
|
82
|
+
| thresholding | Thresholding Model Prediction in Segmentation Task to select Pixels predicting the actual class. (Only works in Segmentation Tasks) | Default:0.5 |
|
|
83
|
+
| task | The task of the Model | { binary-classification, multi-class classification, bbox-regression, binary-segmentation} |
|
|
84
|
+
| model-type | Type of the Model | {Encoder/ Encoder_Decoder} |
|
|
85
|
+
|
|
86
|
+
## Example Notebooks :
|
|
87
|
+
|
|
88
|
+
### Tensorflow-Keras :
|
|
89
|
+
|
|
90
|
+
| Name | Task | Link |
|
|
91
|
+
|-------------|-------------|-------------------------------|
|
|
92
|
+
| Backtrace Loan Classification Tabular Dataset | Binary Classification | [Colab Link](https://colab.research.google.com/drive/1H5jaryVPEAQuqk9XPP71UIL4cemli98K?usp=sharing) |
|
|
93
|
+
| Backtrace Image FMNIST Dataset | Multi-Class Classification | [Colab Link](https://colab.research.google.com/drive/1BZsdo7IWYGhdy0Pg_m8r7c3COczuW_tG?usp=sharing) |
|
|
94
|
+
| Backtrace CUB Bounding Box Regression Image Dataset | Single Object Detection | [Colab Link](https://colab.research.google.com/drive/15mmJ2aGt-_Ho7RdPWjNEEoFXE9mu9HLV?usp=sharing) |
|
|
95
|
+
| Backtrace Next Word Generation Textual Dataset | Next Word Generation | [Colab Link](https://colab.research.google.com/drive/1iRMMcEm4iMVuk236vDtEB6rKsirny8cB?usp=sharing) |
|
|
96
|
+
| Backtrace ImDB Sentiment Classification Textual Dataset | Sentiment Classification | [Colab Link](https://colab.research.google.com/drive/1L5nEMO6H8pbGo1Opd9S4mbYPMmq5Vl-M?usp=sharing)|
|
|
97
|
+
| Backtrace Binary Classification Textual Dataset | Binary Classification | [Colab Link](https://colab.research.google.com/drive/1PxFY4hEhcIr4nTVyfwLE_29CzQTCD3dA?usp=sharing) |
|
|
98
|
+
| Backtrace Multi-Class NewsGroup20 Classification Textual Dataset | Multi-Class Classification | [Colab Link](https://colab.research.google.com/drive/1u3B18TZwfTdYJeYBGcHQ0T3fzfM2USGT?usp=sharing) |
|
|
99
|
+
| Backtrace CVC-ClinicDB Colonoscopy Binary Segmentation | Organ Segmentation | [Colab Link](https://colab.research.google.com/drive/1cUNUao7fahDgndVI-cpn2iSByTiWaB4j?usp=sharing) |
|
|
100
|
+
| Backtrace CamVid Road Car Binary Segmentation | Binary Segmentation | [Colab Link](https://colab.research.google.com/drive/1OAY7aAraKq_ucyVt5AYPBD8LkQOIuy1C?usp=sharing) |
|
|
101
|
+
| Backtrace Transformer Encoder for Sentiment Analysis | Binary Classification | [Colab Link](https://colab.research.google.com/drive/1H7-4ox3YWMtoH0vptYGXaN63PRJFbTrX?usp=sharing) |
|
|
102
|
+
| Backtrace Transformer Encoder-Decoder Model for Neural Machine Translation | Neural Machine Translation | [Colab Link](https://colab.research.google.com/drive/1NApbrd11TEqlrqGCBYPmgMvBbZBJhpWD?usp=sharing) |
|
|
103
|
+
| Backtrace Transformer Encoder-Decoder Model for Text Summarization | Text Summarization | [Colab Link](https://colab.research.google.com/drive/18CPNnEJzGlCPJ2sSXX4mArAzK1NLe9Lj?usp=sharing) |
|
|
104
|
+
|
|
105
|
+
### Pytorch :
|
|
106
|
+
| Name | Task | Link |
|
|
107
|
+
|-------------|-------------|-------------------------------|
|
|
108
|
+
| Backtrace Tabular Dataset | Binary Classification | [Colab Link](https://colab.research.google.com/drive/1_r-IS7aIuATSvGNRLk8VDVVLkDSaKCpD?usp=sharing)|
|
|
109
|
+
| Backtrace Image Dataset | Multi-Class Classification | [Colab Link](https://colab.research.google.com/drive/1v2XajWtIbf7Vt31Z1fnKnAjyiDzPxwnU?usp=sharing) |
|
|
110
|
+
|
|
111
|
+
For more detailed examples and use cases, check out our documentation.
|
|
112
|
+
|
|
113
|
+
## Supported Layers and Future Work :
|
|
114
|
+
|
|
115
|
+
### Tensorflow-Keras:
|
|
116
|
+
|
|
117
|
+
- [x] Dense (Fully Connected) Layer
|
|
118
|
+
- [x] Convolutional Layer (Conv2D,Conv1D)
|
|
119
|
+
- [x] Transpose Convolutional Layer (Conv2DTranspose,Conv1DTranspose)
|
|
120
|
+
- [x] Reshape Layer
|
|
121
|
+
- [x] Flatten Layer
|
|
122
|
+
- [x] Global Max Pooling (2D & 1D) Layer
|
|
123
|
+
- [x] Global Average Pooling (2D & 1D) Layer
|
|
124
|
+
- [x] Max Pooling (2D & 1D) Layer
|
|
125
|
+
- [x] Average Pooling (2D & 1D) Layer
|
|
126
|
+
- [x] Concatenate Layer
|
|
127
|
+
- [x] Add Layer
|
|
128
|
+
- [x] Long Short-Term Memory (LSTM) Layer
|
|
129
|
+
- [x] Dropout Layer
|
|
130
|
+
- [x] Embedding Layer
|
|
131
|
+
- [x] TextVectorization Layer
|
|
132
|
+
- [x] Self-Attention Layer
|
|
133
|
+
- [x] Cross-Attention Layer
|
|
134
|
+
- [x] Feed-Forward Layer
|
|
135
|
+
- [x] Pooler Layer
|
|
136
|
+
- [x] Decoder LM (Language Model) Head
|
|
137
|
+
- [ ] Other Custom Layers
|
|
138
|
+
|
|
139
|
+
### Pytorch :
|
|
140
|
+
|
|
141
|
+
(Note: Currently we only support Binary and Multi-Class Classification in Pytorch; Segmentation and Single Object Detection will be supported in the next release.)
|
|
142
|
+
|
|
143
|
+
- [x] Linear (Fully Connected) Layer
|
|
144
|
+
- [x] Convolutional Layer (Conv2D)
|
|
145
|
+
- [x] Reshape Layer
|
|
146
|
+
- [x] Flatten Layer
|
|
147
|
+
- [x] Global Average Pooling 2D Layer (AdaptiveAvgPool2d)
|
|
148
|
+
- [x] Max Pooling 2D Layer (MaxPool2d)
|
|
149
|
+
- [x] Average Pooling 2D Layer (AvgPool2d)
|
|
150
|
+
- [x] Concatenate Layer
|
|
151
|
+
- [x] Add Layer
|
|
152
|
+
- [x] Long Short-Term Memory (LSTM) Layer
|
|
153
|
+
- [ ] Dropout Layer
|
|
154
|
+
- [ ] Embedding Layer
|
|
155
|
+
- [ ] EmbeddingBag Layer
|
|
156
|
+
- [ ] 1d Convolution Layer (Conv1d)
|
|
157
|
+
- [ ] 1d Pooling Layers (AvgPool1d,MaxPool1d,AdaptiveAvgPool1d,AdaptiveMaxPool1d)
|
|
158
|
+
- [ ] Transpose Convolution Layers (ConvTranspose2d,ConvTranspose1d)
|
|
159
|
+
- [ ] Global Max Pooling 2D Layer (AdaptiveMaxPool2d)
|
|
160
|
+
- [ ] Other Custom Layers
|
|
161
|
+
|
|
162
|
+
|
|
163
|
+
## Getting Started
|
|
164
|
+
If you are new to Backtrace, head over to our Getting Started Guide to quickly set up and use the module in your projects.
|
|
165
|
+
|
|
166
|
+
## Contributing
|
|
167
|
+
We welcome contributions from the community. To contribute, please follow our Contribution Guidelines.
|
|
168
|
+
|
|
169
|
+
## License
|
|
170
|
+
This project is licensed under the MIT License - see the LICENSE file for details.
|
|
171
|
+
|
|
172
|
+
## Contact
|
|
173
|
+
For any inquiries or support, please contact AryaXAI Support.
|
|
@@ -15,6 +15,7 @@ dl_backtrace.egg-info/SOURCES.txt
|
|
|
15
15
|
dl_backtrace.egg-info/dependency_links.txt
|
|
16
16
|
dl_backtrace.egg-info/requires.txt
|
|
17
17
|
dl_backtrace.egg-info/top_level.txt
|
|
18
|
+
dl_backtrace/old_backtrace/__init__.py
|
|
18
19
|
dl_backtrace/old_backtrace/pytorch_backtrace/__init__.py
|
|
19
20
|
dl_backtrace/old_backtrace/pytorch_backtrace/backtrace/__init__.py
|
|
20
21
|
dl_backtrace/old_backtrace/pytorch_backtrace/backtrace/backtrace.py
|
|
@@ -1,102 +0,0 @@
|
|
|
1
|
-
Metadata-Version: 2.1
|
|
2
|
-
Name: dl_backtrace
|
|
3
|
-
Version: 0.0.16.dev4
|
|
4
|
-
Summary: A python SDK for Deep Learning Backtrace
|
|
5
|
-
Home-page: https://xai.arya.ai/docs/introduction
|
|
6
|
-
License: MIT
|
|
7
|
-
Keywords: aryaxai deep learning backtrace,ML observability
|
|
8
|
-
Classifier: Intended Audience :: Developers
|
|
9
|
-
Classifier: Intended Audience :: Science/Research
|
|
10
|
-
Classifier: Programming Language :: Python :: 3
|
|
11
|
-
Classifier: License :: OSI Approved :: MIT License
|
|
12
|
-
Classifier: Operating System :: OS Independent
|
|
13
|
-
Requires-Python: >=3.0
|
|
14
|
-
Description-Content-Type: text/markdown
|
|
15
|
-
License-File: LICENSE
|
|
16
|
-
Requires-Dist: tensorflow==2.14.0
|
|
17
|
-
|
|
18
|
-
# AryaXai-Backtrace
|
|
19
|
-
Backtrace module for Generating Explainability on Deep learning models using TensorFlow / Pytorch
|
|
20
|
-
|
|
21
|
-
# Backtrace Module
|
|
22
|
-
[](LICENSE)
|
|
23
|
-
|
|
24
|
-
## Overview
|
|
25
|
-
|
|
26
|
-
The Backtrace Module is a powerful and patent-pending algorithm developed by AryaXAI for enhancing the explainability of AI models, particularly in the context of complex techniques like deep learning.
|
|
27
|
-
|
|
28
|
-
## Features
|
|
29
|
-
|
|
30
|
-
- **Explainability:** Gain deep insights into your AI models by using the Backtrace algorithm, providing multiple explanations for their decisions.
|
|
31
|
-
|
|
32
|
-
- **Consistency:** Ensure consistent and accurate explanations across different scenarios and use cases.
|
|
33
|
-
|
|
34
|
-
- **Mission-Critical Support:** Tailored for mission-critical AI use cases where transparency is paramount.
|
|
35
|
-
|
|
36
|
-
## Installation
|
|
37
|
-
|
|
38
|
-
To integrate the Backtrace Module into your project, follow these simple steps:
|
|
39
|
-
|
|
40
|
-
```bash
|
|
41
|
-
pip install dl-backtrace
|
|
42
|
-
```
|
|
43
|
-
|
|
44
|
-
usage for Tensoflow based models
|
|
45
|
-
|
|
46
|
-
```python
|
|
47
|
-
from dl_backtrace.tf_backtrace import Backtrace as B
|
|
48
|
-
from dl_backtrace.tf_backtrace import contrast as UC
|
|
49
|
-
from dl_backtrace.tf_backtrace import prop as UP
|
|
50
|
-
from dl_backtrace.tf_backtrace import activation_master
|
|
51
|
-
```
|
|
52
|
-
|
|
53
|
-
usage for Pytorch based models
|
|
54
|
-
|
|
55
|
-
```python
|
|
56
|
-
from dl_backtrace.pytorch_backtrace import Backtrace as B
|
|
57
|
-
from dl_backtrace.pytorch_backtrace import contrast as UC
|
|
58
|
-
from dl_backtrace.pytorch_backtrace import prop as UP
|
|
59
|
-
from dl_backtrace.pytorch_backtrace import activation_master
|
|
60
|
-
```
|
|
61
|
-
|
|
62
|
-
## Example Notebooks
|
|
63
|
-
|
|
64
|
-
| Name | Link |
|
|
65
|
-
|-------------|-------------------------------|
|
|
66
|
-
| Tensorflow Backtrace Tabular Dataset | [Colab Link](https://colab.research.google.com/drive/1A4J-wgShD7M_pUmsqbnI8BD3hE43dT8o?usp=sharing) |
|
|
67
|
-
| Tensorflow Backtrace Textual Dataset | [Colab Link](https://colab.research.google.com/drive/1zT_K8mHdzyfQe_LG576qwiBqw8o6LRQH?usp=sharing) |
|
|
68
|
-
| Tensorflow Backtrace Image Dataset | [Colab Link](https://colab.research.google.com/drive/1KbLtcjYDrPQvG6oJj1wmHdiWxRrtKNrV?usp=sharing) |
|
|
69
|
-
| Pytorch Backtrace Tabular Dataset | [Colab Link](https://colab.research.google.com/drive/1Z4UJNFd83dwXBMM0cmiNYEjh6xhRtQA_?usp=sharing) |
|
|
70
|
-
| Pytorch Backtrace Image Dataset | [Colab Link](https://colab.research.google.com/drive/14XKwCsS9IZep2AlDDYfavnVRNz8_b-jM?usp=sharing) |
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
For more detailed examples and use cases, check out our documentation.
|
|
74
|
-
|
|
75
|
-
## Supported Layers and Future Work
|
|
76
|
-
- [x] Dense (Fully Connected) Layer
|
|
77
|
-
- [x] Convolutional Layer (Conv2D)
|
|
78
|
-
- [x] Reshape Layer
|
|
79
|
-
- [x] Flatten Layer
|
|
80
|
-
- [x] Global Average Pooling 2D Layer
|
|
81
|
-
- [x] Max Pooling 2D Layer
|
|
82
|
-
- [x] Average Pooling 2D Layer
|
|
83
|
-
- [x] Concatenate Layer
|
|
84
|
-
- [x] Add Layer
|
|
85
|
-
- [x] Long Short-Term Memory (LSTM) Layer
|
|
86
|
-
- [x] Batch Normalisation Layer
|
|
87
|
-
- [x] Dropout Layer
|
|
88
|
-
- [ ] Embedding Layer
|
|
89
|
-
- [ ] Other Custom Layers
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
## Getting Started
|
|
93
|
-
If you are new to Backtrace, head over to our Getting Started Guide to quickly set up and use the module in your projects.
|
|
94
|
-
|
|
95
|
-
## Contributing
|
|
96
|
-
We welcome contributions from the community. To contribute, please follow our Contribution Guidelines.
|
|
97
|
-
|
|
98
|
-
## License
|
|
99
|
-
This project is licensed under the MIT License - see the LICENSE file for details.
|
|
100
|
-
|
|
101
|
-
## Contact
|
|
102
|
-
For any inquiries or support, please contact AryaXAI Support.
|
|
@@ -1,85 +0,0 @@
|
|
|
1
|
-
# AryaXai-Backtrace
|
|
2
|
-
Backtrace module for Generating Explainability on Deep learning models using TensorFlow / Pytorch
|
|
3
|
-
|
|
4
|
-
# Backtrace Module
|
|
5
|
-
[](LICENSE)
|
|
6
|
-
|
|
7
|
-
## Overview
|
|
8
|
-
|
|
9
|
-
The Backtrace Module is a powerful and patent-pending algorithm developed by AryaXAI for enhancing the explainability of AI models, particularly in the context of complex techniques like deep learning.
|
|
10
|
-
|
|
11
|
-
## Features
|
|
12
|
-
|
|
13
|
-
- **Explainability:** Gain deep insights into your AI models by using the Backtrace algorithm, providing multiple explanations for their decisions.
|
|
14
|
-
|
|
15
|
-
- **Consistency:** Ensure consistent and accurate explanations across different scenarios and use cases.
|
|
16
|
-
|
|
17
|
-
- **Mission-Critical Support:** Tailored for mission-critical AI use cases where transparency is paramount.
|
|
18
|
-
|
|
19
|
-
## Installation
|
|
20
|
-
|
|
21
|
-
To integrate the Backtrace Module into your project, follow these simple steps:
|
|
22
|
-
|
|
23
|
-
```bash
|
|
24
|
-
pip install dl-backtrace
|
|
25
|
-
```
|
|
26
|
-
|
|
27
|
-
usage for Tensoflow based models
|
|
28
|
-
|
|
29
|
-
```python
|
|
30
|
-
from dl_backtrace.tf_backtrace import Backtrace as B
|
|
31
|
-
from dl_backtrace.tf_backtrace import contrast as UC
|
|
32
|
-
from dl_backtrace.tf_backtrace import prop as UP
|
|
33
|
-
from dl_backtrace.tf_backtrace import activation_master
|
|
34
|
-
```
|
|
35
|
-
|
|
36
|
-
usage for Pytorch based models
|
|
37
|
-
|
|
38
|
-
```python
|
|
39
|
-
from dl_backtrace.pytorch_backtrace import Backtrace as B
|
|
40
|
-
from dl_backtrace.pytorch_backtrace import contrast as UC
|
|
41
|
-
from dl_backtrace.pytorch_backtrace import prop as UP
|
|
42
|
-
from dl_backtrace.pytorch_backtrace import activation_master
|
|
43
|
-
```
|
|
44
|
-
|
|
45
|
-
## Example Notebooks
|
|
46
|
-
|
|
47
|
-
| Name | Link |
|
|
48
|
-
|-------------|-------------------------------|
|
|
49
|
-
| Tensorflow Backtrace Tabular Dataset | [Colab Link](https://colab.research.google.com/drive/1A4J-wgShD7M_pUmsqbnI8BD3hE43dT8o?usp=sharing) |
|
|
50
|
-
| Tensorflow Backtrace Textual Dataset | [Colab Link](https://colab.research.google.com/drive/1zT_K8mHdzyfQe_LG576qwiBqw8o6LRQH?usp=sharing) |
|
|
51
|
-
| Tensorflow Backtrace Image Dataset | [Colab Link](https://colab.research.google.com/drive/1KbLtcjYDrPQvG6oJj1wmHdiWxRrtKNrV?usp=sharing) |
|
|
52
|
-
| Pytorch Backtrace Tabular Dataset | [Colab Link](https://colab.research.google.com/drive/1Z4UJNFd83dwXBMM0cmiNYEjh6xhRtQA_?usp=sharing) |
|
|
53
|
-
| Pytorch Backtrace Image Dataset | [Colab Link](https://colab.research.google.com/drive/14XKwCsS9IZep2AlDDYfavnVRNz8_b-jM?usp=sharing) |
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
For more detailed examples and use cases, check out our documentation.
|
|
57
|
-
|
|
58
|
-
## Supported Layers and Future Work
|
|
59
|
-
- [x] Dense (Fully Connected) Layer
|
|
60
|
-
- [x] Convolutional Layer (Conv2D)
|
|
61
|
-
- [x] Reshape Layer
|
|
62
|
-
- [x] Flatten Layer
|
|
63
|
-
- [x] Global Average Pooling 2D Layer
|
|
64
|
-
- [x] Max Pooling 2D Layer
|
|
65
|
-
- [x] Average Pooling 2D Layer
|
|
66
|
-
- [x] Concatenate Layer
|
|
67
|
-
- [x] Add Layer
|
|
68
|
-
- [x] Long Short-Term Memory (LSTM) Layer
|
|
69
|
-
- [x] Batch Normalisation Layer
|
|
70
|
-
- [x] Dropout Layer
|
|
71
|
-
- [ ] Embedding Layer
|
|
72
|
-
- [ ] Other Custom Layers
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
## Getting Started
|
|
76
|
-
If you are new to Backtrace, head over to our Getting Started Guide to quickly set up and use the module in your projects.
|
|
77
|
-
|
|
78
|
-
## Contributing
|
|
79
|
-
We welcome contributions from the community. To contribute, please follow our Contribution Guidelines.
|
|
80
|
-
|
|
81
|
-
## License
|
|
82
|
-
This project is licensed under the MIT License - see the LICENSE file for details.
|
|
83
|
-
|
|
84
|
-
## Contact
|
|
85
|
-
For any inquiries or support, please contact AryaXAI Support.
|
|
@@ -1,102 +0,0 @@
|
|
|
1
|
-
Metadata-Version: 2.1
|
|
2
|
-
Name: dl_backtrace
|
|
3
|
-
Version: 0.0.16.dev4
|
|
4
|
-
Summary: A python SDK for Deep Learning Backtrace
|
|
5
|
-
Home-page: https://xai.arya.ai/docs/introduction
|
|
6
|
-
License: MIT
|
|
7
|
-
Keywords: aryaxai deep learning backtrace,ML observability
|
|
8
|
-
Classifier: Intended Audience :: Developers
|
|
9
|
-
Classifier: Intended Audience :: Science/Research
|
|
10
|
-
Classifier: Programming Language :: Python :: 3
|
|
11
|
-
Classifier: License :: OSI Approved :: MIT License
|
|
12
|
-
Classifier: Operating System :: OS Independent
|
|
13
|
-
Requires-Python: >=3.0
|
|
14
|
-
Description-Content-Type: text/markdown
|
|
15
|
-
License-File: LICENSE
|
|
16
|
-
Requires-Dist: tensorflow==2.14.0
|
|
17
|
-
|
|
18
|
-
# AryaXai-Backtrace
|
|
19
|
-
Backtrace module for Generating Explainability on Deep learning models using TensorFlow / Pytorch
|
|
20
|
-
|
|
21
|
-
# Backtrace Module
|
|
22
|
-
[](LICENSE)
|
|
23
|
-
|
|
24
|
-
## Overview
|
|
25
|
-
|
|
26
|
-
The Backtrace Module is a powerful and patent-pending algorithm developed by AryaXAI for enhancing the explainability of AI models, particularly in the context of complex techniques like deep learning.
|
|
27
|
-
|
|
28
|
-
## Features
|
|
29
|
-
|
|
30
|
-
- **Explainability:** Gain deep insights into your AI models by using the Backtrace algorithm, providing multiple explanations for their decisions.
|
|
31
|
-
|
|
32
|
-
- **Consistency:** Ensure consistent and accurate explanations across different scenarios and use cases.
|
|
33
|
-
|
|
34
|
-
- **Mission-Critical Support:** Tailored for mission-critical AI use cases where transparency is paramount.
|
|
35
|
-
|
|
36
|
-
## Installation
|
|
37
|
-
|
|
38
|
-
To integrate the Backtrace Module into your project, follow these simple steps:
|
|
39
|
-
|
|
40
|
-
```bash
|
|
41
|
-
pip install dl-backtrace
|
|
42
|
-
```
|
|
43
|
-
|
|
44
|
-
usage for Tensoflow based models
|
|
45
|
-
|
|
46
|
-
```python
|
|
47
|
-
from dl_backtrace.tf_backtrace import Backtrace as B
|
|
48
|
-
from dl_backtrace.tf_backtrace import contrast as UC
|
|
49
|
-
from dl_backtrace.tf_backtrace import prop as UP
|
|
50
|
-
from dl_backtrace.tf_backtrace import activation_master
|
|
51
|
-
```
|
|
52
|
-
|
|
53
|
-
usage for Pytorch based models
|
|
54
|
-
|
|
55
|
-
```python
|
|
56
|
-
from dl_backtrace.pytorch_backtrace import Backtrace as B
|
|
57
|
-
from dl_backtrace.pytorch_backtrace import contrast as UC
|
|
58
|
-
from dl_backtrace.pytorch_backtrace import prop as UP
|
|
59
|
-
from dl_backtrace.pytorch_backtrace import activation_master
|
|
60
|
-
```
|
|
61
|
-
|
|
62
|
-
## Example Notebooks
|
|
63
|
-
|
|
64
|
-
| Name | Link |
|
|
65
|
-
|-------------|-------------------------------|
|
|
66
|
-
| Tensorflow Backtrace Tabular Dataset | [Colab Link](https://colab.research.google.com/drive/1A4J-wgShD7M_pUmsqbnI8BD3hE43dT8o?usp=sharing) |
|
|
67
|
-
| Tensorflow Backtrace Textual Dataset | [Colab Link](https://colab.research.google.com/drive/1zT_K8mHdzyfQe_LG576qwiBqw8o6LRQH?usp=sharing) |
|
|
68
|
-
| Tensorflow Backtrace Image Dataset | [Colab Link](https://colab.research.google.com/drive/1KbLtcjYDrPQvG6oJj1wmHdiWxRrtKNrV?usp=sharing) |
|
|
69
|
-
| Pytorch Backtrace Tabular Dataset | [Colab Link](https://colab.research.google.com/drive/1Z4UJNFd83dwXBMM0cmiNYEjh6xhRtQA_?usp=sharing) |
|
|
70
|
-
| Pytorch Backtrace Image Dataset | [Colab Link](https://colab.research.google.com/drive/14XKwCsS9IZep2AlDDYfavnVRNz8_b-jM?usp=sharing) |
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
For more detailed examples and use cases, check out our documentation.
|
|
74
|
-
|
|
75
|
-
## Supported Layers and Future Work
|
|
76
|
-
- [x] Dense (Fully Connected) Layer
|
|
77
|
-
- [x] Convolutional Layer (Conv2D)
|
|
78
|
-
- [x] Reshape Layer
|
|
79
|
-
- [x] Flatten Layer
|
|
80
|
-
- [x] Global Average Pooling 2D Layer
|
|
81
|
-
- [x] Max Pooling 2D Layer
|
|
82
|
-
- [x] Average Pooling 2D Layer
|
|
83
|
-
- [x] Concatenate Layer
|
|
84
|
-
- [x] Add Layer
|
|
85
|
-
- [x] Long Short-Term Memory (LSTM) Layer
|
|
86
|
-
- [x] Batch Normalisation Layer
|
|
87
|
-
- [x] Dropout Layer
|
|
88
|
-
- [ ] Embedding Layer
|
|
89
|
-
- [ ] Other Custom Layers
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
## Getting Started
|
|
93
|
-
If you are new to Backtrace, head over to our Getting Started Guide to quickly set up and use the module in your projects.
|
|
94
|
-
|
|
95
|
-
## Contributing
|
|
96
|
-
We welcome contributions from the community. To contribute, please follow our Contribution Guidelines.
|
|
97
|
-
|
|
98
|
-
## License
|
|
99
|
-
This project is licensed under the MIT License - see the LICENSE file for details.
|
|
100
|
-
|
|
101
|
-
## Contact
|
|
102
|
-
For any inquiries or support, please contact AryaXAI Support.
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/old_backtrace/tf_backtrace/__init__.py
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/pytorch_backtrace/backtrace/config.py
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/tf_backtrace/backtrace/__init__.py
RENAMED
|
File without changes
|
|
File without changes
|
{dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/tf_backtrace/backtrace/models.py
RENAMED
|
File without changes
|
{dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/tf_backtrace/backtrace/server.py
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{dl_backtrace-0.0.16.dev4 → dl_backtrace-0.0.18}/dl_backtrace/tf_backtrace/backtrace/utils/helper.py
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|