noshot 1.0.0__py3-none-any.whl → 2.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
Files changed (34)
  1. noshot/data/ML TS XAI/TS/bill-charge.ipynb +239 -0
  2. noshot/data/ML TS XAI/{XAI/XAI 2/Exp-3 (EDA-loan).ipynb → TS/daily-min-temperatures.ipynb} +68 -50
  3. noshot/data/ML TS XAI/TS/data/bill-data.csv +21 -0
  4. noshot/data/ML TS XAI/TS/data/daily-min-temperatures.csv +3651 -0
  5. noshot/data/ML TS XAI/TS/data/monthly-sunspots.csv +2821 -0
  6. noshot/data/ML TS XAI/TS/monthly-sunspots.ipynb +241 -0
  7. {noshot-1.0.0.dist-info → noshot-2.0.0.dist-info}/METADATA +1 -1
  8. noshot-2.0.0.dist-info/RECORD +15 -0
  9. {noshot-1.0.0.dist-info → noshot-2.0.0.dist-info}/WHEEL +1 -1
  10. noshot/data/ML TS XAI/TS/10. Seasonal ARIMA Forecasting.ipynb +0 -246
  11. noshot/data/ML TS XAI/TS/11. Multivariate ARIMA Forecasting.ipynb +0 -228
  12. noshot/data/ML TS XAI/TS/6. ACF PACF.ipynb +0 -77
  13. noshot/data/ML TS XAI/TS/7. Differencing.ipynb +0 -167
  14. noshot/data/ML TS XAI/TS/8. ARMA Forecasting.ipynb +0 -197
  15. noshot/data/ML TS XAI/TS/9. ARIMA Forecasting.ipynb +0 -220
  16. noshot/data/ML TS XAI/XAI/XAI 1/EDA2_chipsdatset.ipynb +0 -633
  17. noshot/data/ML TS XAI/XAI/XAI 1/EDA_IRISH_8thjan.ipynb +0 -326
  18. noshot/data/ML TS XAI/XAI/XAI 1/XAI_EX1 MODEL BIAS (FINAL).ipynb +0 -487
  19. noshot/data/ML TS XAI/XAI/XAI 1/complete_guide_to_eda_on_text_data.ipynb +0 -845
  20. noshot/data/ML TS XAI/XAI/XAI 1/deepchecksframeworks.ipynb +0 -100
  21. noshot/data/ML TS XAI/XAI/XAI 1/deepexplainers (mnist).ipynb +0 -90
  22. noshot/data/ML TS XAI/XAI/XAI 1/guidedbackpropagation.ipynb +0 -203
  23. noshot/data/ML TS XAI/XAI/XAI 1/updated_image_EDA1_with_LRP.ipynb +0 -3998
  24. noshot/data/ML TS XAI/XAI/XAI 1/zebrastripes.ipynb +0 -271
  25. noshot/data/ML TS XAI/XAI/XAI 2/EXP_5.ipynb +0 -1545
  26. noshot/data/ML TS XAI/XAI/XAI 2/Exp-3 (EDA-movie).ipynb +0 -229
  27. noshot/data/ML TS XAI/XAI/XAI 2/Exp-4(Flower dataset).ipynb +0 -237
  28. noshot/data/ML TS XAI/XAI/XAI 2/Exp-4.ipynb +0 -241
  29. noshot/data/ML TS XAI/XAI/XAI 2/Exp_2.ipynb +0 -352
  30. noshot/data/ML TS XAI/XAI/XAI 2/Exp_7.ipynb +0 -110
  31. noshot/data/ML TS XAI/XAI/XAI 2/FeatureImportance_SensitivityAnalysis.ipynb +0 -708
  32. noshot-1.0.0.dist-info/RECORD +0 -32
  33. {noshot-1.0.0.dist-info → noshot-2.0.0.dist-info}/licenses/LICENSE.txt +0 -0
  34. {noshot-1.0.0.dist-info → noshot-2.0.0.dist-info}/top_level.txt +0 -0
noshot/data/ML TS XAI/XAI/XAI 1/zebrastripes.ipynb +0 -271
@@ -1,271 +0,0 @@
- {
-  "cells": [
-   {
-    "cell_type": "markdown",
-    "metadata": {
-     "id": "4PnCCPc1p3y0"
-    },
-    "source": []
-   },
-   {
-    "cell_type": "code",
-    "execution_count": null,
-    "metadata": {
-     "colab": {
-      "base_uri": "https://localhost:8080/",
-      "height": 998
-     },
-     "id": "Vto3GO8wjdwW",
-     "outputId": "e5434ca5-a006-4466-8494-64b63fd274ed"
-    },
-    "outputs": [],
-    "source": [
-     "import numpy as np\n",
-     "import tensorflow as tf\n",
-     "from tensorflow.keras.applications.inception_v3 import InceptionV3, preprocess_input, decode_predictions\n",
-     "from tensorflow.keras.preprocessing.image import load_img, img_to_array\n",
-     "from sklearn.linear_model import SGDClassifier\n",
-     "import matplotlib.pyplot as plt\n",
-     "import os\n",
-     "\n",
-     "# Step 1: Load Pretrained Model\n",
-     "model = InceptionV3(weights='imagenet')\n",
-     "layer_name = 'mixed10' # Layer to get activations from\n",
-     "\n",
-     "intermediate_model = tf.keras.Model(\n",
-     "    inputs=model.input,\n",
-     "    outputs=model.get_layer(layer_name).output\n",
-     ")\n",
-     "\n",
-     "# Step 2: Load a local zebra image\n",
-     "img_path = 'zebra.jpg' # Ensure this image exists in your working directory\n",
-     "img = load_img(img_path, target_size=(299, 299))\n",
-     "x = img_to_array(img)\n",
-     "x = np.expand_dims(x, axis=0)\n",
-     "x = preprocess_input(x)\n",
-     "\n",
-     "preds = model.predict(x)\n",
-     "print(\"Prediction:\", decode_predictions(preds, top=3)[0])\n",
-     "\n",
-     "# Step 3: Generate synthetic \"striped\" concept images\n",
-     "def generate_striped_images(num=20):\n",
-     "    images = []\n",
-     "    for _ in range(num):\n",
-     "        img = np.zeros((299, 299, 3), dtype=np.uint8)\n",
-     "        for i in range(0, 299, 10):\n",
-     "            img[:, i:i+5, :] = 255 # white vertical stripes\n",
-     "        images.append(preprocess_input(np.expand_dims(img, axis=0)))\n",
-     "    return np.vstack(images)\n",
-     "\n",
-     "striped_images = generate_striped_images(20)\n",
-     "\n",
-     "# Step 4: Extract activations\n",
-     "concept_activations = intermediate_model.predict(striped_images)\n",
-     "image_activations = intermediate_model.predict(x)\n",
-     "\n",
-     "# Step 5: Train CAV (Concept Activation Vector)\n",
-     "X = np.concatenate([concept_activations, np.random.normal(size=concept_activations.shape)])\n",
-     "y = np.array([1] * len(concept_activations) + [0] * len(concept_activations))\n",
-     "\n",
-     "X_flat = X.reshape(X.shape[0], -1)\n",
-     "clf = SGDClassifier().fit(X_flat, y)\n",
-     "cav = clf.coef_.reshape(concept_activations.shape[1:])\n",
-     "\n",
-     "# Step 6: Compute directional derivative\n",
-     "image_activ_flat = image_activations.reshape(-1)\n",
-     "cav_flat = cav.reshape(-1)\n",
-     "directional_derivative = np.dot(image_activ_flat, cav_flat)\n",
-     "print(f\"Directional derivative: {directional_derivative:.4f}\")\n",
-     "\n",
-     "# Step 7: TCAV Score: high means concept strongly influences the prediction\n",
-     "tcav_score = np.mean([np.dot(act.reshape(-1), cav_flat) > 0 for act in concept_activations])\n",
-     "print(f\"TCAV Score for 'striped' concept influencing 'zebra': {tcav_score:.2f}\")\n",
-     "\n",
-     "# Step 8: Visualize\n",
-     "plt.imshow(img)\n",
-     "plt.title(\"Target Image: Zebra\")\n",
-     "plt.axis('off')\n",
-     "plt.show()\n",
-     "\n",
-     "# Visualize a striped concept\n",
-     "plt.imshow(generate_striped_images(1)[0].astype(np.uint8))\n",
-     "plt.title(\"Concept Image: Striped\")\n",
-     "plt.axis('off')\n",
-     "plt.show()\n"
-    ]
-   },
-   {
-    "cell_type": "code",
-    "execution_count": null,
-    "metadata": {
-     "colab": {
-      "base_uri": "https://localhost:8080/",
-      "height": 391
-     },
-     "id": "hAoVI8lvrxMK",
-     "outputId": "40547026-5028-4e9d-faa2-a42f40db32f2"
-    },
-    "outputs": [],
-    "source": [
-     "import seaborn as sns\n",
-     "\n",
-     "# Plotting TCAV Score\n",
-     "plt.figure(figsize=(6, 4))\n",
-     "sns.barplot(x=['Striped'], y=[tcav_score])\n",
-     "plt.ylim(0, 1)\n",
-     "plt.ylabel(\"TCAV Score\")\n",
-     "plt.title(\"Concept Influence on 'Zebra' Prediction\")\n",
-     "plt.show()\n"
-    ]
-   },
-   {
-    "cell_type": "code",
-    "execution_count": null,
-    "metadata": {
-     "id": "uHPJjGGarxO0"
-    },
-    "outputs": [],
-    "source": []
-   },
-   {
-    "cell_type": "code",
-    "execution_count": null,
-    "metadata": {
-     "colab": {
-      "base_uri": "https://localhost:8080/",
-      "height": 471
-     },
-     "id": "zdSI-SG7rxUv",
-     "outputId": "c02c478d-9e85-4714-92ed-7371f97b9301"
-    },
-    "outputs": [],
-    "source": [
-     "# Visualizing a few feature maps\n",
-     "activ_map = image_activations[0] # shape: (H, W, C)\n",
-     "\n",
-     "plt.figure(figsize=(12, 6))\n",
-     "for i in range(6):\n",
-     "    plt.subplot(2, 3, i+1)\n",
-     "    plt.imshow(activ_map[:, :, i], cmap='viridis')\n",
-     "    plt.axis('off')\n",
-     "    plt.title(f\"Activation {i}\")\n",
-     "plt.suptitle(\"Sample Activations from 'mixed10'\")\n",
-     "plt.tight_layout()\n",
-     "plt.show()\n"
-    ]
-   },
-   {
-    "cell_type": "code",
-    "execution_count": null,
-    "metadata": {
-     "colab": {
-      "base_uri": "https://localhost:8080/",
-      "height": 1000
-     },
-     "id": "1_Wt1uCxjdzm",
-     "outputId": "6ba618a0-dadb-49ac-9d7f-6ae4d0def622"
-    },
-    "outputs": [],
-    "source": [
-     "import os\n",
-     "import numpy as np\n",
-     "import tensorflow as tf\n",
-     "from tensorflow.keras.applications.inception_v3 import InceptionV3, preprocess_input, decode_predictions\n",
-     "from tensorflow.keras.preprocessing.image import load_img, img_to_array\n",
-     "from sklearn.linear_model import SGDClassifier\n",
-     "import matplotlib.pyplot as plt\n",
-     "\n",
-     "# Load pretrained model and intermediate layer\n",
-     "model = InceptionV3(weights='imagenet')\n",
-     "layer_name = 'mixed10'\n",
-     "intermediate_model = tf.keras.Model(inputs=model.input, outputs=model.get_layer(layer_name).output)\n",
-     "\n",
-     "# Load and preprocess a target image (e.g., zebra)\n",
-     "img_path = '/home/zebra.jpg' # Put your own target image here\n",
-     "img = load_img(img_path, target_size=(299, 299))\n",
-     "x = img_to_array(img)\n",
-     "x = np.expand_dims(x, axis=0)\n",
-     "x = preprocess_input(x)\n",
-     "\n",
-     "preds = model.predict(x)\n",
-     "print(\"Prediction:\", decode_predictions(preds, top=3)[0])\n",
-     "\n",
-     "# Helper function to load images from a folder\n",
-     "def load_images_from_folder(folder, target_size=(299, 299)):\n",
-     "    images = []\n",
-     "    for filename in os.listdir(folder):\n",
-     "        if filename.lower().endswith(('.jpg', '.png', '.jpeg')):\n",
-     "            path = os.path.join(folder, filename)\n",
-     "            img = load_img(path, target_size=target_size)\n",
-     "            img_array = preprocess_input(np.expand_dims(img_to_array(img), axis=0))\n",
-     "            images.append(img_array)\n",
-     "    return np.vstack(images)\n",
-     "\n",
-     "# Load concept and random images from folders\n",
-     "concept_folder = '/home/striped'\n",
-     "random_folder = '/home/sky'\n",
-     "\n",
-     "concept_images = load_images_from_folder(concept_folder)\n",
-     "random_images = load_images_from_folder(random_folder)\n",
-     "\n",
-     "# Extract activations\n",
-     "concept_activations = intermediate_model.predict(concept_images)\n",
-     "random_activations = intermediate_model.predict(random_images)\n",
-     "image_activations = intermediate_model.predict(x)\n",
-     "\n",
-     "# Train CAV\n",
-     "X = np.concatenate([concept_activations, random_activations])\n",
-     "y = np.array([1] * len(concept_activations) + [0] * len(random_activations))\n",
-     "\n",
-     "X_flat = X.reshape(X.shape[0], -1)\n",
-     "clf = SGDClassifier().fit(X_flat, y)\n",
-     "cav = clf.coef_.reshape(concept_activations.shape[1:])\n",
-     "\n",
-     "# Compute directional derivative\n",
-     "image_activ_flat = image_activations.reshape(-1)\n",
-     "cav_flat = cav.reshape(-1)\n",
-     "directional_derivative = np.dot(image_activ_flat, cav_flat)\n",
-     "print(f\"Directional derivative: {directional_derivative:.4f}\")\n",
-     "\n",
-     "# TCAV Score\n",
-     "tcav_score = np.mean([np.dot(act.reshape(-1), cav_flat) > 0 for act in concept_activations])\n",
-     "print(f\"TCAV Score for concept: {tcav_score:.2f}\")\n",
-     "\n",
-     "# Visualization\n",
-     "plt.imshow(img)\n",
-     "plt.title(\"Target Image\")\n",
-     "plt.axis('off')\n",
-     "plt.show()\n",
-     "\n",
-     "plt.imshow(load_img(os.path.join(concept_folder, os.listdir(concept_folder)[0])))\n",
-     "plt.title(\"Concept Image Sample\")\n",
-     "plt.axis('off')\n",
-     "plt.show()\n"
-    ]
-   }
-  ],
-  "metadata": {
-   "colab": {
-    "provenance": []
-   },
-   "kernelspec": {
-    "display_name": "Python 3 (ipykernel)",
-    "language": "python",
-    "name": "python3"
-   },
-   "language_info": {
-    "codemirror_mode": {
-     "name": "ipython",
-     "version": 3
-    },
-    "file_extension": ".py",
-    "mimetype": "text/x-python",
-    "name": "python",
-    "nbconvert_exporter": "python",
-    "pygments_lexer": "ipython3",
-    "version": "3.12.4"
-   }
-  },
-  "nbformat": 4,
-  "nbformat_minor": 4
- }
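
The removed zebrastripes.ipynb (file 24 in the list above) centres on one step: train a linear classifier that separates "striped" concept activations from random activations, take its weight vector as the Concept Activation Vector (CAV), and report the fraction of activations that project positively onto it as the TCAV score. The following is a minimal, self-contained sketch of that step mirroring the deleted code; the random arrays stand in for real InceptionV3 'mixed10' activations, and the shapes, sizes, and seeds are illustrative assumptions, not values from the notebook.

```python
import numpy as np
from sklearn.linear_model import SGDClassifier

rng = np.random.default_rng(0)
# Stand-ins for the intermediate_model.predict(...) outputs in the deleted notebook:
concept_acts = rng.normal(size=(20, 8, 8, 32))  # activations of 'striped' concept images
random_acts = rng.normal(size=(20, 8, 8, 32))   # activations of random/negative images

# Train a linear separator between concept and random activations;
# its weight vector is the CAV.
X = np.concatenate([concept_acts, random_acts]).reshape(40, -1)
y = np.array([1] * 20 + [0] * 20)
cav = SGDClassifier(random_state=0).fit(X, y).coef_.reshape(-1)

# Directional derivative for one input's activations: its dot product with the CAV.
image_act = rng.normal(size=(8, 8, 32)).reshape(-1)
print(f"Directional derivative: {np.dot(image_act, cav):.4f}")

# TCAV score as the notebook computes it: fraction of concept activations
# whose projection onto the CAV is positive.
tcav_score = np.mean([np.dot(a.reshape(-1), cav) > 0 for a in concept_acts])
print(f"TCAV score: {tcav_score:.2f}")
```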