unocg 0.0.2__py3-none-any.whl → 0.0.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of unocg might be problematic. Click here for more details.

File without changes
unocg/utils/data.py ADDED
@@ -0,0 +1,95 @@
1
+ """
2
+ Data loading for microstructure datasets
3
+ """
4
+
5
+ import math
6
+ from typing import Optional, Union, Dict
7
+
8
+ import torch
9
+ import h5py
10
+
11
+
12
class MicrostructureDataset(torch.utils.data.Dataset):
    """
    PyTorch dataset of microstructure images stored in a group of an HDF5 file.

    Each dataset inside the HDF5 group is expected to contain an ``"image"``
    entry; images are converted to tensors and cached in memory, either lazily
    on first access or eagerly at construction time.
    """

    def __init__(
        self,
        file_name: str,
        group_name: str,
        lazy_loading: bool = True,
        device: Union[str, torch.device] = "cpu",
        dtype: Optional[torch.dtype] = None,
    ):
        """
        Constructor of the class. Create a `PyTorch` dataset from given HDF5 file groups.

        :param file_name: path to the HDF5 file
        :type file_name: str
        :param group_name: path to the group in the HDF5 file
        :type group_name: str
        :param lazy_loading: if True, load each image on first access;
            otherwise load all images immediately
        :type lazy_loading: bool
        :param device: device the image tensors are placed on
        :param dtype: tensor dtype; defaults to ``torch.float32``
        """
        super().__init__()
        self.file_name = file_name
        self.group_name = group_name
        self.lazy_loading = lazy_loading
        self.keys = []          # names of all datasets found in the group
        self.loaded_keys = []   # subset of keys whose images are cached already
        self.images = {}        # cache: dataset name -> image tensor
        self.dtype = torch.float32 if dtype is None else dtype
        self.device = device
        self.tensor_args = {"dtype": self.dtype, "device": self.device}

        # Collect the dataset names once; the image data itself is only read
        # here when eager loading was requested.
        with h5py.File(self.file_name, "r") as file:
            for dset_name in file[self.group_name].keys():
                self.keys.append(dset_name)
        if not self.lazy_loading:
            for dset_name in self.keys:
                self.load_dset(dset_name)

    def __len__(self) -> int:
        """
        Get the length of the dataset, i.e. how many data points it contains.

        :return: Length of the dataset
        :rtype: int
        """
        return len(self.keys)

    def __getitem__(self, index: int) -> torch.Tensor:
        """
        Fetch a data point with given index from the dataset

        :param index: Index of the data point
        :type index: int
        :return: microstructure image (a clone, so callers may modify it freely)
        :rtype: torch.Tensor
        :raises ValueError: if ``index`` is out of range
        """
        if index >= len(self.keys):
            raise ValueError(f"Index {index} is out of range for dataset of length {len(self.keys)}")
        dset_name = self.keys[index]
        # Load on demand; skips the HDF5 file access if the image is cached.
        self.load_dset(dset_name, force_loading=False)
        return self.images[dset_name].clone()

    def load_dset(self, dset_name: str, force_loading: bool = True):
        """
        Load a single dataset ("image") from the HDF5 file into the cache.

        :param dset_name: name of the dataset within the group
        :param force_loading: if False, return immediately when the image
            is already cached; if True, always re-read from the file
        """
        if (not force_loading) and (dset_name in self.loaded_keys):
            return

        with h5py.File(self.file_name, "r") as file:
            image = torch.tensor(file[self.group_name][dset_name]["image"][...], **self.tensor_args)

        self.images[dset_name] = image
        self.loaded_keys.append(dset_name)
@@ -0,0 +1,48 @@
1
+ from typing import Dict
2
+
3
+ import torch
4
+ from unocg.problems import Problem
5
+ from unocg.solvers.torch import CgSolver
6
+ from unocg.modules.solvers import CgModule
7
+ from unocg.preconditioners import Preconditioner
8
+ import timeit
9
+
10
+
11
def benchmark(model, inputs, device, n_runs=2, n_warmup=1, verbose=True):
    """
    Measure the average runtime of ``model(*inputs)`` over ``n_runs`` executions.

    :param model: callable to benchmark
    :param inputs: tuple of positional arguments passed to ``model``
    :param device: device the model runs on (used for CUDA synchronization)
    :param n_runs: number of timed executions to average over
    :param n_warmup: number of untimed warm-up executions
    :param verbose: if True, print the measured runtime
    :return: average runtime per execution in seconds
    """
    # Only synchronize on CUDA devices: torch.cuda.synchronize raises on
    # CPU-only runs, and CPU execution is synchronous anyway.
    needs_sync = torch.device(device).type == "cuda"

    def run_model():
        with torch.inference_mode():
            model(*inputs)
        if needs_sync:
            torch.cuda.synchronize(device)

    for _ in range(n_warmup):
        run_model()
    model_time = timeit.timeit(run_model, number=n_runs) / n_runs
    if verbose:
        print(f"Runtime per execution: {model_time*1000.:.4f}ms")
    return model_time
23
+
24
+
25
def benchmark_cg(cg_module, param_fields, loadings, device, n_runs=2, n_warmup=1, verbose=True):
    """
    Benchmark a CG solver module and its main building blocks.

    Times the full solver, a single solver iteration, one preconditioner
    application, and one residual (matrix-vector) computation. Cheaper
    operations are run more often for a more stable timing.

    :param cg_module: CG solver module to benchmark
    :param param_fields: parameter fields passed to the solver
    :param loadings: loading cases passed to the solver
    :param device: device the computations run on
    :param n_runs: base number of timed runs (scaled up for cheaper operations)
    :param n_warmup: number of warm-up runs for the sub-component benchmarks
    :param verbose: forwarded semantics unused here; kept for interface compatibility
    :return: tuple ``(time_overall, time_iter, time_prec, time_matvec)`` in seconds
    """
    with torch.inference_mode():
        guess = cg_module.zero_guess(param_fields, loadings)

    inputs_module = (guess, param_fields, loadings)

    print("Overall solver:")
    # One full solve up front; its solution u is reused as a representative
    # field for the per-component benchmarks below.
    u = cg_module(*inputs_module)
    time_overall = benchmark(cg_module, inputs_module, device, n_runs=n_runs)

    iter_layer = cg_module.iteration_layers[0]
    print("Solver iteration:")
    batch_shape = (*param_fields.shape[:(-cg_module.n_dim - 1)], loadings.shape[0])
    iv_fields, iv_scalars = iter_layer.init_internal_variables(batch_shape=batch_shape, init_residual=u)
    time_iter = benchmark(iter_layer, (u, u, param_fields, loadings, iv_fields, iv_scalars), device, n_runs=n_runs*10)

    print("Preconditioner application:")
    time_prec = benchmark(cg_module.prec_model, (u,), device, n_runs=n_runs*100, n_warmup=n_warmup)

    print("Residual computation:")
    time_matvec = benchmark(cg_module.matvec_model, (u, param_fields), device, n_runs=n_runs*10, n_warmup=n_warmup)

    return time_overall, time_iter, time_prec, time_matvec
@@ -0,0 +1,339 @@
1
+ """
2
+ Plotting utilities
3
+ """
4
+ from typing import Optional, Iterable, List, Tuple
5
+
6
+ import matplotlib
7
+ from matplotlib.tri import Triangulation
8
+ import numpy as np
9
+ import torch
10
+ from matplotlib import pyplot as plt, ticker
11
+ import math
12
+ import shutil
13
+ from unocg.problems import Problem, BC
14
+
15
# PyVista is optional; only the 3D plotting helpers require it.
try:
    import pyvista as pv
except ImportError:
    pass

# Render text with LaTeX only when a LaTeX installation is available.
plt.rcParams["text.usetex"] = True if shutil.which('latex') else False
matplotlib.rcParams["text.latex.preamble"] = r"\usepackage{amsmath}"
plot_width = 6.3  # default figure width in inches

# Default color cycle used by the convergence plots.
colors = [
    "g",
    "#00BEFF",
    "#004191",
    "r",
    "k"
]
31
+
32
+
33
def plot_ms(image: torch.Tensor, file: Optional[str] = None, show_axis: bool = False, show_cbar: bool = False):
    """
    Plot a microstructure image on the unit cell.

    :param image: 2D microstructure image (two-phase indicator field)
    :param file: if given, save the figure to this path
    :param show_axis: whether to draw axes with normalized coordinates
    :param show_cbar: whether to add a colorbar for the phase indicator
    :return: None
    """
    image = image.detach().cpu()
    fig_height = 2.65 if show_cbar else 6.3 / 2
    fig, ax = plt.subplots(1, 1, figsize=[6.3 / 2, fig_height], dpi=300)
    # Two discrete colors, one per phase.
    ms_cmap = plt.get_cmap("viridis", 2)
    im = ax.imshow(image, origin="lower", interpolation="none", extent=(-0.5, 0.5, -0.5, 0.5), cmap=ms_cmap)

    if show_cbar:
        cb = fig.colorbar(im, ax=ax, ticks=[0, 1])
        cb.ax.set_title(rf"$\chi_1$")
        cb.ax.set_yticklabels(["$0$", "$1$"])

    if not show_axis:
        ax.axis("off")
    else:
        ax.axis("on")
        ax.set_xlim(-0.5, 0.5)
        ax.set_ylim(-0.5, 0.5)
        ax.set_xticks([-0.5, 0.0, 0.5])
        ax.set_yticks([-0.5, 0.0, 0.5])
        ax.set_xlabel(r"$\frac{x_1}{l_1}$")
        ax.set_ylabel(r"$\frac{x_2}{l_2}$")

    plt.tight_layout()
    if file is not None:
        plt.savefig(file, dpi=300)
    plt.show()
68
+
69
+
70
def plot_prec_action(ax, field_ref, init_res, precs, labels, ch_idx=0, plot_res=False, centered=True, ch_label=None):
    """
    Plot the action of one or more preconditioners on an initial residual,
    next to a reference field, on a shared color scale.

    :param ax: 2D array of matplotlib axes (row 0 is filled left to right)
    :param field_ref: reference solution field; channel ``ch_idx`` of batch
        element (0, 0) is plotted
    :param init_res: initial residual field the preconditioners are applied to
    :param precs: iterable of preconditioners exposing ``apply_field``
    :param labels: one label per preconditioner, used in the subplot titles
    :param ch_idx: channel index to plot
    :param plot_res: if True, also plot the raw initial residual first
    :param centered: if True, use a color scale symmetric around zero
    :param ch_label: optional channel label inserted into the titles
    """
    # Select batch element (0, 0) and the requested channel everywhere below.
    idx = (0,0,ch_idx)
    ax_i = 0
    # Initialize the shared color range from the reference field.
    if centered:
        s_abs_max = torch.max(field_ref[idx].cpu().abs())
        s_max, s_min = s_abs_max, -s_abs_max
    else:
        s_max, s_min = torch.max(field_ref[idx].cpu()), torch.min(field_ref[idx].cpu())

    prec_actions = []
    with torch.inference_mode():
        for prec in precs:
            prec_action = prec.apply_field(init_res)
            # Rescale so the preconditioner action is comparable in magnitude
            # to the reference field.
            prec_action = prec_action / prec_action[idx].max() * field_ref[idx].abs().max()
            prec_actions.append(prec_action)
            # Widen the shared color range to cover this action as well.
            if centered:
                s_abs_max = torch.maximum(s_abs_max, torch.max(prec_action[idx].cpu().abs()))
                s_max, s_min = s_abs_max, -s_abs_max
            else:
                s_max = torch.maximum(s_max, torch.max(prec_action[idx].cpu()))
                s_min = torch.minimum(s_min, torch.min(prec_action[idx].cpu()))

    if plot_res:
        im = ax[0,ax_i].imshow(init_res[idx].cpu().detach(), origin="lower", cmap="seismic")
        if ch_label is None:
            ax[0,ax_i].set_title(r"$\boldsymbol{r}^{(0)}$")
        else:
            ax[0,ax_i].set_title(rf"$\left( \boldsymbol{{r}}^{{(0)}} \right)_{{{ch_label}}}$")
        # NOTE(review): ax[ax_i] indexes a whole row of axes, unlike the
        # ax[0,ax_i] used by every other colorbar call here — confirm intended.
        plt.colorbar(im, ax=ax[ax_i])
        ax_i += 1

    # Reference field panel.
    im = ax[0,ax_i].imshow(field_ref[idx].cpu().detach(), origin="lower", cmap="jet", vmax=s_max, vmin=s_min)
    if ch_label is None:
        ax[0,ax_i].set_title(r"$\boldsymbol{u}_{\underline{\mu}}$")
    else:
        ax[0,ax_i].set_title(rf"$\left( \boldsymbol{{u}}_{{\underline{{a}}}} \right)_{{{ch_label}}}$")
    plt.colorbar(im, ax=ax[0,ax_i])
    ax_i += 1

    # One panel per preconditioner action, all on the shared color range.
    for prec_action, label in zip(prec_actions, labels):
        im = ax[0,ax_i].imshow(prec_action[idx].cpu().detach(), origin="lower", cmap="jet", vmax=s_max, vmin=s_min)
        if ch_label is None:
            ax[0,ax_i].set_title(rf"$\boldsymbol{{s}}_\text{{{label}}}$")
        else:
            ax[0,ax_i].set_title(rf"$\left( \boldsymbol{{s}}_\text{{{label}}} \right)_{{{ch_label}}}$")
        plt.colorbar(im, ax=ax[0,ax_i])
        ax_i += 1

    # Image panels need no tick marks.
    for ax_handle in ax.ravel():
        ax_handle.set_xticks([])
        ax_handle.set_yticks([])
121
+
122
+
123
def plot_convergence(ax, results, labels, colors, metrics, metric_labels, load_names=None, rates=None, show_load_labels=False,
                     xmin=0, xmax=None, bounds=False, ymin=1e-10, ymax=1e0, zoom=False, zoom_it = 10, zoom_tol = 1e-3):
    """
    Plot normalized convergence histories on a grid of axes, one row per
    metric and one column per loading case.

    :param ax: 2D array of matplotlib axes with shape (len(metrics), len(load_names))
    :param results: iterable of result dicts with keys ``err_history`` and
        ``losses``; ``None`` entries are skipped
    :param labels: legend label per result; padded with "CG" if too short
    :param colors: line color per result
    :param metrics: keys into ``result["losses"]`` selecting the curves to plot
    :param metric_labels: y-axis label per metric
    :param load_names: loading-case names for the column titles; defaults to ["x"]
    :param rates: optional per-result rates (currently unused in the plotting loop)
    :param show_load_labels: if True, title each column with its loading name
    :param xmin: left x-axis limit
    :param xmax: right x-axis limit
    :param bounds: unused; kept for interface compatibility
    :param ymin: lower y-axis limit
    :param ymax: upper y-axis limit
    :param zoom: if True, add an inset zoom of the first iterations
    :param zoom_it: last iteration shown in the inset
    :param zoom_tol: lower y-limit of the inset
    """
    if load_names is None:
        load_names = ["x"]
    if rates is None:
        rates = (None,) * len(results)
    colors = colors[:len(results)]
    # Pad missing labels with the default "CG".
    while len(labels) < len(results):
        labels.append("CG")

    for load_i, load_name in enumerate(load_names):
        if show_load_labels:
            ax[0, load_i].set_title(f"loading {load_name}")

        for metric_i, (metric, metric_label) in enumerate(zip(metrics, metric_labels)):
            ax[metric_i, load_i].set_xlabel(r"iterations $[-]$")
            ax[metric_i, load_i].set_ylabel(metric_label)
            ax[metric_i, load_i].set_ylim(bottom=ymin, top=ymax)
            if zoom:
                # Inset showing the first zoom_it iterations in detail.
                axins = ax[metric_i, load_i].inset_axes([0.2, 0.15, 0.1, 0.45],
                    xlim=(-1, zoom_it), ylim=(zoom_tol, 1e0), xticklabels=[], yticklabels=[])
                axins.grid()
                axins.set_xticks([0, zoom_it], ["$0$", f"${zoom_it}$"])

            for result, label, color, rate in zip(results, labels, colors, rates):
                if result is None:
                    continue
                iters = torch.arange(result["err_history"].shape[0])
                metric_losses = result["losses"][metric]
                # Promote 1D histories to a single loading-case column.
                if metric_losses.ndim == 1:
                    metric_losses = metric_losses.unsqueeze(-1)
                # Curves are normalized by their initial value.
                ax[metric_i, load_i].semilogy(iters, metric_losses[:, load_i].cpu() / metric_losses[0, load_i].cpu(), '-', c=color, label=label)

                if zoom:
                    axins.semilogy(iters, metric_losses[:, load_i].cpu() / metric_losses[0, load_i].cpu(), '-', c=color)

            if zoom:
                ax[metric_i, load_i].indicate_inset_zoom(axins, edgecolor="black")

    for ax_handle in ax.ravel():
        ax_handle.grid()
        ax_handle.set_xlim(left=xmin, right=xmax)
169
+
170
def plot_convergence_histogram(ax, results, labels, colors, rtol=1e-6, bins=None, xmin=0, xmax=None, log_scale=False, legend=True):
    """
    Plot a histogram of iteration counts needed to reach a relative residual
    tolerance, one histogram per result.

    :param ax: matplotlib axes to draw on
    :param results: iterable of result dicts; ``None`` entries are skipped
    :param labels: legend label per result
    :param colors: histogram color per result
    :param rtol: relative residual tolerance defining convergence
    :param bins: forwarded to ``ax.hist``
    :param xmin: left x-axis limit (clamped to 1 on a log scale)
    :param xmax: right x-axis limit
    :param log_scale: if True, use a logarithmic x-axis
    :param legend: if True, draw the legend
    """
    for res, lbl, col in zip(results, labels, colors):
        if res is None:
            continue
        # Number of iterations whose relative residual still exceeds rtol.
        rel_res = get_rel_residual(res)
        n_iters = (rel_res > rtol).sum(dim=0)
        ax.hist(n_iters, edgecolor=col, facecolor=col, bins=bins, label=lbl)
    if not log_scale:
        ax.set_xlim(xmin, xmax)
    else:
        # Log axes cannot start at 0.
        ax.set_xlim(max(xmin, 1), xmax)
        ax.set_xscale("log")
    ax.set_axisbelow(True)
    ax.grid()
    if legend:
        ax.legend()
189
+
190
def get_rel_residual(result):
    """
    Compute the residual history normalized by the initial residual.

    :param result: dict whose ``err_history`` entry holds the residual norms
        per iteration; its last two dimensions are flattened into one
    :return: tensor of relative residuals (iteration 0 normalizes to 1)
    """
    # Flatten once instead of recomputing the same expression twice.
    err_history = result['err_history'].cpu().flatten(start_dim=-2)
    return err_history / err_history[0]
195
+
196
+
197
def plot_deformed_rve_2d(
    problem,
    disp,
    field,
    loading=None,
    fluctuation_scaling: float = 1.0,
    deformation_scaling: float = 1.0,
    plot_loading: bool = False,
    plot_boundary: bool = False,
    shading: str = "gouraud",
    file: Optional[str] = None,
    vmin: Optional[List[float]] = None,
    vmax: Optional[List[float]] = None,
    figsize: Optional[List[float]] = None
):
    """
    Plot a scalar field on the deformed RVE geometry, one subplot per
    loading case, using a triangulation of the deformed node coordinates.

    :param problem: problem instance providing ``get_deformed_coordinates``
        and ``get_node_coords``
    :param disp: displacement field
    :param field: scalar field plotted on the deformed mesh
    :param loading: loading cases; defaults to the 3x3 identity
    :param fluctuation_scaling: scaling of the displacement fluctuations
    :param deformation_scaling: scaling of the overall deformation
    :param plot_loading: overlay the affinely deformed boundary (dashed black)
    :param plot_boundary: overlay the deformed boundary (dashed red)
    :param shading: shading mode forwarded to ``tripcolor``
    :param file: if given, save the figure to this path
    :param vmin: optional per-loading lower color limits
    :param vmax: optional per-loading upper color limits
    :param figsize: figure size; defaults to [6.3, 2.0]
    :return: None
    """
    def apply_mask(triang, alpha=0.4):
        # Mask triangles with sidelength greater than a threshold alpha
        triangles = triang.triangles
        # Mask off unwanted triangles.
        x = triang.x
        y = triang.y
        xtri = x[triangles] - np.roll(x[triangles], 1, axis=1)
        ytri = y[triangles] - np.roll(y[triangles], 1, axis=1)
        maxi = np.max(np.sqrt(xtri**2 + ytri**2), axis=1)
        # apply masking
        triang.set_mask(maxi > alpha)

    if loading is None:
        loading = torch.eye(3, dtype=disp.dtype, device=disp.device)

    n_loadings = loading.shape[0]

    #if problem.n_dim == 2:
    #    disp = disp.transpose(-1, -2)

    # Node coordinates after applying the full deformation ...
    deformed_coords = problem.get_deformed_coordinates(
        disp, loading, fluctuation_scaling=fluctuation_scaling, deformation_scaling=deformation_scaling
    )
    # ... and after applying only the affine part (no fluctuations).
    loading_coords = problem.get_deformed_coordinates(
        disp, loading, fluctuation_scaling=0.0, deformation_scaling=deformation_scaling
    )
    coords = problem.get_node_coords()
    # NOTE(review): the boundary extraction below is commented out, but the
    # plot_loading / plot_boundary branches further down still reference
    # loading_boundary / deformed_boundary — enabling either flag raises a
    # NameError. Restore these lines (or remove the flags) to fix.
    #boundary_idx = problem.get_boundary_idx()
    #boundary = coords[..., boundary_idx[0], boundary_idx[1]]
    #deformed_boundary = deformed_coords[..., boundary_idx[0], boundary_idx[1]]
    #loading_boundary = loading_coords[..., boundary_idx[0], boundary_idx[1]]

    # Subplot titles, one per loading case.
    loadings = [
        rf"$\bar{{\boldsymbol{{\varepsilon}}}}=\bar{{\boldsymbol{{\varepsilon}}}}^{{({i + 1})}}$"
        for i in range(n_loadings)
    ]

    if figsize is None:
        figsize = [6.3, 2.0]

    # For 3D problems, plot the mid-plane cut along the last axis.
    if problem.n_dim == 3:
        N_cut = problem.n_grid[-1] // 2
        field = field[..., N_cut]
        deformed_coords = deformed_coords[..., N_cut]

    fig, ax = plt.subplots(1, n_loadings, figsize=figsize, dpi=300, squeeze=False)
    for load_idx, load_name in enumerate(loadings):
        ax[0, load_idx].axis("off")
        ax[0, load_idx].set_aspect("equal")
        ax[0, load_idx].set_title(load_name)
        tri = Triangulation(deformed_coords[load_idx, 0].ravel(), deformed_coords[load_idx, 1].ravel())
        # Drop stretched triangles created by the periodic wrap-around.
        apply_mask(tri, alpha=0.02)

        if vmin is None:
            vmin_idx = field[load_idx].min().item()
        else:
            vmin_idx = vmin[load_idx]
        if vmax is None:
            # Default upper limit at half the maximum for better contrast.
            vmax_idx = 0.5 * field[load_idx].max().item()
        else:
            vmax_idx = vmax[load_idx]

        tpc = ax[0, load_idx].tripcolor(
            tri, field[load_idx].ravel(), cmap="jet", shading=shading, rasterized=True, vmin=vmin_idx, vmax=vmax_idx
        )

        if plot_loading:
            ax[0, load_idx].plot(
                loading_boundary[load_idx, 0].ravel(),
                loading_boundary[load_idx, 1].ravel(),
                "k--",
                lw=1,
            )
        if plot_boundary:
            ax[0, load_idx].plot(
                deformed_boundary[load_idx, 0].ravel(),
                deformed_boundary[load_idx, 1].ravel(),
                "r--",
                lw=1,
            )
    # Single colorbar for the last plotted field.
    clb = fig.colorbar(tpc)
    clb.ax.set_title(r"$||\boldsymbol{\sigma}|| \,[\mathrm{GPa}]$")
    plt.tight_layout()
    if file is not None:
        plt.savefig(file, dpi=300)
    plt.show()
314
+
315
def plot_deformed_rve_3d(
    problem,
    disp,
    field,
    loadings,
    fluctuation_scaling: float = 1.0,
    deformation_scaling: float = 1.0,
    file: Optional[str] = None,
    vmin: Optional[List[float]] = None,
    vmax: Optional[List[float]] = None,
    figsize: Optional[List[float]] = None
):
    """
    Render a scalar field on the deformed 3D RVE with PyVista.

    Requires PyVista (imported as ``pv`` at module level when available).

    :param problem: problem instance providing ``get_node_coords`` and
        ``get_deformations``
    :param disp: displacement field; padded periodically to close the cell
    :param field: scalar field shown on the warped mesh; NOTE(review):
        ``field.numpy()`` is called without ``.cpu()`` — presumably the field
        must already live on the CPU; confirm against callers
    :param loadings: loading cases passed to ``get_deformations``
    :param fluctuation_scaling: NOTE(review): this parameter is ignored —
        ``get_deformations`` is called with a hard-coded 1.0; confirm intended
    :param deformation_scaling: scaling applied to the deformation vectors
    :param file: path the screenshot is written to
    :param vmin: lower color limit
    :param vmax: upper color limit
    :param figsize: screenshot window size
    :return: None
    """
    # Pad one layer periodically in each spatial dimension so the plotted
    # cell is closed.
    disp = torch.nn.functional.pad(disp, pad=[0,1,0,1,0,1], mode="circular")
    coords = problem.get_node_coords().to(dtype=disp.dtype, device=disp.device)
    deformations = problem.get_deformations(disp.transpose(-1, -3), loadings, fluctuation_scaling=1.0)[0]

    # Axis order is reversed to match PyVista's structured-grid convention.
    x, y, z = coords[2].cpu().numpy(), coords[1].cpu().numpy(), coords[0].cpu().numpy()
    grid = pv.StructuredGrid(x, y, z)
    grid['vectors'] = deformations.flatten(start_dim=1).T.cpu().numpy() * deformation_scaling
    warped = grid.warp_by_vector()

    pl = pv.Plotter()
    pl.add_mesh(warped, scalars=field.numpy().ravel(), clim=[vmin, vmax], label="stress norm", cmap="jet", lighting=True, diffuse=0.2, specular=1.0, ambient=0.6, scalar_bar_args={"vertical": True})
    pl.screenshot(filename=file, window_size=figsize)
    pl.show()
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: unocg
3
- Version: 0.0.2
3
+ Version: 0.0.3
4
4
  Summary: UNO-CG
5
5
  Author-email: Julius Herb <herb@mib.uni-stuttgart.de>, Felix Fritzen <fritzen@mib.uni-stuttgart.de>
6
6
  License-Expression: MIT
@@ -20,9 +20,9 @@ Requires-Dist: torch==2.10.0
20
20
  Requires-Dist: tqdm~=4.67.1
21
21
  Requires-Dist: torchvision~=0.25.0
22
22
  Requires-Dist: pyvista[jupyter]~=0.46.5
23
+ Requires-Dist: jupyterlab~=4.5.3
23
24
  Provides-Extra: dev
24
25
  Requires-Dist: jupytext~=1.16.4; extra == "dev"
25
- Requires-Dist: jupyterlab~=4.5.3; extra == "dev"
26
26
  Requires-Dist: pre_commit~=3.8.0; extra == "dev"
27
27
  Requires-Dist: sphinx~=7.4.7; extra == "dev"
28
28
  Requires-Dist: sphinx-rtd-theme~=2.0.0; extra == "dev"
@@ -22,8 +22,12 @@ unocg/solvers/torch.py,sha256=F0Doet7XHUkEhQVna9vBxhk-2UlgcQ-Aaa0hOd68yfs,9185
22
22
  unocg/transforms/__init__.py,sha256=NSSr3L0kcmuPB3y_MmYPoBHzT3VqmRZtQGzr_FfhvcA,63
23
23
  unocg/transforms/base.py,sha256=zQScueP3bU769xvvHnaycG2pMLlaSytESOCtlw8gx6s,321
24
24
  unocg/transforms/fourier.py,sha256=2EFYbJFyLjYyQWeOI9GPjzZ8K8DnC1OgXU3FJXYRcY0,12167
25
- unocg-0.0.2.dist-info/licenses/LICENSE,sha256=I0LwzQ8EmzsgCzCfTbRJOkQjacvRn1x7s4g0QW-8piU,1130
26
- unocg-0.0.2.dist-info/METADATA,sha256=K49ewut5lFF4zg0XME0rv_nRZyKDbMo1vvoD6F6j4WE,8112
27
- unocg-0.0.2.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
28
- unocg-0.0.2.dist-info/top_level.txt,sha256=y40WDoSbdk6M_f1_SIW7hRlUNci0U6P1bTyuz4rGGQU,6
29
- unocg-0.0.2.dist-info/RECORD,,
25
+ unocg/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
26
+ unocg/utils/data.py,sha256=u7ZxKPyklvqrkvIRsImmrjr3orkstMPpvT8pCgkUnTg,2848
27
+ unocg/utils/evaluation.py,sha256=PzmnJQJEIEX9b2nMrpypGmTyIon5x0eiSi4CiiIMZC8,1904
28
+ unocg/utils/plotting.py,sha256=ueaj6Z7rkFYGg1-kfJv45Tt8gz1lSg2hg6qoQ6cxaaI,11815
29
+ unocg-0.0.3.dist-info/licenses/LICENSE,sha256=I0LwzQ8EmzsgCzCfTbRJOkQjacvRn1x7s4g0QW-8piU,1130
30
+ unocg-0.0.3.dist-info/METADATA,sha256=S19-XBW5uvWtEaJsCYKXNSHc_Kf3e13vQnfz_hHp5jo,8096
31
+ unocg-0.0.3.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
32
+ unocg-0.0.3.dist-info/top_level.txt,sha256=y40WDoSbdk6M_f1_SIW7hRlUNci0U6P1bTyuz4rGGQU,6
33
+ unocg-0.0.3.dist-info/RECORD,,
File without changes