petal-qc 0.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of petal-qc might be problematic. Click here for more details.

Files changed (51) hide show
  1. petal_qc/BTreport/CheckBTtests.py +321 -0
  2. petal_qc/BTreport/__init__.py +0 -0
  3. petal_qc/BTreport/bustapeReport.py +144 -0
  4. petal_qc/__init__.py +14 -0
  5. petal_qc/metrology/Cluster.py +90 -0
  6. petal_qc/metrology/DataFile.py +47 -0
  7. petal_qc/metrology/PetalMetrology.py +327 -0
  8. petal_qc/metrology/__init__.py +0 -0
  9. petal_qc/metrology/all2csv.py +57 -0
  10. petal_qc/metrology/analyze_locking_points.py +597 -0
  11. petal_qc/metrology/cold_noise.py +106 -0
  12. petal_qc/metrology/comparisonTable.py +59 -0
  13. petal_qc/metrology/convert_mitutoyo.py +175 -0
  14. petal_qc/metrology/convert_smartscope.py +145 -0
  15. petal_qc/metrology/coreMetrology.py +402 -0
  16. petal_qc/metrology/data2csv.py +63 -0
  17. petal_qc/metrology/do_Metrology.py +117 -0
  18. petal_qc/metrology/flatness4nigel.py +157 -0
  19. petal_qc/metrology/gtkutils.py +120 -0
  20. petal_qc/metrology/petal_flatness.py +353 -0
  21. petal_qc/metrology/show_data_file.py +118 -0
  22. petal_qc/metrology/testSummary.py +37 -0
  23. petal_qc/metrology/test_paralelism.py +71 -0
  24. petal_qc/thermal/CSVImage.py +69 -0
  25. petal_qc/thermal/DebugPlot.py +76 -0
  26. petal_qc/thermal/IRBFile.py +768 -0
  27. petal_qc/thermal/IRCore.py +110 -0
  28. petal_qc/thermal/IRDataGetter.py +359 -0
  29. petal_qc/thermal/IRPetal.py +1338 -0
  30. petal_qc/thermal/IRPetalParam.py +71 -0
  31. petal_qc/thermal/PetalColorMaps.py +62 -0
  32. petal_qc/thermal/Petal_IR_Analysis.py +142 -0
  33. petal_qc/thermal/PipeFit.py +598 -0
  34. petal_qc/thermal/__init__.py +0 -0
  35. petal_qc/thermal/analyze_IRCore.py +417 -0
  36. petal_qc/thermal/contours.py +378 -0
  37. petal_qc/thermal/create_IRCore.py +185 -0
  38. petal_qc/thermal/pipe_read.py +182 -0
  39. petal_qc/thermal/show_IR_petal.py +420 -0
  40. petal_qc/utils/Geometry.py +756 -0
  41. petal_qc/utils/Progress.py +182 -0
  42. petal_qc/utils/__init__.py +0 -0
  43. petal_qc/utils/all_files.py +35 -0
  44. petal_qc/utils/docx_utils.py +186 -0
  45. petal_qc/utils/fit_utils.py +188 -0
  46. petal_qc/utils/utils.py +180 -0
  47. petal_qc-0.0.0.dist-info/METADATA +29 -0
  48. petal_qc-0.0.0.dist-info/RECORD +51 -0
  49. petal_qc-0.0.0.dist-info/WHEEL +5 -0
  50. petal_qc-0.0.0.dist-info/entry_points.txt +3 -0
  51. petal_qc-0.0.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,597 @@
1
+ #!/usr/bin/env python3
2
+ """Analyze the planarity of locking points."""
3
+ import math
4
+ import os
5
+ import sys
6
+ import re
7
+ import tempfile
8
+ from argparse import ArgumentParser
9
+
10
+ import matplotlib.pyplot as plt
11
+ import numpy as np
12
+
13
+ import petal_qc.utils.docx_utils as docx_utils
14
+ from petal_qc.metrology import DataFile
15
+ from petal_qc.metrology.Cluster import cluster_points
16
+ from petal_qc.utils.Geometry import fit_plane
17
+ from petal_qc.utils.Geometry import flatness_conhull
18
+ from petal_qc.utils.Geometry import project_to_plane
19
+ from petal_qc.utils.Geometry import vector_angle
20
+ from petal_qc.utils.utils import get_min_max
21
+ from petal_qc.metrology.show_data_file import show_data
22
+
23
+ figure_width = 14
24
+
25
+
26
def remove_outliers(data, cut=2.0, outliers=False, debug=False):
    """Select inlier (or outlier) indices using the median absolute deviation.

    Each point's distance to the median is scaled by the median of those
    distances (MAD); points whose scaled distance exceeds ``cut`` are
    considered outliers.

    Args:
    ----
        data : 1-D array of values.
        cut: max allowed scaled distance from the median.
        outliers: if True, return the outliers rather than the inliers.
        debug: be verbose if True (currently unused).

    Returns
    -------
        Array of indices into ``data`` selecting the inliers (default)
        or the outliers.

    """
    d = np.abs(data - np.median(data))
    mdev = np.median(d)
    # Guard against a zero MAD (more than half the points identical).
    s = d / (mdev if mdev else 1.)
    if outliers:
        return np.where(s > cut)[0]
    return np.where(s < cut)[0]
46
+
47
+
48
def find_locking_point_plane(data, zplane, splane):
    """Find the locking point plane.

    Points whose Z lies within ``splane`` of ``zplane`` are taken as
    locking-point candidates, fitted to a first plane, clustered, and
    cleaned of per-cluster outliers before the final plane fit.

    Args:
    ----
        data: the data
        zplane: estimation of plane Z
        splane: band of width for plane Z

    Returns
    -------
        V: matrix to project onto the plane
        M: mean value

    """
    # Candidate locking-point measurements: Z within the requested band.
    indx = np.where(np.abs(data[:, 2] - zplane) < splane)[0]

    # First-pass plane fit on the candidates.
    plane, V, M, L = fit_plane(data[indx])

    # Group the projected points into the locator clusters.
    clusters = cluster_points(plane, 10.0)

    # Keep, per cluster, only the points surviving the outlier cut.
    good = []
    for cl in clusters:
        kept = remove_outliers(plane[cl.xtra, 2], 0.025)
        good.extend(cl.xtra[j] for j in kept)

    # Refit using the cleaned-up points mapped back to the original indices.
    _, V, M, L = fit_plane(data[[indx[j] for j in good]])

    return V, M
85
+
86
+
87
def analyze_locking_points(fname, options):
    """Analyze locking points.

    Reads the metrology data file, runs the locking-point analysis and
    writes the resulting report to ``options.out``.

    Args:
    ----
        fname: Input file
        options: Program options.

    """
    document = docx_utils.Document()
    if options.title:
        document.add_heading(options.title, 0)

    # Load the measurement data.
    data = DataFile.read(fname, label=options.label, type=options.type)
    if data is None:
        print("Input file not found.")
        return

    analyze_locking_point_data(data,
                               nbins=options.nbins,
                               document=document,
                               save=options.save,
                               prefix=options.prefix)

    document.save(options.out)
113
+
114
+
115
def find_locator_clusters(data, distance=10, cut=2):
    """Find locators in data.

    Searches for clusters in X, Y. The bottom locking point is identified
    as the cluster farthest from all others; the returned transform moves
    it to the origin and rotates the remaining (top) locators onto a
    common direction.

    Args:
    ----
        data: Input data (N x 3 points).
        distance: distance cut for cluster finding.
                  Points farther than this belong to another cluster.
        cut: cut to remove outliers.

    Returns
    -------
        tuple with list of clusters, rotation matrix and offset

    """
    # Find clusters and drop those with too few points to be a locator.
    clst = [C for C in cluster_points(data, distance) if C.N > 3]
    nclst = len(clst)
    print("Found {} clusters".format(nclst))
    if nclst == 0:
        # Nothing survived the trim: return an identity transform
        # instead of letting np.argmax fail on an empty list.
        return [], np.array([[1, 0], [0, 1]]), np.zeros(2)

    # The bottom locking point is the one farthest from the others:
    # accumulate, per cluster, the summed squared distance to the rest.
    dst = []
    for i in range(0, nclst):
        D = 0
        Pi = np.array([clst[i].x, clst[i].y])
        for j in range(0, nclst):
            if i != j:
                Pj = np.array([clst[j].x, clst[j].y])
                D += np.sum(np.square(Pi - Pj))
        dst.append(D)

        # Select points truly lying on the locators.
        good_points = remove_outliers(data[clst[i].xtra, 2], cut=cut)
        clst[i].xtra = [clst[i].xtra[j] for j in good_points]

    # Now get the transform: translate the bottom locator to the origin...
    cindx = []
    ibot = np.argmax(dst)
    LPb = clst[ibot]
    cindx.append(ibot)
    offset = -np.array([LPb.x, LPb.y])

    # ...and rotate so the mean of the remaining clusters defines the axis.
    x = 0.0
    y = 0.0
    n = 0.0
    for i in range(0, nclst):
        if i == ibot:
            continue

        x += clst[i].x
        y += clst[i].y
        n += 1
        cindx.append(i)
    if n == 0:
        return [], np.array([[1, 0], [0, 1]]), offset

    top = np.array([x/n, y/n])
    angle = -math.atan2(top[1]+offset[1], top[0]+offset[0])
    ct = math.cos(angle)
    st = math.sin(angle)
    R = np.array([[ct, -st], [st, ct]])
    # Reorder so the bottom locator comes first.
    out = [clst[i] for i in cindx]

    return out, R, offset
186
+
187
+
188
def analyze_locking_point_data(orig_data, nbins=50, plane_fit=True, cut=3, document=None, save=None, prefix=None):
    """Analyze the locking point data.

    Fits the petal core plane, separates locator points from surface
    points, measures the locator parallelism/offset with respect to the
    nominal -0.435 mm position, fits the locator plane and reports the
    locator flatness and per-locator clusters.

    Args:
    ----
        orig_data: The data (N x 3 array of measured points).
        nbins: number of bins in histograms.
        plane_fit: if True, fit the core plane. Set to false if data
                   is already in the core plane reference
        cut: cut to remove outliers
        document: the MS word document
        save: True to save the figures
        prefix: A prefix for the figure name

    Return
    ------
        dictionary with data for DB.

    """
    outDB = {}
    if document:
        document.add_heading('Locking points', level=1)

    all_data = None
    parallelism = 0
    mean_dist = 0
    # NOTE(review): `dist` is initialized as a list here but rebound to a
    # scalar inside the loop below — the list is never used as such.
    dist = []
    fig = show_data(orig_data, "All points", view=2, out_file=all_data, color_bar=True)
    if plane_fit:
        # Find locator and surface points
        # A little bit of a 'brute force' approach: classify each point by
        # its XY distance to the three nominal locator positions.
        loc_indx = []
        indx = []
        iloc_indx = [[], [], []]
        # Nominal XY of the three locators (bottom, top-right, top-left).
        ref_pts = np.array([[0, 0], [127.916, 592.616], [-127.916, 592.616]])
        for i, P in enumerate(orig_data):
            dd = np.zeros(3)
            for j in range(3):
                dd[j] = np.linalg.norm(P[0:2] - ref_pts[j, :])

            idist = np.argmin(dd)
            dist = dd[idist]
            # Locator candidates: close to a nominal position, below the
            # surface (Z < -0.1) and not among the first measured points
            # (i > 10 — presumably skips calibration points; TODO confirm).
            if dist < 7.5 and P[2] < -0.1 and i > 10:
                loc_indx.append(i)
                iloc_indx[idist].append(i)
            else:
                indx.append(i)

        # clean up locator points: drop Z outliers per locator.
        loc_indx = []
        for j in range(3):
            ilc = remove_outliers(orig_data[iloc_indx[j], 2], cut=4, outliers=True)
            iloc_indx[j] = [i for k, i in enumerate(iloc_indx[j]) if k not in ilc]
            loc_indx.extend(iloc_indx[j])

        # Fit to the core plane
        M, TM, avg, *_ = fit_plane(orig_data[indx], use_average=7)

        # project all data to the plane
        M = project_to_plane(orig_data, TM, [0., 0., avg[2]])
        Zmean = np.mean(M[indx, 2])
        M[:, 2] -= Zmean

        # Find locators.
        locators = M[loc_indx]

        # Normal
        N = np.array([0, 0, 1]).T

        # find locator distance to plane.
        vdL = np.dot(locators, N)
        # 0.435 mm is the nominal locator depth below the petal plane.
        mean_dist = np.mean(vdL) + 0.435
    else:
        # NOTE(review): on this path `iloc_indx` is never defined, but the
        # per-locator loop further down indexes it — this would raise a
        # NameError when plane_fit is False; verify against callers.
        M = orig_data
        loc_indx = remove_outliers(orig_data[:, 2], cut=10)  # , outliers=True)
        locators = M[loc_indx]

    if document is None:
        # With no report, dump the overview plot to a temporary file.
        all_data = os.path.join(tempfile.gettempdir(), "all_data.png")

    # np.savetxt("all_data.csv", M, delimiter=',')
    fig = show_data(M, "All points", view=2, out_file=all_data, color_bar=True)
    ax = fig.get_axes()
    y_lim = ax[-1].get_ylim()
    # Grey band where locator Z values are expected (-0.535 .. -0.335 mm).
    ax[-1].fill_between([-0.535, -0.335], y_lim[0], y_lim[1], facecolor="grey", alpha=0.1)
    if document:
        txt = """All data: {} points. Left shows points after fit to core plane. \
        Right shows the Z projection. Core points should be centered at 0, \
        locators should be at the left within the gray area."""
        document.add_picture(fig, True, figure_width,
                             caption=re.sub(' +', ' ', txt).format(len(M)))
        plt.close(fig)

    # Try to find the locking points and fit to their own plane.
    # Locking points are about 0.4mm below the petal plane.
    Zp = locators[:, 2]

    # Show the Z band: deviation from the nominal -0.435 mm position.
    Zband = Zp + 0.435
    vmin, vmax, all_band = get_min_max(Zband)
    parallelism = all_band

    print("Parallelism {:.3f} mm".format(parallelism))

    # Histogram axis: at least ±0.1 mm, padded by 10 %.
    lxmin = min(vmin, -0.1)
    lxmax = max(vmax, 0.1)
    lxmin -= 0.1*abs(lxmin)
    lxmax += 0.1*abs(lxmax)
    xaxis = np.linspace(lxmin, lxmax, 50)

    parallel_fig = plt.figure(tight_layout=True)
    ax = parallel_fig.add_subplot(1, 1, 1)

    # Empty plot: puts the overall band value into the legend.
    ax.plot([], [], ' ', label="All: band {:.3f} mm".format(all_band))
    max_avg = -9999
    max_par = -9999
    for i in range(3):
        pts = M[iloc_indx[i], 2] + 0.435
        _, _, band = get_min_max(pts)
        avg_pts = np.mean(pts)
        # Track the locator average with the largest absolute deviation.
        if abs(avg_pts) > max_par:
            max_par = abs(avg_pts)
            max_avg = avg_pts

        print("Loc par. {} - {:.3f} avg: {:.3f}".format(i, band, avg_pts))
        ax.hist(pts, bins=xaxis, label="Avg {:.3f} band {:.3f} mm".format(avg_pts, band))

    # Parallelism reported to the DB: worst per-locator mean deviation
    # (signed) — overrides the all-points band computed above.
    parallelism = max_avg

    ax.legend(loc='upper left')
    y_lim = ax.get_ylim()
    ax.fill_between([-0.100, 0.100], y_lim[0], y_lim[1], facecolor="grey", alpha=0.1)
    ax.plot([mean_dist, mean_dist], [y_lim[0], y_lim[1]], '-')
    ax.text(1.05*mean_dist, 0.95*y_lim[1], "Offset {:.3f}".format(mean_dist))
    ax.set_xlabel("Out of plane (mm)")
    ax.grid()

    # Pass/fail: any point outside the ±100 µm band fails.
    out = np.where(abs(Zband) > 0.1)
    nout = len(out[0])
    txt = "Passed"
    if nout > 0:
        txt = "Failed"

    outDB["PARALLELISM"] = parallelism
    outDB["OFFSET"] = mean_dist
    print("Paralelism test: {:.4f}.\n{}".format(parallelism, txt))
    if document:
        txt = """To study parallelism, we subtract -0.435 mm to the Z values. \
        This is the nominal position of locator points. \
        Valid points should be within a ±100 µm band around 0. \
        This is shown in the plots below.\nParalelism is defined as the maximum deviation \
        of all locator Z values. A perfect core shoud have an absolute value below 100 µm."""
        document.add_paragraph("Parallelism: {:.3f} mm".format(parallelism))
        document.add_paragraph("Parallelism: Number of points outside band {:d}".format(nout))
        document.add_paragraph(re.sub(' +', ' ', txt))
        document.add_picture(parallel_fig, True, figure_width,
                             caption="Paralelism. All points should lie withn the grey band.")
        plt.close(parallel_fig)

    # Scatter view: deviation vs petal X, grey band marks the tolerance.
    parallel_fig = plt.figure(tight_layout=True)
    ax = parallel_fig.add_subplot(1, 1, 1)
    ax.plot(locators[:, 0], Zband, 'o')
    ax.fill_between(ax.get_xlim(), -0.1, 0.1, facecolor="grey", alpha=0.1)
    ax.set_xlabel("Petal X (mm)")
    ax.set_ylabel("Out of plane (mm)")
    ax.set_title("Paralellism lock points and petal core.")
    x_lim = ax.get_xlim()
    ax.plot([x_lim[0], x_lim[1]], [mean_dist, mean_dist], '-')
    ax.text(0.0, mean_dist + 0.1*abs(mean_dist), "Offset. {:.3f}".format(mean_dist))
    ax.grid()
    if document:
        document.add_picture(parallel_fig, True, figure_width,
                             caption="Paralelism. All points should lie withn the grey band.")
        plt.close(parallel_fig)

    out_file = None
    name = "LP_core_plane"
    if save is True:
        if prefix is None:
            out_file = "{}.png".format(name)
        else:
            out_file = "{}-{}.png".format(prefix, name)

        # NOTE(review): saves matplotlib's *current* figure — presumably the
        # scatter figure created above; confirm this is the intended plot.
        plt.savefig(out_file, dpi=300)

    # Fit the locator plane
    plane, V, X, L = fit_plane(locators, use_average=4)

    # find the parallelism.
    # We do this computing the angle between planes
    angle = vector_angle([0., 0., 1.], V[:, 2])
    print("parallelism (angle): {:.4f}".format(angle))

    if document:
        document.add_paragraph("Parallelism: angle between core plane and locator plane {:.4f} rad".format(angle))
        document.add_paragraph("Locking point distance to petal surface: {:.3f} mm".format(X[2]))

    # Cluster locator points to find the actual locators
    print("Clusters after plane fit.")
    clst, R, O = find_locator_clusters(locators, 10)
    # transform cluster centers into the locator reference frame.
    for C in clst:
        P = np.array([C.x, C.y])
        P = np.matmul(R, P+O)
        C.x = P[0]
        C.y = P[1]

    # Apply the same transform to the individual locator points.
    for i in range(locators.shape[0]):
        locators[i, 0:2] = np.matmul(R, locators[i, 0:2] + O)

    print("Cluster Z")
    good = []
    for C in clst:
        good.extend(C.xtra)
        print("{:7.3f} {:7.3f} {:7.3f}".format(C.x, C.y, C.z))

    if len(good):
        flatness = flatness_conhull(plane[good, :])
        print("Locking point flatness: {:.1f}".format(1000*flatness))
    else:
        print("Not enough points to compute flatness.")
        # Sentinel value recorded in the DB when flatness cannot be computed.
        flatness = -1.e-3

    outDB["COPLANARITY_LOCATORS"] = flatness
    if document:
        document.add_paragraph("Locking point flatness (convex hull): {:.1f} µm.".format(1000*flatness))

    # Show all data
    fig = show_data(plane[good, :], "All", nbins=nbins, color_bar=True)
    if document:
        document.add_paragraph("All data points in locator plane.")
        document.add_picture(fig, True, figure_width, caption="All points in locator plane.")
        document.add_paragraph("Locator points.")
        plt.close(fig)

    # Per-cluster summary: one figure and Z statistics per locator.
    print("Clusters:")
    for iclst, C in enumerate(clst):
        if save:
            if prefix is None:
                out_file = "cluster_{}.png".format(iclst)
            else:
                out_file = "{}-cluster_{}.png".format(prefix, iclst)
        else:
            out_file = None

        print("\tCluster {}: {:.3f} {:.3f} {:.3f}".format(iclst, C.x, C.y, C.z))
        fig = show_data(plane[C.xtra, :],
                        "Cluster: {:.3f}, {:.3f}".format(C.x, C.y),
                        nbins=nbins,
                        color_bar=True)
        zmean = np.mean(plane[C.xtra, 2])
        zstd = np.std(plane[C.xtra, 2])
        print("\t\t-> Z mean {:.3f} std {:.3f} [{}]".format(zmean, zstd, len(C.xtra)))
        if document:
            document.add_picture(fig, True, figure_width,
                                 caption="Cluster {}. Number of points {}".format(iclst, len(C.xtra)))
            plt.close(fig)

    return outDB
447
+
448
+
449
def locking_point_positions(positions, document=None):
    """Make a report on locking point positions.

    Compares measured locator and fiducial positions with their nominal
    values and, optionally, writes the comparison tables to the report.

    Args:
    ----
        positions: Measured values, one row per item in the order
                   PL01, PL02, PL03, FD01, FD02 (x, y, diameter).
        document: The document (docx). Defaults to None.

    Return
    ------
        outDB: dictionary with DB parameters.

    """
    # Work on a copy: the X-sign flip below must not mutate the caller's
    # array (the original code modified `positions` in place through views).
    positions = np.array(positions, dtype=float)

    # Nominal values for "front". Back have oposite sign on X.
    outDB = {}
    key_delta = ["PL01", "PL02", "PL03"]
    key_rel_pos = ["PL01-FD01", "PL01-FD02", "FD01-FD02"]
    nom_values = (None,
                  np.array((0, 0, 4)),  # Bottom locator (PL01)
                  np.array((127.916, 592.616, 4)),  # Top slot (PL02)
                  np.array((-127.916, 592.616, 5)),  # Top Locator (PL03)
                  np.array((0, 3, 0.3)),  # Bottom Fid. (FD01)
                  np.array((131.104, 589.526, 0.3)))  # Top Fid (FD02)

    # Nominal relative XY vectors between key pairs.
    rel_nom = [
        nom_values[1][0:2] - nom_values[4][0:2],  # PL01-FD01
        nom_values[1][0:2] - nom_values[5][0:2],  # PL01-FD02
        nom_values[4][0:2] - nom_values[5][0:2],  # FD01-FD02
    ]
    deltas = []
    # Detect a "back" measurement: the top-slot X sign disagrees with
    # nominal, in which case all measured X values are mirrored.
    factor = 1.0
    if nom_values[2][0] * positions[1, 0] < 0:
        factor = -1

    delta_pos = []
    for i, val in enumerate(nom_values):
        if val is None:
            continue

        xxx = positions[i-1, :]
        xxx[0] *= factor
        dt = val - xxx
        dt_abs = np.sqrt(dt[0]**2+dt[1]**2)
        # FD01 (i == 4) is excluded from the location deltas.
        if i != 4:
            delta_pos.append(dt)

        # Row: measured x, y, diameter; |dX|, |dY|, |dXY|, d(diameter).
        row = [xxx[0], xxx[1], xxx[2], abs(dt[0]), abs(dt[1]), dt_abs, dt[2]]
        deltas.append(row)

    # Measured relative XY vectors for the same key pairs.
    rel_pos = [
        np.array([deltas[0][0]-deltas[3][0], deltas[0][1]-deltas[3][1]]),  # PL01-FD01
        np.array([deltas[0][0]-deltas[4][0], deltas[0][1]-deltas[4][1]]),  # PL01-FD02
        np.array([deltas[3][0]-deltas[4][0], deltas[3][1]-deltas[4][1]]),  # FD01-FD02
    ]
    outDB["LOCATION_DELTA"] = dict(zip(key_delta, [v[0:2].tolist() for v in delta_pos]))
    outDB["REL_POS_DELTA"] = dict(zip(key_rel_pos, [(rel_nom[i] - rel_pos[i]).tolist() for i in range(3)]))
    outDB["FD01_DIAM"] = deltas[3][2]
    outDB["FD02_DIAM"] = deltas[4][2]
    outDB["CHECK_BOT_LOC_DIAM"] = deltas[0][6]
    outDB["CHECK_OVERSIZE_LOC_DIAM"] = deltas[2][6]
    outDB["CHECK_SLOT_LOC_DIAM"] = deltas[1][6]

    # Nominal and measured locator-to-fiducial distances.
    nPL1 = np.linalg.norm(nom_values[1][0:2] - nom_values[4][0:2])
    nPL2 = np.linalg.norm(nom_values[2][0:2] - nom_values[5][0:2])

    dPL1 = np.linalg.norm(positions[0, 0:2] - positions[3, 0:2])
    dPL2 = np.linalg.norm(positions[1, 0:2] - positions[4, 0:2])
    deltaPL1 = (nPL1-dPL1)
    deltaPL2 = (nPL2-dPL2)
    # NOTE(review): fPL1/fPL2 are computed but only the REL_POS_DELTA
    # pass/fail flags below are reported — confirm this is intended.
    fPL1 = "PASSED" if abs(deltaPL1) <= 0.025 else "FAILED"
    fPL2 = "PASSED" if abs(deltaPL2) <= 0.025 else "FAILED"

    for key, val in outDB["REL_POS_DELTA"].items():
        deltaPL = np.linalg.norm(val)
        fPL = "PASSED" if abs(deltaPL) <= 0.025 else "FAILED"
        print("Distance {}: {:.3f} mm ({})".format(key, deltaPL, fPL))

    if document:
        document.add_heading('Position of Locking Points and Fiducials', level=1)
        txt = """Position of locators and deviation from nominal positions. \
        The table below shows the nominal positions."""
        document.add_paragraph(re.sub(' +', ' ', txt))
        table = document.insert_table(rows=6, cols=4, caption="Nominal Position of Locking Points and fiducials.")
        table.style = document.styles['Table Grid']

        header = ("", "X(mm)", "Y (mm)", "∅ (mm)")
        for i, C in enumerate(header):
            table.rows[0].cells[i].text = C

        items = ("", "Bottom Loc. (PL01)", "Top Slot (PL02)",
                 "Top Loc. (PL03)", "Bottom Fid. (FD01)", "Top Fid. (FD02)")
        for i, C in enumerate(items):
            table.rows[i].cells[0].text = C
            if nom_values[i] is not None:
                for j, v in enumerate(nom_values[i]):
                    table.rows[i].cells[j+1].text = "{:.3f}".format(v)

        document.add_paragraph("\nThe table below shows the measured positions and the actual deviations from nominal.")
        table = document.insert_table(rows=6, cols=8, caption="Measured position of Locking Points and fiducials.")
        table.style = document.styles['Table Grid']

        header = ("", "X(mm)", "Y (mm)", "∅ (µm)", "ΔX (µm)", "ΔY (µm)", "|Δ| (µm)", "Δ∅ (µm)")
        for i, C in enumerate(header):
            table.rows[0].cells[i].text = C

        items = ("", "PL01", "PL02", "PL03", "FD01", "FD02")
        for i, C in enumerate(items):
            table.rows[i].cells[0].text = C
            if i:
                for j, v in enumerate(deltas[i-1]):
                    table.rows[i].cells[j+1].text = "{:.3f}".format(v)

        document.add_paragraph("")
        for key, val in outDB["REL_POS_DELTA"].items():
            deltaPL = np.linalg.norm(val)
            fPL = "PASSED" if abs(deltaPL) <= 0.025 else "FAILED"
            document.add_paragraph("Distance {}: {:.3f} mm ({})".format(key, deltaPL, fPL))

    return outDB
568
+
569
+
570
+ if __name__ == "__main__":
571
+ parser = ArgumentParser()
572
+ parser.add_argument('files', nargs='*', help="Input files")
573
+ parser.add_argument("--prefix", dest='prefix', default=None)
574
+ parser.add_argument("--save", dest='save', action="store_true", default=False)
575
+ parser.add_argument("--Z-plane", dest='zplane', type=float,
576
+ default=-0.45, help="Estimated value plate Z plane")
577
+ parser.add_argument("--W-plane", dest='splane', type=float,
578
+ default=0.2, help="Estimated width in Z pf points in plale")
579
+ parser.add_argument("--out", dest="out", default="locking_points.docx",
580
+ type=str, help="The output fiel name")
581
+ parser.add_argument("--title", dest="title", default=None,
582
+ type=str, help="Document title")
583
+ parser.add_argument("--nbins", dest="nbins", default=50,
584
+ type=int, help="Number of bins")
585
+
586
+ # This is to convert a CMM file
587
+ parser.add_argument("--label", default="\\w+", help="The label to select")
588
+ parser.add_argument("--type", default="Punto", help="The class to select")
589
+
590
+ options = parser.parse_args()
591
+ if len(options.files) == 0:
592
+ print(sys.argv[0])
593
+ print("I need an input file")
594
+ sys.exit()
595
+
596
+ analyze_locking_points(options.files[0], options)
597
+ plt.show()
@@ -0,0 +1,106 @@
1
+ #!/usr/bin/env python3
2
+ """Analyze COLDBOX files."""
3
+ import sys
4
+ import os
5
+ from argparse import ArgumentParser
6
+ from pathlib import Path
7
+ import fnmatch
8
+
9
+ import matplotlib.pyplot as plt
10
+ import numpy as np
11
+ import pandas as pd
12
+
13
+
14
def all_files(root, patterns='*', single_level=False, yield_folders=False):
    """A generator that returns all files in the given folder.

    Args:
    ----
        root (file path): The folder
        patterns (str, optional): Semicolon-separated glob patterns. Defaults to '*'.
        single_level (bool, optional): If true, do not go into sub folders. Defaults to False.
        yield_folders (bool, optional): If True, return folders as well. Defaults to False.

    Yields
    ------
        A file path (str)

    """
    patterns = patterns.split(';')
    for path, subdirs, files in os.walk(root):
        if yield_folders:
            files.extend(subdirs)

        files.sort()
        for name in files:
            # Yield the entry once if it matches any requested pattern.
            if any(fnmatch.fnmatch(name, pattern) for pattern in patterns):
                yield os.path.join(path, name)

        if single_level:
            break
43
+
44
+
45
+ def bad_line(lst):
46
+ """Fixes lines for read_csv."""
47
+ out = [lst[i] for i in range(5)]
48
+ out.append(" ".join(lst[5:]))
49
+ return out
50
+
51
+
52
+ def analyze_folder(folder_list, options):
53
+ """Analyze files in input folder."""
54
+ for folder in folder_list:
55
+ folder = Path(folder).expanduser().resolve()
56
+ if not folder.exists():
57
+ print("### Folder {} does not exist !".format(folder))
58
+ continue
59
+
60
+ fig, ax = plt.subplots(1, 1, tight_layout=True)
61
+ fig.suptitle("Noise .vs. channel")
62
+ ax.set_xlabel("Channel")
63
+ ax.set_ylabel("innse")
64
+ meanV = []
65
+ for F in all_files(folder, '*.txt', True):
66
+ df = pd.read_csv(F,
67
+ #names=["chan", "code", "gain", "vt50", "innse", "comment"],
68
+ header=None, skiprows=lambda x: x == 0,
69
+ delim_whitespace=True,
70
+ engine="python", on_bad_lines=bad_line)
71
+ x = df[0].values
72
+ y = df[4].values
73
+ ymax = np.argmax(y)
74
+ ymin = np.argmin(y)
75
+ if y[ymax] > 2000:
76
+ print("{} Max: {}".format(ymax, y[ymax]))
77
+
78
+ if y[ymin] <= 0:
79
+ print("{} Min: {}".format(ymin, y[ymin]))
80
+
81
+ indx = np.where((y < 2500) & (y > 0))[0]
82
+ ymean = np.mean(y[indx])
83
+ meanV.append(ymean)
84
+
85
+ ax.plot(x, y, '-', label=Path(F).name)
86
+
87
+ yavg = np.nanmean(meanV)
88
+ ax.set_ylim(0.5*yavg, 1.5*yavg)
89
+ ax.grid()
90
+
91
+ #ax.legend()
92
+
93
+ plt.show()
94
+
95
+
96
+ if __name__ == "__main__":
97
+ parser = ArgumentParser()
98
+ parser.add_argument('files', nargs='*', help="Input files")
99
+
100
+ options = parser.parse_args()
101
+ if len(options.files) == 0:
102
+ print(sys.argv[0])
103
+ print("I need an input file")
104
+ sys.exit()
105
+
106
+ analyze_folder(options.files, options)