servalcat 0.4.131__cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. servalcat/__init__.py +10 -0
  2. servalcat/__main__.py +120 -0
  3. servalcat/ext.cpython-314t-x86_64-linux-gnu.so +0 -0
  4. servalcat/refine/__init__.py +0 -0
  5. servalcat/refine/cgsolve.py +100 -0
  6. servalcat/refine/refine.py +1162 -0
  7. servalcat/refine/refine_geom.py +245 -0
  8. servalcat/refine/refine_spa.py +400 -0
  9. servalcat/refine/refine_xtal.py +339 -0
  10. servalcat/refine/spa.py +151 -0
  11. servalcat/refine/xtal.py +312 -0
  12. servalcat/refmac/__init__.py +0 -0
  13. servalcat/refmac/exte.py +191 -0
  14. servalcat/refmac/refmac_keywords.py +660 -0
  15. servalcat/refmac/refmac_wrapper.py +423 -0
  16. servalcat/spa/__init__.py +0 -0
  17. servalcat/spa/fofc.py +488 -0
  18. servalcat/spa/fsc.py +391 -0
  19. servalcat/spa/localcc.py +197 -0
  20. servalcat/spa/realspcc_from_var.py +128 -0
  21. servalcat/spa/run_refmac.py +979 -0
  22. servalcat/spa/shift_maps.py +293 -0
  23. servalcat/spa/shiftback.py +137 -0
  24. servalcat/spa/translate.py +129 -0
  25. servalcat/utils/__init__.py +35 -0
  26. servalcat/utils/commands.py +1629 -0
  27. servalcat/utils/fileio.py +836 -0
  28. servalcat/utils/generate_operators.py +296 -0
  29. servalcat/utils/hkl.py +811 -0
  30. servalcat/utils/logger.py +140 -0
  31. servalcat/utils/maps.py +345 -0
  32. servalcat/utils/model.py +933 -0
  33. servalcat/utils/refmac.py +759 -0
  34. servalcat/utils/restraints.py +888 -0
  35. servalcat/utils/symmetry.py +298 -0
  36. servalcat/xtal/__init__.py +0 -0
  37. servalcat/xtal/french_wilson.py +262 -0
  38. servalcat/xtal/run_refmac_small.py +240 -0
  39. servalcat/xtal/sigmaa.py +1954 -0
  40. servalcat/xtal/twin.py +316 -0
  41. servalcat-0.4.131.dist-info/METADATA +60 -0
  42. servalcat-0.4.131.dist-info/RECORD +45 -0
  43. servalcat-0.4.131.dist-info/WHEEL +6 -0
  44. servalcat-0.4.131.dist-info/entry_points.txt +4 -0
  45. servalcat-0.4.131.dist-info/licenses/LICENSE +373 -0
servalcat/xtal/twin.py ADDED
@@ -0,0 +1,316 @@
+ """
+ Author: "Keitaro Yamashita, Garib N. Murshudov"
+ MRC Laboratory of Molecular Biology
+
+ This software is released under the
+ Mozilla Public License, version 2.0; see LICENSE.
+ """
+ from __future__ import absolute_import, division, print_function, generators
+ import argparse
+ import gemmi
+ import numpy
+ import pandas
+ import scipy.optimize
+ from servalcat.utils import logger
+ from servalcat import utils
+ from servalcat import ext
+
+ def calculate_obliquity(gv, twin_op):
+     """
+     Beforehand, the following calculation must be done
+     gv = gemmi.GruberVector(cell, sg.centring_type(), True)
+     gv.niggli_reduce()
+     """
+     def get_axis(r):
+         eigenvalues, eigenvectors = numpy.linalg.eig(r)
+         idx = numpy.argmin(numpy.abs(eigenvalues - 1))
+         return eigenvectors[:,idx].real
+     reduced_cell = gv.get_cell()
+     orth = numpy.array(reduced_cell.orth.mat.tolist())
+     frac = numpy.array(reduced_cell.frac.mat.tolist())
+     op = gv.change_of_basis.inverse().combine(twin_op.as_xyz()).combine(gv.change_of_basis)
+     r = numpy.array(op.rot) / op.DEN
+     u = get_axis(r)
+     h = get_axis(numpy.linalg.inv(r.transpose()))
+     tau = h.dot(frac)
+     t = orth.dot(u)
+     obl_deg = numpy.rad2deg(numpy.arccos(numpy.clip(t.dot(tau) / numpy.linalg.norm(t) / numpy.linalg.norm(tau), -1, 1)))
+     return obl_deg
+ # calculate_obliquity()
+
+ def find_twin_domains_from_data(hkldata, max_oblique=5, min_cc=0.2):
+     logger.writeln("Finding possible twin operators from data")
+     ops = gemmi.find_twin_laws(hkldata.cell, hkldata.sg, max_oblique, False)
+     logger.writeln(f" {len(ops)} possible twin operator(s) found")
+     #for op in ops:
+     #    logger.writeln(f" {op.triplet()}")
+     if not ops:
+         logger.writeln("")
+         return None, None
+     gv = gemmi.GruberVector(hkldata.cell, hkldata.sg.centring_type(), True)
+     gv.niggli_reduce()
+     twin_data = ext.TwinData()
+     twin_data.setup(hkldata.miller_array(), hkldata.df.bin_ml, hkldata.sg, hkldata.cell, ops)
+     if "I" in hkldata.df:
+         Io = hkldata.df.I.to_numpy()
+     else:
+         Io = hkldata.df.FP.to_numpy()**2
+     ccs, nums = [], []
+     tmp = []
+     for i_bin, bin_idxes in hkldata.binned("ml"):
+         ccs.append([])
+         nums.append([])
+         rs = []
+         for i_op, op in enumerate(ops):
+             cc = r = numpy.nan
+             ii = numpy.array(twin_data.pairs(i_op, i_bin))
+             val = numpy.all(numpy.isfinite(Io[ii]), axis=1) if ii.size != 0 else []
+             if numpy.sum(val) != 0:
+                 cc = numpy.corrcoef(Io[ii][val].T)[0,1]
+                 r = numpy.sum(numpy.abs(Io[ii][val, 0] - Io[ii][val, 1])) / numpy.sum(Io[ii][val])
+             ccs[-1].append(cc)
+             rs.append(r)
+             nums[-1].append(len(val))
+         tmp.append(rs + ccs[-1] + nums[-1])
+     df = pandas.DataFrame(tmp, columns=[f"{n}_op{i+1}" for n in ("R", "CC", "num") for i in range(len(ops))])
+     with logger.with_prefix(" "):
+         logger.writeln(df.to_string(float_format="%.4f"))
+     ccs = numpy.array(ccs)
+     nums = numpy.array(nums)
+     tmp = [{"Operator": "h,k,l",
+             "Obliquity": 0,
+             "R_twin_obs": 0,
+             "CC_mean": 1}]
+     for i_op, op in enumerate(ops):
+         ii = numpy.array(twin_data.pairs(i_op))
+         val = numpy.all(numpy.isfinite(Io[ii]), axis=1)
+         if numpy.sum(val) == 0:
+             r_obs = numpy.nan
+         else:
+             r_obs = numpy.sum(numpy.abs(Io[ii][val, 0] - Io[ii][val, 1])) / numpy.sum(Io[ii][val])
+         good = numpy.isfinite(ccs[:,i_op])
+         cc = numpy.sum(nums[good,i_op] * ccs[good,i_op]) / numpy.sum(nums[good,i_op])
+         tmp.append({"Operator": op.as_hkl().triplet(),
+                     "Obliquity": calculate_obliquity(gv, op),
+                     "CC_mean": cc,
+                     "R_twin_obs": r_obs,
+                     })
+     df = pandas.DataFrame(tmp)
+     with logger.with_prefix(" "):
+         logger.writeln(df.to_string(float_format="%.2f"))
+
+     sel = df["CC_mean"].to_numpy() > min_cc
+     if sel[1:].sum() == 0:
+         logger.writeln(" No possible twinning detected\n")
+         return None, None
+
+     if 0:#not sel.all():
+         ops = [ops[i] for i in range(len(ops)) if sel[i+1]]
+         logger.writeln(f"\n Twin operators after filtering small correlations (<= {min_cc})")
+         df = df[sel]
+         with logger.with_prefix(" "):
+             logger.writeln(df.to_string(float_format="%.2f"))
+         twin_data = ext.TwinData()
+         twin_data.setup(hkldata.miller_array(), hkldata.df.bin_ml, hkldata.sg, hkldata.cell, ops)
+     twin_data.alphas = [1. / len(twin_data.alphas) for _ in range(len(twin_data.alphas)) ]
+     if "I" not in hkldata.df:
+         logger.writeln('Generating "observed" intensities for twin refinement: Io = Fo**2, SigIo = 2*F*SigFo')
+         hkldata.df["I"] = hkldata.df.FP**2
+         hkldata.df["SIGI"] = 2 * hkldata.df.FP * hkldata.df.SIGFP
+     logger.writeln("")
+     return twin_data, df
+
+ # find_twin_domains_from_data()
+
+ def estimate_twin_fractions_from_model(twin_data, hkldata, min_alpha=0.02):
+     logger.writeln("Estimating twin fractions")
+     Ic = numpy.abs(twin_data.f_calc.sum(axis=1))**2
+     idx_all = twin_data.twin_related(hkldata.sg)
+     Ic_all = Ic[idx_all]
+     Ic_all[(idx_all < 0).any(axis=1)] = numpy.nan
+     rr = twin_data.obs_related_asu()
+     tmp = []
+     P_list, cc_oc_list, weight_list = [], [], []
+     n_ops = len(twin_data.ops) + 1
+     tidxes = numpy.triu_indices(n_ops, 1)
+     if "CC*" in hkldata.binned_df["ml"]:
+         logger.writeln(" data-correlations are corrected using CC*")
+     for i_bin, bin_idxes in hkldata.binned("ml"): # XXX
+         i_tmp = Ic_all[numpy.asarray(twin_data.bin)==i_bin,:]
+         i_tmp = i_tmp[numpy.isfinite(i_tmp).all(axis=1)]
+         P = numpy.corrcoef(i_tmp.T)
+         iobs = hkldata.df.I.to_numpy()[bin_idxes]
+         ic_bin = Ic[rr[bin_idxes,:]]
+         val = numpy.isfinite(iobs) & numpy.isfinite(ic_bin).all(axis=1) & numpy.all(rr[bin_idxes,:]>=0, axis=1)
+         iobs, ic_bin = iobs[val], ic_bin[val,:]
+         cc_star = hkldata.binned_df["ml"]["CC*"][i_bin] if "CC*" in hkldata.binned_df["ml"] else 1
+         if cc_star < 0.5:
+             break
+         cc_oc = [numpy.corrcoef(iobs, ic_bin[:,i])[0,1] / cc_star for i in range(n_ops)]
+         P_list.append(P)
+         cc_oc_list.append(cc_oc)
+         weight_list.append(numpy.sum(val))
+         frac_est = numpy.dot(numpy.linalg.pinv(P), cc_oc)
+         frac_est /= frac_est.sum()
+         tmp.append(P[tidxes].tolist() + cc_oc + [weight_list[-1]] + frac_est.tolist())
+
+     good = numpy.logical_and(numpy.isfinite(P_list).any(axis=(1,2)), numpy.isfinite(cc_oc_list).any(axis=1))
+     P_list = numpy.array(P_list)[good]
+     cc_oc_list = numpy.array(cc_oc_list)[good]
+     weight_list = numpy.array(weight_list)[good]
+     P = numpy.average(P_list, axis=0, weights=weight_list)
+     cc_oc = numpy.average(cc_oc_list, axis=0, weights=weight_list)
+     frac_est = numpy.dot(numpy.linalg.pinv(P), cc_oc)
+     frac_est = numpy.maximum(0, frac_est)
+     frac_est /= frac_est.sum()
+     df = pandas.DataFrame(tmp, columns=[f"cc_{i+1}_{j+1}" for i, j in zip(*tidxes)] +
+                           [f"cc_o_{i+1}" for i in range(n_ops)] +
+                           ["nref"] + [f"raw_est_{i+1}" for i in range(n_ops)])
+     with logger.with_prefix(" "):
+         logger.writeln(df.to_string(float_format="%.4f"))
+     logger.write(" Final twin fraction estimate: ")
+     logger.writeln(" ".join("%.4f"%x for x in frac_est))
+     twin_data.alphas = frac_est
+
+     if numpy.logical_and(0 < frac_est, frac_est < min_alpha).any():
+         frac_est[frac_est < min_alpha] = 0.
+         frac_est /= frac_est.sum()
+         logger.write(" Small fraction removed: ")
+         logger.writeln(" ".join("%.4f"%x for x in frac_est))
+         twin_data.alphas = frac_est
+
+     return df
+
+ def mlopt_twin_fractions(hkldata, twin_data, b_aniso):
+     k_ani2_inv = 1 / hkldata.debye_waller_factors(b_cart=b_aniso)**2
+     Io = hkldata.df.I.to_numpy(copy=True) * k_ani2_inv
+     sigIo = hkldata.df.SIGI.to_numpy(copy=True) * k_ani2_inv
+     def fun(x):
+         #x = numpy.clip(x, 0, 1)
+         #twin_data.alphas = x.tolist() + [1-x.sum()]
+         twin_data.alphas = x
+         twin_data.est_f_true(Io, sigIo, 10)
+         ret = twin_data.ll(Io, sigIo)
+         return ret
+     def grad(x):
+         #x = numpy.clip(x, 0, 1)
+         #twin_data.alphas = x.tolist() + [1-x.sum()]
+         twin_data.alphas = x
+         twin_data.est_f_true(Io, sigIo, 10)
+         return twin_data.ll_der_alpha(Io, sigIo, True)
+     if 0:
+         bak = [_ for _ in twin_data.alphas]
+         with open("alpha_ll.csv", "w") as ofs:
+             ofs.write("a,ll,ll_new,der1,der2,der_new1,der_new2\n")
+             for a in numpy.linspace(0., 1.0, 100):
+                 x = [a, 1-a]
+                 twin_data.alphas = x
+                 twin_data.est_f_true(Io, sigIo, 100)
+                 f_new = twin_data.ll(Io, sigIo)
+                 f = twin_data.ll_rice()
+                 der = twin_data.ll_der_alpha(Io, sigIo, False)
+                 #der = [x - der[-1] for x in der[:-1]]
+                 der_new = twin_data.ll_der_alpha(Io, sigIo, True)
+                 #der_new = [x - der_new[-1] for x in der_new[:-1]]
+                 ofs.write(f"{a},{f},{f_new},{der[0]},{der[1]},{der_new[0]},{der_new[1]}\n")
+             ofs.write("\n")
+         twin_data.alphas = bak
+         quit()
+     if 0:
+         x0 = [x for x in twin_data.alphas]
+         f0 = fun(x0)
+         ader = grad(x0)
+
+         print(f"{ader=}")
+         for e in (1e-2, 1e-3, 1e-4, 1e-5):
+             nder = []
+             for i in range(len(x0)):
+                 x = [_ for _ in x0]
+                 x[i] += e
+                 f1 = fun(x)
+                 nder.append((f1 - f0) / e)
+             print(f"{e=} {nder=}")
+
+     logger.writeln("ML twin fraction refinement..")
+     num_params = len(twin_data.alphas)
+     logger.writeln(" starting with " + " ".join("%.4f"%x for x in twin_data.alphas))
+     f0 = fun(numpy.asarray(twin_data.alphas))
+     logger.writeln(f" f0= {f0}")
+     if 0:
+         A = numpy.ones((1, num_params))
+         linear_constraint = scipy.optimize.LinearConstraint(A, 0., 1.)#[1.0], [1.0])
+         bounds = scipy.optimize.Bounds(numpy.zeros(num_params), numpy.ones(num_params))
+         res = scipy.optimize.minimize(fun=fun, x0=twin_data.alphas[:-1],
+                                       bounds=bounds,
+                                       constraints=[linear_constraint],
+                                       jac=grad,
+                                       method="trust-constr", #"SLSQP",
+                                       #callback=lambda *x: logger.writeln(f"callback {x}"),
+                                       )
+         logger.writeln(" finished in {} iterations ({} evaluations)".format(res.nit, res.nfev))
+         logger.writeln(f" f = {res.fun}")
+         logger.writeln(str(res))
+         # ensure constraints
+         alphas = numpy.clip(res.x, 0, 1)
+         twin_data.alphas = alphas.tolist() + [1-alphas.sum()] #list(alphas / alphas.sum())
+     else:
+         def x2alpha(x):
+             x = numpy.clip(x, 0, 1)
+             if x.sum() > 1: x /= x.sum()
+             return x.tolist() + [numpy.clip(1 - x.sum(), 0, 1)]
+         x = twin_data.alphas[:-1]
+         for cyc in range(200):
+             der1, tmp = twin_data.ll_der_alpha(Io, sigIo, True)
+             #print("Der:")
+             #print(der1)
+             #print(tmp)
+             #print(-der1.dot(numpy.linalg.pinv(tmp)))
+             if 0:
+                 bak = [_ for _ in twin_data.alphas]
+                 with open("alpha_ll.csv", "w") as ofs:
+                     ofs.write("a,ll,der1\n")
+                     for x in numpy.linspace(0., 1.0, 100):
+                         twin_data.alphas = x2alpha([x])
+                         twin_data.est_f_true(Io, sigIo, 100)
+                         f = twin_data.ll(Io, sigIo)
+                         der1, der2 = twin_data.ll_der_alpha(Io, sigIo, True)
+                         der1 = der1[:-1] - der1[-1]
+                         ofs.write(f"{x},{f},{der1[0]}\n")
+                 #twin_data.alphas = bak
+             #numpy.nan_to_num(der, False)
+             #print(der)
+             #der1 = numpy.sum(der, axis=1)
+             der1 = der1[:-1] - der1[-1]
+             #tmp = der.dot(der.T)
+             der2 = tmp[:-1,:-1].copy()
+             N = der2.shape[0]
+             #der2[numpy.diag_indices(N)] += tmp[-1, -1]
+             der2 += tmp[-1, -1]
+             for i in range(N):
+                 for j in range(N):
+                     der2[i, j] -= tmp[i, -1] + tmp[j, -1]
+             #rows, cols = numpy.indices((N, N))
+             #non_diag_rows, non_diag_cols = numpy.where(rows != cols)
+             #der2[non_diag_rows, non_diag_cols] -= tmp[:-1,-1][non_diag_rows]
+             shift = -der1.dot(numpy.linalg.pinv(der2))
+             x += shift
+             twin_data.alphas = x2alpha(x)
+             twin_data.est_f_true(Io, sigIo)
+             f = twin_data.ll(Io, sigIo)
+             logger.writeln(f"{cyc=} {shift=} alpha={twin_data.alphas} f={f} diff={f - f0}")
+             if f > f0:
+                 x -= shift
+                 twin_data.alphas = x2alpha(x)
+                 twin_data.est_f_true(Io, sigIo)
+                 break
+             if numpy.abs(shift).max() < 1e-4:
+                 break
+             f0 = f
+         if 0:
+             with open("debug.csv", "w") as ofs:
+                 ofs.write("scale,f\n")
+                 for i in range(10):
+                     ofs.write(f"{i},{fun(x2alpha(x + i * shift))}\n")
+             quit()
+     logger.write(" ML twin fraction estimate: ")
+     logger.writeln(" ".join("%.4f"%x for x in twin_data.alphas))
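For orientation, the docstring of `calculate_obliquity` above notes that a Niggli-reduced `GruberVector` must be prepared first, and `find_twin_domains_from_data` obtains candidate operators via `gemmi.find_twin_laws`. Below is a minimal standalone sketch of that call pattern, using only calls that appear in this file; the cell and space group are made-up illustration values, not taken from the package.

```python
# Hypothetical driver sketch: list candidate twin laws for an arbitrary cell and
# score each with calculate_obliquity, mirroring find_twin_domains_from_data above.
import gemmi
from servalcat.xtal.twin import calculate_obliquity

cell = gemmi.UnitCell(50, 50, 60, 90, 90, 90)   # made-up tetragonal cell
sg = gemmi.SpaceGroup("P 41")                   # made-up space group

# Niggli-reduced Gruber vector, as required by calculate_obliquity's docstring
gv = gemmi.GruberVector(cell, sg.centring_type(), True)
gv.niggli_reduce()

ops = gemmi.find_twin_laws(cell, sg, 5, False)  # max obliquity 5 deg, the default above
for op in ops:
    print(op.as_hkl().triplet(), "obliquity = %.2f deg" % calculate_obliquity(gv, op))
```

The fraction estimation itself (`estimate_twin_fractions_from_model`, `mlopt_twin_fractions`) additionally requires an `hkldata` object and `ext.TwinData`, which are set up elsewhere in the package and are not reproduced here.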
servalcat-0.4.131.dist-info/METADATA ADDED
@@ -0,0 +1,60 @@
+ Metadata-Version: 2.2
+ Name: servalcat
+ Version: 0.4.131
+ Summary: Structure refinement and validation for crystallography and single particle analysis
+ Author: Keitaro Yamashita, Garib N. Murshudov
+ License: MPL-2.0
+ Project-URL: repository, https://github.com/keitaroyam/servalcat
+ Requires-Python: >=3.8
+ Requires-Dist: packaging
+ Requires-Dist: numpy>=1.15
+ Requires-Dist: scipy
+ Requires-Dist: pandas>=1.1.0
+ Requires-Dist: omegaconf==2.3.0
+ Requires-Dist: gemmi==0.7.4
+ Description-Content-Type: text/markdown
+
+ # Servalcat
+ [![Build](https://github.com/keitaroyam/servalcat/workflows/CI/badge.svg)](https://github.com/keitaroyam/servalcat/actions/workflows/ci.yml)
+ [![PyPI](https://img.shields.io/pypi/v/servalcat?color=blue)](https://pypi.org/project/servalcat/)
+
+ **S**tructur**e** **r**efinement and **val**idation for **c**rystallography and single p**a**r**t**icle analysis
+
+ Servalcat implements pipelines that use Refmac5:
+ * `servalcat refine_spa`: cryo-EM SPA refinement pipeline
+ * `servalcat refine_cx`: small molecule crystallography
+
+ and a Refmac5 controller:
+ * `refmacat`: behaves as Refmac, but uses GEMMI for restraint generation instead of MAKECIF
+
+ "No Refmac5" refinement programs are now under active development:
+ * `servalcat refine_geom`: geometry optimization
+ * `servalcat refine_spa_norefmac`: "No Refmac" version of refine\_spa
+ * `servalcat refine_xtal_norefmac`: crystallographic refinement
+
+ It also provides several utility commands: `servalcat util`.
+
+ ## Installation
+
+ ```shell
+ # PyPI
+ pip install servalcat
+
+ # Conda
+ conda install conda-forge::servalcat
+ ```
+ Either command will install the stable version.
+
+ The required GEMMI version is now [v0.7.4](https://github.com/project-gemmi/gemmi/releases/tag/v0.7.4). Servalcat may not work with the latest GEMMI code from GitHub; the policy is that the main branch only contains code that works with the latest released GEMMI package.
+
+ To use the Refmac5-related commands, you also need to install [CCP4](https://www.ccp4.ac.uk/). For the "No Refmac5" commands, you may only need [the monomer library](https://github.com/MonomerLibrary/monomers) if CCP4 is not installed.
+
+ **Notice:**
+ Since ver. 0.4.6, Servalcat is no longer a pure-Python package and includes some C++ code. If you build Servalcat yourself, you will probably also need to build GEMMI with the same compiler.
+
+ ## Usage
+ Please read the documentation: https://servalcat.readthedocs.io/en/latest/
+
+ ## References
+ * [Yamashita, K., Wojdyr, M., Long, F., Nicholls, R. A., Murshudov, G. N. (2023) "GEMMI and Servalcat restrain REFMAC5" *Acta Cryst.* D**79**, 368-373](https://doi.org/10.1107/S2059798323002413)
+ * [Yamashita, K., Palmer, C. M., Burnley, T., Murshudov, G. N. (2021) "Cryo-EM single particle structure refinement and map calculation using Servalcat" *Acta Cryst.* D**77**, 1282-1291](https://doi.org/10.1107/S2059798321009475)
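As a reader aid (not part of the wheel), the version and dependency pins declared above can be checked against an installed environment with the standard library alone; nothing servalcat-specific is assumed here.

```python
# Sketch: verify the installed distribution against the METADATA shown above,
# using only the Python standard library (Python 3.8+).
from importlib.metadata import version, metadata, requires

print(version("servalcat"))                      # expected: 0.4.131
print(metadata("servalcat")["Requires-Python"])  # expected: >=3.8
for req in requires("servalcat") or []:          # e.g. gemmi==0.7.4, pandas>=1.1.0, ...
    print(req)
```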
servalcat-0.4.131.dist-info/RECORD ADDED
@@ -0,0 +1,45 @@
+ servalcat/__init__.py,sha256=6kexHM7ADiCenv62rmZRAByqIixrLdbL-PVl3wGZqTE,232
+ servalcat/__main__.py,sha256=bNEZKrG5765uOp32UiBAJmeJi3O0sk4I5QjrXUbngIE,4070
+ servalcat/ext.cpython-314t-x86_64-linux-gnu.so,sha256=cHJrKx7WTEM3412T3GDG-dRS9V-BLSKORIhaHodoA-I,1605416
+ servalcat/refine/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ servalcat/refine/cgsolve.py,sha256=tHuWXr76x4sRAMUQ4dGVJzyHF_n1LiwKHTjAwh8iFMg,3072
+ servalcat/refine/refine.py,sha256=wEFFkgbhBPTstnOLhd0u4GneGpgY9c1s5fn6TOg3NN0,54290
+ servalcat/refine/refine_geom.py,sha256=TRmHs7EcpLyv9NVv1xMuIAeJ80F7fA16X4dNcofAyKg,12358
+ servalcat/refine/refine_spa.py,sha256=UXgUKmzinkr3G9Oarr3UF9u2POm1PIWl8TuJYjEIUa8,20884
+ servalcat/refine/refine_xtal.py,sha256=dpGtl4q9y9cpU2tekar3zLtwbYxF-jYyqxv64cXJ-_0,17755
+ servalcat/refine/spa.py,sha256=GmNGB3h1ICJfm3WAFODlbfkvsefnXhJBhnpw0pBwgSk,6733
+ servalcat/refine/xtal.py,sha256=pGmVHRE7DjFWH1U_R3_3kX7Ius9vrCnRl7G1FKNsFrY,16706
+ servalcat/refmac/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ servalcat/refmac/exte.py,sha256=PdGKNt27pGlz_mYkbbEBU-xLg9FXu-2sHLwlzGCuUck,9613
+ servalcat/refmac/refmac_keywords.py,sha256=rhlgRUD55P5y7AFmbeRJGb2HTkd6f9ARu5JVnyJz86Q,27553
+ servalcat/refmac/refmac_wrapper.py,sha256=5cjyOdBGj20r01qaUS8nqVbPchMEwooaODn2AukBEl8,19582
+ servalcat/spa/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ servalcat/spa/fofc.py,sha256=XN8Mwv0mMdstS-JvLEOm5TA5H-g1Z-fOkqvoYYXUQwU,23487
+ servalcat/spa/fsc.py,sha256=LOwC98zgSoBNs1uMZJ6SfPiItgOvb7M0ZMvPstjjsXo,17792
+ servalcat/spa/localcc.py,sha256=BffpHzPOjdjsw1EC5ym11ptUzhxCdIS-SQ-lEPBEXIM,8548
+ servalcat/spa/realspcc_from_var.py,sha256=U8dK7f0LG8kDk4TsOVrZoClilIQ2wQ5qruV1V4gvjTU,5327
+ servalcat/spa/run_refmac.py,sha256=xiZdDgOLRZWE6Co45GhrKrtShVXliZW_k23BhTkk-F8,49756
+ servalcat/spa/shift_maps.py,sha256=rrD7GRjshERs_jHUBKs296GAIbbahkbmFVeK_6YRgt0,13286
+ servalcat/spa/shiftback.py,sha256=A9OfaZ8r2BC6A2uGXxNNhmyBNORUB_MeqJC29ZRebnw,4664
+ servalcat/spa/translate.py,sha256=Q4evxMHgLZYGNtDjbXH3jeBQvBtFWhQNdbC1LI4kVpo,5113
+ servalcat/utils/__init__.py,sha256=j-fMAqHvzyMMQXb2Sf7Urnk4oQ31pT-g_NXf3K-NM4c,1137
+ servalcat/utils/commands.py,sha256=F6ku-QusM_xjjEUXBqQNSFqdWOM1CgdV5ezYz3RBf9k,74882
+ servalcat/utils/fileio.py,sha256=1PGCnJd16vtD-AjJIBdYMtaUbi1aIval_0DN8K_NsTM,33532
+ servalcat/utils/generate_operators.py,sha256=fCrcTcciRn-6fpFtHYvf1oPXAObjuqR3WQejz5d0RrA,10980
+ servalcat/utils/hkl.py,sha256=VfLUnF6reAtJacKgym4phVQ28QkEa3MCeDqVi4iqVcE,32751
+ servalcat/utils/logger.py,sha256=eglkG7RxCJl4Q5P0aQGp4SVauoWh36nQgFvSJJLpyrM,4587
+ servalcat/utils/maps.py,sha256=1bJye49GzIcjYqOIm9m8rn1iV5JDigc3GGM2uw_Mqwc,13297
+ servalcat/utils/model.py,sha256=d_yAzQICAyQa33BNmOGpdcpeTKZglZwXVodJy2l2_mE,37168
+ servalcat/utils/refmac.py,sha256=4HCdtWKtZZeLHlZUc-hoJu-7OR-2D7DV1KRAzL5OaZ4,31263
+ servalcat/utils/restraints.py,sha256=8_Om2Mg8FA9GDFgukY7AhSUoeOh-P11DwXeHO6wtjgQ,42408
+ servalcat/utils/symmetry.py,sha256=dEEEpbMAI-jetyxTSBM_63Pihy6h667_z8k6RtMe6aM,12038
+ servalcat/xtal/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ servalcat/xtal/french_wilson.py,sha256=9diaaXjViraRO6Yw1U5OQnR97s84bLdFe1Y3U0FROos,12242
+ servalcat/xtal/run_refmac_small.py,sha256=Yg-bEPuOdQxCwdwY8StCvXabcWHrRl6A22gJgLsypeU,10394
+ servalcat/xtal/sigmaa.py,sha256=tc7WhPgz7geKdNgpKLzBcvMrBQA83zj7scHp0XAER-w,92078
+ servalcat/xtal/twin.py,sha256=d4T90RHxO_ISaMmcpBEARwbc_VxQD1ixTpHw5qzMYUE,13633
+ servalcat-0.4.131.dist-info/METADATA,sha256=_X4MeLDi7rPj6lAmtPA8lwH18x2n2-IFN2VkkvF_tmI,2832
+ servalcat-0.4.131.dist-info/WHEEL,sha256=A8s_KceazLtiI2uTXvhiIF6QDI48t_VVmqXP_wAag-g,159
+ servalcat-0.4.131.dist-info/entry_points.txt,sha256=G1mDxhOCdF3umYz4k0kfwJbSdYSKqhvQdGCmrP8FRAY,111
+ servalcat-0.4.131.dist-info/RECORD,,
+ servalcat-0.4.131.dist-info/licenses/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725
servalcat-0.4.131.dist-info/WHEEL ADDED
@@ -0,0 +1,6 @@
+ Wheel-Version: 1.0
+ Generator: scikit-build-core 0.11.6
+ Root-Is-Purelib: false
+ Tag: cp314-cp314t-manylinux_2_27_x86_64
+ Tag: cp314-cp314t-manylinux_2_28_x86_64
+
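The two `Tag:` lines above determine which interpreters can install this wheel (CPython 3.14 free-threaded build on glibc 2.27/2.28 x86_64 Linux). A small sketch for checking compatibility of the current interpreter, using the `packaging` library (itself a declared dependency in the METADATA above); the tag strings are copied verbatim from the WHEEL file.

```python
# Sketch: check whether the running interpreter can install this wheel,
# by comparing its supported tags against the two tags declared above.
from packaging import tags

wheel_tags = {
    "cp314-cp314t-manylinux_2_27_x86_64",
    "cp314-cp314t-manylinux_2_28_x86_64",
}
compatible = any(str(t) in wheel_tags for t in tags.sys_tags())
print("this interpreter can install the wheel:", compatible)
```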
servalcat-0.4.131.dist-info/entry_points.txt ADDED
@@ -0,0 +1,4 @@
+ [console_scripts]
+ servalcat = servalcat.__main__:main
+ refmacat = servalcat.refmac.refmac_wrapper:command_line
+
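For reference, the `[console_scripts]` section above is what creates the `servalcat` and `refmacat` commands at install time. The same mapping can be read back from an installed environment with the standard library; the sketch below uses the Python 3.10+ selection API of `importlib.metadata`.

```python
# Sketch: read back the console scripts declared above from the installed wheel.
from importlib.metadata import entry_points

for ep in entry_points(group="console_scripts"):
    if ep.value.startswith("servalcat"):
        print(f"{ep.name} -> {ep.value}")
# expected:
#   refmacat -> servalcat.refmac.refmac_wrapper:command_line
#   servalcat -> servalcat.__main__:main
```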