servalcat-0.4.72-cp312-cp312-macosx_11_0_arm64.whl → servalcat-0.4.99-cp312-cp312-macosx_11_0_arm64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This release of servalcat has been flagged as potentially problematic.

servalcat/refine/xtal.py CHANGED
@@ -14,23 +14,23 @@ from servalcat.utils import logger
  from servalcat.xtal import sigmaa
  from servalcat import utils
  from servalcat import ext
+ from servalcat.xtal.twin import find_twin_domains_from_data, estimate_twin_fractions_from_model
  b_to_u = utils.model.b_to_u
  u_to_b = utils.model.u_to_b
  integr = sigmaa.integr

  class LL_Xtal:
  def __init__(self, hkldata, centric_and_selections, free, st, monlib, source="xray", mott_bethe=True,
- use_solvent=False, use_in_est="all", use_in_target="all"):
+ use_solvent=False, use_in_est="all", use_in_target="all", twin=False):
  assert source in ("electron", "xray", "neutron")
  self.source = source
  self.mott_bethe = False if source != "electron" else mott_bethe
  self.hkldata = hkldata
- self.is_int = "I" in self.hkldata.df
  self.centric_and_selections = centric_and_selections
  self.free = free
  self.st = st
  self.monlib = monlib
- self.d_min = hkldata.d_min_max()[0]
+ self.d_min_max = hkldata.d_min_max()
  self.fc_labs = ["FC0"]
  self.use_solvent = use_solvent
  if use_solvent:
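The only API-visible change in this hunk is the extra `twin` keyword on LL_Xtal. A minimal usage sketch follows; the input objects are whatever servalcat's own data-preparation code already provides, and the wrapper function name is hypothetical:

```python
# Sketch only: passing the new twin flag through to LL_Xtal.
# hkldata, centric_and_selections, free, st and monlib are assumed to be
# prepared by servalcat's existing pipeline; build_ll is a hypothetical wrapper.
from servalcat.refine.xtal import LL_Xtal

def build_ll(hkldata, centric_and_selections, free, st, monlib, twinned=False):
    # With twin=True, __init__ calls find_twin_domains_from_data(hkldata); if
    # that finds nothing usable, twin_data stays falsy and the untwinned code
    # paths are used unchanged.
    return LL_Xtal(hkldata, centric_and_selections, free, st, monlib,
                   source="xray", use_solvent=True, twin=twinned)
```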
@@ -44,39 +44,61 @@ class LL_Xtal:
  self.use_in_target = use_in_target
  self.ll = None
  self.scaling = sigmaa.LsqScale()
+ if twin:
+ self.twin_data = find_twin_domains_from_data(self.hkldata)
+ else:
+ self.twin_data = None
+ if self.twin_data:
+ self.twin_data.setup_f_calc(len(self.fc_labs))
+ self.is_int = "I" in self.hkldata.df
  logger.writeln("will use {} reflections for parameter estimation".format(self.use_in_est))
  logger.writeln("will use {} reflections for refinement".format(self.use_in_target))

+ def refine_id(self):
+ return {"xray": "X-RAY", "electron": "ELECTRON", "neutron": "NEUTRON"}.get(self.source, "") + " DIFFRACTION"
+
  def update_ml_params(self):
  self.b_aniso = sigmaa.determine_ml_params(self.hkldata, self.is_int, self.fc_labs, self.D_labs, self.b_aniso,
- self.centric_and_selections, use=self.use_in_est,
- )#D_trans="splus", S_trans="splus")
+ self.centric_and_selections, use=self.use_in_est,
+ twin_data=self.twin_data)#D_trans="splus", S_trans="splus")
  self.hkldata.df["k_aniso"] = self.hkldata.debye_waller_factors(b_cart=self.b_aniso)
  #determine_mlf_params_from_cc(self.hkldata, self.fc_labs, self.D_labs,
  # self.centric_and_selections)
-
-
  def update_fc(self):
- if self.st.ncs:
- st = self.st.clone()
- st.expand_ncs(gemmi.HowToNameCopiedChain.Dup, merge_dist=0)
- else:
- st = self.st
+ sigmaa.update_fc(st_list=[self.st], fc_labs=self.fc_labs,
+ d_min=self.d_min_max[0], monlib=self.monlib,
+ source=self.source, mott_bethe=self.mott_bethe,
+ hkldata=self.hkldata, twin_data=self.twin_data)

- self.hkldata.df[self.fc_labs[0]] = utils.model.calc_fc_fft(st, self.d_min - 1e-6,
- monlib=self.monlib,
- source=self.source,
- mott_bethe=self.mott_bethe,
- miller_array=self.hkldata.miller_array())
- self.hkldata.df["FC"] = self.hkldata.df[self.fc_labs].sum(axis=1)
+ def prepare_target(self):
+ if self.twin_data:
+ if self.use_in_target == "all":
+ idxes = numpy.concatenate([sel[i] for i_bin, _ in self.hkldata.binned()
+ for sel in self.centric_and_selections[i_bin] for i in (1,2)])
+ else:
+ i = 1 if self.use_in_target == "work" else 2
+ idxes = numpy.concatenate([sel[i] for i_bin, _ in self.hkldata.binned()
+ for sel in self.centric_and_selections[i_bin]])
+ mask = numpy.empty(len(self.hkldata.df.index)) * numpy.nan
+ mask[idxes] = 1 / self.hkldata.debye_waller_factors(b_cart=self.b_aniso)[idxes]**2
+ self.twin_data.est_f_true(self.hkldata.df.I.to_numpy() * mask,
+ self.hkldata.df.SIGI.to_numpy() * mask)

  def overall_scale(self, min_b=0.1):
- fc_list = [self.hkldata.df[self.fc_labs[0]].to_numpy()]
+ miller_array = self.twin_data.asu if self.twin_data else self.hkldata.miller_array()
  if self.use_solvent:
- Fmask = sigmaa.calc_Fmask(self.st, self.d_min - 1e-6, self.hkldata.miller_array())
- fc_list.append(Fmask)
-
- self.scaling.set_data(self.hkldata, fc_list, self.is_int, sigma_cutoff=0)
+ Fmask = sigmaa.calc_Fmask(self.st, self.d_min_max[0], miller_array)
+ if self.twin_data:
+ fc_sum = self.twin_data.f_calc[:,:-1].sum(axis=1)
+ else:
+ fc_sum = self.hkldata.df[self.fc_labs[:-1]].sum(axis=1).to_numpy()
+ fc_list = [fc_sum, Fmask]
+ else:
+ if self.twin_data:
+ fc_list = [self.twin_data.f_calc.sum(axis=1)]
+ else:
+ fc_list = [self.hkldata.df[self.fc_labs].sum(axis=1).to_numpy()]
+ self.scaling.set_data(self.hkldata, fc_list, self.is_int, sigma_cutoff=0, twin_data=self.twin_data)
  self.scaling.scale()
  self.b_aniso = self.scaling.b_aniso
  b = self.scaling.b_iso
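The new prepare_target() above feeds the twin machinery observations corrected for the overall anisotropy: reflections outside the selected work/free/all set are blanked with NaN, and the rest are divided by k_aniso squared. A standalone restatement of just that correction (est_f_true itself is internal to the twin code and not sketched here):

```python
import numpy

def aniso_corrected_obs(I, sigI, k_aniso, idxes):
    # Same computation as prepare_target(): NaN everywhere except the
    # reflections actually used in the target, which are rescaled by
    # 1/k_aniso**2 (intensities scale as the square of the amplitude scale).
    mask = numpy.full(len(I), numpy.nan)
    mask[idxes] = 1.0 / k_aniso[idxes]**2
    return I * mask, sigI * mask
```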
@@ -90,9 +112,15 @@ class LL_Xtal:
  k_iso = self.hkldata.debye_waller_factors(b_iso=b)
  self.hkldata.df["k_aniso"] = self.hkldata.debye_waller_factors(b_cart=self.b_aniso)
  if self.use_solvent:
- solvent_scale = self.scaling.get_solvent_scale(self.scaling.k_sol, self.scaling.b_sol,
- 1. / self.hkldata.d_spacings().to_numpy()**2)
- self.hkldata.df[self.fc_labs[-1]] = Fmask * solvent_scale
+ if self.twin_data:
+ s2 = numpy.asarray(self.twin_data.s2_array)
+ else:
+ s2 = 1. / self.hkldata.d_spacings().to_numpy()**2
+ Fbulk = Fmask * self.scaling.get_solvent_scale(self.scaling.k_sol, self.scaling.b_sol, s2)
+ if self.twin_data:
+ self.twin_data.f_calc[:,-1] = Fbulk
+ else:
+ self.hkldata.df[self.fc_labs[-1]] = Fbulk
  if self.is_int:
  o_labs = self.hkldata.df.columns.intersection(["I", "SIGI",
  "I(+)","SIGI(+)", "I(-)", "SIGI(-)"])
@@ -102,103 +130,125 @@ class LL_Xtal:
  "F(+)","SIGF(+)", "F(-)", "SIGF(-)"])
  self.hkldata.df[o_labs] /= self.scaling.k_overall

- for lab in self.fc_labs: self.hkldata.df[lab] *= k_iso
- self.hkldata.df["FC"] = self.hkldata.df[self.fc_labs].sum(axis=1)
+ if self.twin_data:
+ self.twin_data.f_calc[:] *= self.twin_data.debye_waller_factors(b_iso=b)[:,None]
+ else:
+ for lab in self.fc_labs: self.hkldata.df[lab] *= k_iso
+ self.hkldata.df["FC"] = self.hkldata.df[self.fc_labs].sum(axis=1)

  # for next cycle
  self.scaling.k_overall = 1.
  self.scaling.b_iso = 0.
+ if self.twin_data:
+ estimate_twin_fractions_from_model(self.twin_data, self.hkldata)
  # overall_scale()

  def calc_target(self): # -LL target for MLF or MLI
  ret = 0
- k_aniso = self.hkldata.debye_waller_factors(b_cart=self.b_aniso)
- f = sigmaa.mli if self.is_int else sigmaa.mlf
- for i_bin, _ in self.hkldata.binned():
- if self.use_in_target == "all":
- idxes = numpy.concatenate([sel[i] for sel in self.centric_and_selections[i_bin] for i in (1,2)])
- else:
- i = 1 if self.use_in_target == "work" else 2
- idxes = numpy.concatenate([sel[i] for sel in self.centric_and_selections[i_bin]])
- ret += f(self.hkldata.df,
- self.fc_labs,
- numpy.vstack([self.hkldata.df[lab].to_numpy()[idxes] for lab in self.D_labs]).T,
- self.hkldata.df.S.to_numpy()[idxes],
- k_aniso,
- idxes)
+ if self.twin_data:
+ ret = self.twin_data.ll()
+ else:
+ k_aniso = self.hkldata.debye_waller_factors(b_cart=self.b_aniso)
+ f = sigmaa.mli if self.is_int else sigmaa.mlf
+ for i_bin, _ in self.hkldata.binned():
+ if self.use_in_target == "all":
+ idxes = numpy.concatenate([sel[i] for sel in self.centric_and_selections[i_bin] for i in (1,2)])
+ else:
+ i = 1 if self.use_in_target == "work" else 2
+ idxes = numpy.concatenate([sel[i] for sel in self.centric_and_selections[i_bin]])
+ ret += f(self.hkldata.df,
+ self.fc_labs,
+ numpy.vstack([self.hkldata.df[lab].to_numpy()[idxes] for lab in self.D_labs]).T,
+ self.hkldata.df.S.to_numpy()[idxes],
+ k_aniso,
+ idxes)
  return ret * 2 # friedel mates
  # calc_target()

  def calc_stats(self, bin_stats=False):
- stats, overall = sigmaa.calc_r_and_cc(self.hkldata, self.centric_and_selections)
+ stats, overall = sigmaa.calc_r_and_cc(self.hkldata, self.centric_and_selections, self.twin_data)
  ret = {"summary": overall}
  ret["summary"]["-LL"] = self.calc_target()
+ if self.twin_data:
+ ret["twin_alpha"] = self.twin_data.alphas
  if bin_stats:
  ret["bin_stats"] = stats
  for lab in "R", "CC":
  logger.writeln(" ".join("{} = {:.4f}".format(x, overall[x]) for x in overall if x.startswith(lab)))
+ if self.is_int:
+ logger.writeln("R1 is calculated for reflections with I/sigma>2.")
  return ret

  def calc_grad(self, atom_pos, refine_xyz, adp_mode, refine_occ, refine_h, specs=None):
- dll_dab = numpy.zeros(len(self.hkldata.df.FC), dtype=numpy.complex128)
- d2ll_dab2 = numpy.empty(len(self.hkldata.df.index))
- d2ll_dab2[:] = numpy.nan
- blur = utils.model.determine_blur_for_dencalc(self.st, self.d_min / 3) # TODO need more work
+ blur = utils.model.determine_blur_for_dencalc(self.st, self.d_min_max[0] / 3) # TODO need more work
  logger.writeln("blur for deriv= {:.2f}".format(blur))
- k_ani = self.hkldata.debye_waller_factors(b_cart=self.b_aniso)
- for i_bin, _ in self.hkldata.binned():
- for c, work, test in self.centric_and_selections[i_bin]:
- if self.use_in_target == "all":
- cidxes = numpy.concatenate([work, test])
- else:
- cidxes = work if self.use_in_target == "work" else test
- epsilon = self.hkldata.df.epsilon.to_numpy()[cidxes]
- Fcs = numpy.vstack([self.hkldata.df[lab].to_numpy()[cidxes] for lab in self.fc_labs]).T
- Ds = numpy.vstack([self.hkldata.df[lab].to_numpy()[cidxes] for lab in self.D_labs]).T
- S = self.hkldata.df["S"].to_numpy()[cidxes]
- Fc = (Ds * Fcs).sum(axis=1)
- Fc_abs = numpy.abs(Fc)
- expip = numpy.exp(1j * numpy.angle(Fc))
- if self.is_int:
- Io = self.hkldata.df.I.to_numpy()
- sigIo = self.hkldata.df.SIGI.to_numpy()
- to = Io[cidxes] / sigIo[cidxes] - sigIo[cidxes] / (c+1) / k_ani[cidxes]**2 / S / epsilon
- tf = k_ani[cidxes] * Fc_abs / numpy.sqrt(sigIo[cidxes])
- sig1 = k_ani[cidxes]**2 * epsilon * S / sigIo[cidxes]
- k_num = 0.5 if c == 0 else 0. # acentric:0.5, centric: 0.
- r = ext.integ_J_ratio(k_num, k_num - 0.5, True, to, tf, sig1, c+1,
- integr.exp2_threshold, integr.h, integr.N, integr.ewmax)
- r *= numpy.sqrt(sigIo[cidxes]) / k_ani[cidxes]
- g = (2-c) * (Fc_abs - r) / epsilon / S * Ds[:,0]
- dll_dab[cidxes] = g * expip
- #d2ll_dab2[cidxes] = (2-c)**2 / S / epsilon * Ds[0]**2 # approximation
- #d2ll_dab2[cidxes] = ((2-c) / S / epsilon + ((2-c) * r / k_ani[cidxes] / epsilon / S)**2) * Ds[0]**2
- d2ll_dab2[cidxes] = g**2
- else:
- Fo = self.hkldata.df.FP.to_numpy()[cidxes] / k_ani[cidxes]
- SigFo = self.hkldata.df.SIGFP.to_numpy()[cidxes] / k_ani[cidxes]
- if c == 0: # acentric
- Sigma = 2 * SigFo**2 + epsilon * S
- X = 2 * Fo * Fc_abs / Sigma
- m = gemmi.bessel_i1_over_i0(X)
- g = 2 * (Fc_abs - m * Fo) / Sigma * Ds[:,0] # XXX assuming 0 is atomic structure
- dll_dab[cidxes] = g * expip
- d2ll_dab2[cidxes] = (2 / Sigma - (1 - m / X - m**2) * (2 * Fo / Sigma)**2) * Ds[:,0]**2
+ if self.twin_data:
+ dll_dab, d2ll_dab2 = self.twin_data.ll_der_fc0()
+ dll_dab *= self.twin_data.debye_waller_factors(b_iso=-blur)
+ else:
+ dll_dab = numpy.zeros(len(self.hkldata.df.FC), dtype=numpy.complex128)
+ d2ll_dab2 = numpy.empty(len(self.hkldata.df.index))
+ d2ll_dab2[:] = numpy.nan
+ k_ani = self.hkldata.debye_waller_factors(b_cart=self.b_aniso)
+ for i_bin, _ in self.hkldata.binned():
+ for c, work, test in self.centric_and_selections[i_bin]:
+ if self.use_in_target == "all":
+ cidxes = numpy.concatenate([work, test])
  else:
- Sigma = SigFo**2 + epsilon * S
- X = Fo * Fc_abs / Sigma
- #X = X.astype(numpy.float64)
- m = numpy.tanh(X)
- g = (Fc_abs - m * Fo) / Sigma * Ds[:,0]
+ cidxes = work if self.use_in_target == "work" else test
+ epsilon = self.hkldata.df.epsilon.to_numpy()[cidxes]
+ Fcs = numpy.vstack([self.hkldata.df[lab].to_numpy()[cidxes] for lab in self.fc_labs]).T
+ Ds = numpy.vstack([self.hkldata.df[lab].to_numpy()[cidxes] for lab in self.D_labs]).T
+ S = self.hkldata.df["S"].to_numpy()[cidxes]
+ Fc = (Ds * Fcs).sum(axis=1)
+ Fc_abs = numpy.abs(Fc)
+ expip = numpy.exp(1j * numpy.angle(Fc))
+ if self.is_int:
+ Io = self.hkldata.df.I.to_numpy()
+ sigIo = self.hkldata.df.SIGI.to_numpy()
+ to = Io[cidxes] / sigIo[cidxes] - sigIo[cidxes] / (c+1) / k_ani[cidxes]**2 / S / epsilon
+ tf = k_ani[cidxes] * Fc_abs / numpy.sqrt(sigIo[cidxes])
+ sig1 = k_ani[cidxes]**2 * epsilon * S / sigIo[cidxes]
+ k_num = numpy.repeat(0.5 if c == 0 else 0., to.size) # acentric:0.5, centric: 0.
+ r = ext.integ_J_ratio(k_num, k_num - 0.5, True, to, tf, sig1, numpy.repeat(c+1, to.size),
+ integr.exp2_threshold, integr.h, integr.N, integr.ewmax)
+ r *= numpy.sqrt(sigIo[cidxes]) / k_ani[cidxes]
+ g = (2-c) * (Fc_abs - r) / epsilon / S * Ds[:,0]
  dll_dab[cidxes] = g * expip
- d2ll_dab2[cidxes] = (1. / Sigma - (Fo / (Sigma * numpy.cosh(X)))**2) * Ds[:,0]**2
+ #d2ll_dab2[cidxes] = (2-c)**2 / S / epsilon * Ds[0]**2 # approximation
+ #d2ll_dab2[cidxes] = ((2-c) / S / epsilon + ((2-c) * r / k_ani[cidxes] / epsilon / S)**2) * Ds[0]**2
+ d2ll_dab2[cidxes] = g**2
+ else:
+ Fo = self.hkldata.df.FP.to_numpy()[cidxes] / k_ani[cidxes]
+ SigFo = self.hkldata.df.SIGFP.to_numpy()[cidxes] / k_ani[cidxes]
+ if c == 0: # acentric
+ Sigma = 2 * SigFo**2 + epsilon * S
+ X = 2 * Fo * Fc_abs / Sigma
+ m = gemmi.bessel_i1_over_i0(X)
+ g = 2 * (Fc_abs - m * Fo) / Sigma * Ds[:,0] # XXX assuming 0 is atomic structure
+ dll_dab[cidxes] = g * expip
+ d2ll_dab2[cidxes] = (2 / Sigma - (1 - m / X - m**2) * (2 * Fo / Sigma)**2) * Ds[:,0]**2
+ else:
+ Sigma = SigFo**2 + epsilon * S
+ X = Fo * Fc_abs / Sigma
+ #X = X.astype(numpy.float64)
+ m = numpy.tanh(X)
+ g = (Fc_abs - m * Fo) / Sigma * Ds[:,0]
+ dll_dab[cidxes] = g * expip
+ d2ll_dab2[cidxes] = (1. / Sigma - (Fo / (Sigma * numpy.cosh(X)))**2) * Ds[:,0]**2
+ dll_dab *= self.hkldata.debye_waller_factors(b_iso=-blur)

  if self.mott_bethe:
- dll_dab *= self.hkldata.d_spacings()**2 * gemmi.mott_bethe_const()
+ d2 = 1 / self.twin_data.s2_array if self.twin_data else self.hkldata.d_spacings()**2
+ dll_dab *= d2 * gemmi.mott_bethe_const()
  d2ll_dab2 *= gemmi.mott_bethe_const()**2

+
  # we need V**2/n for gradient.
- dll_dab_den = self.hkldata.fft_map(data=dll_dab * self.hkldata.debye_waller_factors(b_iso=-blur))
+ if self.twin_data:
+ dll_dab_den = utils.hkl.fft_map(self.hkldata.cell, self.hkldata.sg, self.twin_data.asu, data=dll_dab)
+ else:
+ dll_dab_den = self.hkldata.fft_map(data=dll_dab)
  dll_dab_den.array[:] *= self.hkldata.cell.volume**2 / dll_dab_den.point_count
  #asu = dll_dab_den.masked_asu()
  #dll_dab_den.array[:] *= 1 - asu.mask_array # 0 to use
@@ -211,13 +261,12 @@ class LL_Xtal:
  self.ll.calc_grad_it92(dll_dab_den, blur)

  # second derivative
+ s_array = numpy.sqrt(self.twin_data.s2_array) if self.twin_data else 1./self.hkldata.d_spacings().to_numpy()
  if self.source == "neutron":
- self.ll.make_fisher_table_diag_direct_n92(1./self.hkldata.d_spacings().to_numpy(),
- d2ll_dab2)
+ self.ll.make_fisher_table_diag_direct_n92(s_array, d2ll_dab2)
  self.ll.fisher_diag_from_table_n92()
  else:
- self.ll.make_fisher_table_diag_direct_it92(1./self.hkldata.d_spacings().to_numpy(),
- d2ll_dab2)
+ self.ll.make_fisher_table_diag_direct_it92(s_array, d2ll_dab2)
  self.ll.fisher_diag_from_table_it92()
  #json.dump(dict(b=ll.table_bs, pp1=ll.pp1, bb=ll.bb),
  # open("ll_fisher.json", "w"), indent=True)
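For orientation, the untwinned amplitude branch of calc_grad() above evaluates the acentric maximum-likelihood (Rice-distribution) gradient per reflection. A standalone restatement of the same expressions, using the gemmi.bessel_i1_over_i0 helper that the code already relies on (inputs may be floats or numpy arrays):

```python
import gemmi

def acentric_mlf_grad(Fo, SigFo, Fc_abs, epsilon, S, D0):
    # Restatement of the acentric MLF branch in calc_grad():
    #   Sigma = 2*sigma_F**2 + epsilon*S,  X = 2*Fo*|Fc|/Sigma,
    #   m = I1(X)/I0(X) (figure of merit), g = 2*(|Fc| - m*Fo)/Sigma * D0.
    Sigma = 2 * SigFo**2 + epsilon * S
    X = 2 * Fo * Fc_abs / Sigma
    m = gemmi.bessel_i1_over_i0(X)
    g = 2 * (Fc_abs - m * Fo) / Sigma * D0
    curvature = (2 / Sigma - (1 - m / X - m**2) * (2 * Fo / Sigma)**2) * D0**2
    return g, curvature
```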
servalcat/refmac/exte.py CHANGED
@@ -120,18 +120,20 @@ def read_external_restraints(params, st, geom):
  if r["rest_type"] == "dist":
  if not (defs["dist_min_external"] < r["restr"]["value"] < defs["dist_max_external"]):
  continue
- if ex.atoms[0].serial > ex.atoms[1].serial:
- ex.atoms = ex.atoms[::-1]
  ex.alpha = r["restr"].get("alpha_in", defs["alpha_default"])
  ex.type = r["restr"].get("itype_in", defs["type_default"])
  symm1 = any([spec.get("symm") for spec in r["restr"]["specs"]]) # is it the intention?
  if r["restr"].get("symm_in", defs["symall_block"]) or symm1:
  asu = gemmi.Asu.Different if defs["exclude_self_block"] else gemmi.Asu.Any
- im = st.cell.find_nearest_image(ex.atoms[0].pos, ex.atoms[1].pos, asu)
- ex.set_image(im)
+ ex.set_image(st.cell, asu)
  #print("dist=", ex.alpha, ex.type, ex.values[-1].value, ex.values[-1].sigma, ex.sym_idx, ex.pbc_shift, ex.atoms)
  elif r["rest_type"] == "angl":
- pass
+ if any(spec.get("symm") for spec in r["restr"]["specs"]):
+ asus = [gemmi.Asu.Different if r["restr"]["specs"][i].get("symm") else gemmi.Asu.Same
+ for i in range(3)]
+ if atoms[0].serial > atoms[2].serial:
+ asus = asus[::-1]
+ ex.set_images(st.cell, asus[0], asus[2])
  #print("angl=", ex.values[-1].value, ex.values[-1].sigma, ex.atoms)
  elif r["rest_type"] == "tors":
  pass
@@ -37,6 +37,7 @@ def parse_atom_spec(s, itk):
  itk += 2
  elif s[itk].lower().startswith("symm"):
  ret["symm"] = s[itk+1][0].lower() == "y"
+ itk += 2
  else:
  break

@@ -167,10 +168,10 @@ def read_exte(s):
  except ValueError:
  ret["restr"]["itype_in"] = dict(o=0, f=2).get(s[itk+1][0].lower(), 1)
  if not (0 <= ret["restr"]["itype_in"] <= 2):
- logger.writeln("WARNING: wrong type is given. setting to 2.\n=> {}".format(l))
+ logger.writeln("WARNING: wrong type is given. setting to 2.\n=> {}".format(" ".join(s)))
  ret["restr"]["itype_in"] = 2
  itk += 2
- elif s[itk].lower().startswith("symm"): # only for distance
+ elif s[itk].lower().startswith("symm"): # only for distance and angle
  ret["restr"]["symm_in"] = s[itk+1][0].lower() == "y"
  itk += 2
  else:
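The one-line addition in parse_atom_spec above is more consequential than it looks: before it, the symm branch did not advance itk, so the keyword loop would re-read the same token indefinitely. A toy reproduction of the pattern (simplified keywords, not the real parser):

```python
# Toy version of the parse_atom_spec token loop (hypothetical, simplified).
# Without advancing itk after consuming "symm", the same token is matched
# again on the next iteration and the loop never terminates.
def parse_spec(tokens, itk):
    ret = {}
    while itk < len(tokens):
        if tokens[itk].lower().startswith("chai"):
            ret["chain"] = tokens[itk + 1]
            itk += 2
        elif tokens[itk].lower().startswith("symm"):
            ret["symm"] = tokens[itk + 1][0].lower() == "y"
            itk += 2  # the added line; omit it and this loop spins forever
        else:
            break
    return ret, itk

print(parse_spec("chain A symm y".split(), 0))  # ({'chain': 'A', 'symm': True}, 4)
```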
@@ -181,7 +182,8 @@ def read_exte(s):
  ret["restr"][d[k]] = float(s[itk+1])
  itk += 2
  else:
- logger.writeln("unrecognised key: {}\n=> {}".format(s[itk], l))
+ logger.writeln("unrecognised key: {}\n=> {}".format(s[itk], " ".join(s)))
+ break
  elif s[1].lower().startswith("stac"):
  ret["rest_type"] = "stac"
  ret["restr"] = {}
@@ -194,7 +196,7 @@ def read_exte(s):
  ip = int(s[itk+1])
  itk += 2
  if ip not in (1, 2):
- raise RuntimeError("Problem with stacking instructions. Plane number can be 1 or 2.\n=> {}".format(l))
+ raise RuntimeError("Problem with stacking instructions. Plane number can be 1 or 2.\n=> {}".format(" ".join(s)))
  elif s[itk].lower().startswith(("firs", "next")):
  atoms, itk = parse_atom_spec(s, itk+1)
  ret["restr"]["specs"][ip-1] = atoms
@@ -203,7 +205,7 @@ def read_exte(s):
  ret["restr"][k] = float(s[itk+1]) if k != "type_r" else int(s[itk+1])
  itk += 2
  else:
- logger.writeln("WARNING: unrecognised keyword: {}\n=> {}".format(s[itk], l))
+ logger.writeln("WARNING: unrecognised keyword: {}\n=> {}".format(s[itk], " ".join(s)))
  itk += 1
  elif s[1].lower().startswith(("harm", "spec")):
  ret["rest_type"] = s[1][:4].lower() # in Refmac, irest_type = 1 if harm else 2
@@ -241,11 +243,11 @@ def read_exte(s):
  ret["restr"]["sigma_u"] = float(s[itk+1]) * b_to_u
  itk += 2
  else:
- logger.writeln("WARNING: unrecognised keyword: {}\n=> {}".format(s[itk], l))
+ logger.writeln("WARNING: unrecognised keyword: {}\n=> {}".format(s[itk], " ".join(s)))
  itk += 1

  else:
- logger.writeln("WARNING: cannot parse: {}".format(l))
+ logger.writeln("WARNING: cannot parse: {}".format(" ".join(s)))
  return ret
  # read_exte()

@@ -543,8 +545,8 @@ def parse_line(l, ret):
  ret.setdefault("refi", {})
  itk = 1
  while itk < ntok:
- if s[itk].startswith("type"):
- if itk+1 < ntok and s[itk+1].startswith("unre"):
+ if s[itk].lower().startswith("type"):
+ if itk+1 < ntok and s[itk+1].lower().startswith("unre"):
  ret["refi"]["type"] = "unre"
  itk += 2
  else:
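Finally, the parse_line change makes the refinement-type keywords case-insensitive, in line with Refmac's keyword conventions. A toy version of just that branch (not the actual servalcat parser):

```python
# Toy illustration: "REFI TYPE UNREstrained" should also select unrestrained
# refinement, which is why both tokens are lower-cased before startswith().
def refi_type(tokens):
    ret = {}
    itk = 1
    while itk < len(tokens):
        if tokens[itk].lower().startswith("type"):
            if itk + 1 < len(tokens) and tokens[itk + 1].lower().startswith("unre"):
                ret["type"] = "unre"
                itk += 2
            else:
                itk += 1
        else:
            itk += 1
    return ret

assert refi_type("refi TYPE UNREstrained".split()) == {"type": "unre"}
```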