pycistem-0.3.5-cp310-cp310-manylinux_2_27_x86_64.whl → pycistem-0.4.1-cp310-cp310-manylinux_2_27_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of pycistem might be problematic.

pycistem/__init__.py CHANGED
@@ -1,7 +1,7 @@
- __version__ = "0.3.5"
+ __version__ = "0.4.1"

  #from . import core
- from pycistem import database, programs
+ from pycistem import database, programs, utils
  from pycistem.config import set_cistem_path


pycistem/core/core.cpp CHANGED
@@ -268,7 +268,7 @@ PYBIND11_MODULE(core, m)
  .def("DefocusGivenAzimuth", &CTF::DefocusGivenAzimuth)
  .def("BeamTiltGivenAzimuth", &CTF::BeamTiltGivenAzimuth)
  .def("ParticleShiftGivenAzimuth", &CTF::ParticleShiftGivenAzimuth)
- .def("WavelengthGivenAccelerationVoltage", &CTF::WavelengthGivenAccelerationVoltage)
+ //.def("WavelengthGivenAccelerationVoltage", &CTF::WavelengthGivenAccelerationVoltage)
  .def("GetLowestFrequencyForFitting", &CTF::GetLowestFrequencyForFitting)
  .def("GetHighestFrequencyForFitting", &CTF::GetHighestFrequencyForFitting)
  .def("GetHighestFrequencyWithGoodFit", &CTF::GetHighestFrequencyWithGoodFit)
@@ -611,7 +611,7 @@ PYBIND11_MODULE(core, m)
  .def("Normalize", &Image::Normalize)
  //.def("NormalizeSumOfSquares", &Image::NormalizeSumOfSquares)
  .def("ZeroFloatOutside", &Image::ZeroFloatOutside)
- .def("ReplaceOutliersWithMean", &Image::ReplaceOutliersWithMean)
+ .def("ReplaceOutliersWithMean", static_cast<void (Image::*)(float)>(&Image::ReplaceOutliersWithMean))
  .def("ReturnVarianceOfRealValues", &Image::ReturnVarianceOfRealValues)
  .def("ReturnDistributionOfRealValues", &Image::ReturnDistributionOfRealValues)
  .def("UpdateDistributionOfRealValues", &Image::UpdateDistributionOfRealValues)
@@ -197,7 +197,7 @@ def write_match_template_to_starfile(project, match_template_job_id, filename,ov
          "cisTEMAngleTheta": pd.Series(dtype="float"),
          "cisTEMAnglePhi": pd.Series(dtype="float"),
          "cisTEMPixelSize": pd.Series(dtype="float"),
-         "cisTEMScore": pd.Series(dtype="float")
+         "cisTEMScore": pd.Series(dtype="float"),
      })

      with contextlib.closing(sqlite3.connect(project)) as con:
@@ -231,9 +231,9 @@ def write_match_template_to_starfile(project, match_template_job_id, filename,ov
      # Due to a bug in cisTEM in earlier matches phi and psi are switched in the
      # database
      if(switch_phi_psi):
-         temp = result_peaks["phi"]
-         result_peaks["phi"] = result_peaks["psi"]
-         result_peaks["psi"] = temp
+         temp = result_peaks["cisTEMAnglePhi"]
+         result_peaks["cisTEMAnglePhi"] = result_peaks["cisTEMAnglePsi"]
+         result_peaks["cisTEMAnglePsi"] = temp
      starfile.write(result_peaks, filename=filename, overwrite=overwrite)


@@ -267,20 +267,33 @@ def get_already_processed_images(database, match_template_job_id):
  def get_num_already_processed_images(database, match_template_job_id):
      with contextlib.closing(sqlite3.connect(database)) as con:
          cur = con.cursor()
-         cur.execute(f"SELECT COUNT(*) FROM TEMPLATE_MATCH_LIST WHERE TEMPLATE_MATCH_JOB_ID = {match_template_job_id}")
+         cur.execute(f"SELECT COUNT(1) FROM TEMPLATE_MATCH_LIST WHERE TEMPLATE_MATCH_JOB_ID = {match_template_job_id}")
          num_already_processed_images = cur.fetchone()[0]
      return(num_already_processed_images)

+ def get_num_matches(database, match_template_job_id):
+     with contextlib.closing(sqlite3.connect(database)) as con:
+         cur = con.cursor()
+         cur.execute(f"SELECT TEMPLATE_MATCH_ID FROM TEMPLATE_MATCH_LIST WHERE TEMPLATE_MATCH_JOB_ID = {match_template_job_id}")
+         match_template_ids = cur.fetchall()
+         total = 0
+         for mti in match_template_ids:
+             cur.execute(f"SELECT MAX(RowId) FROM TEMPLATE_MATCH_PEAK_LIST_{mti[0]}")
+             num_matches = cur.fetchone()[0]
+             if num_matches is not None:
+                 total += num_matches
+     return(total)
+
  def get_num_images(database):
      with contextlib.closing(sqlite3.connect(database)) as con:
          cur = con.cursor()
-         cur.execute("SELECT COUNT(*) FROM IMAGE_ASSETS")
+         cur.execute("SELECT COUNT(1) FROM IMAGE_ASSETS WHERE CTF_ESTIMATION_ID IS NOT -1")
          num_images = cur.fetchone()[0]
      return(num_images)

  def get_num_movies(database):
      with contextlib.closing(sqlite3.connect(database)) as con:
          cur = con.cursor()
-         cur.execute("SELECT COUNT(*) FROM MOVIE_ASSETS")
+         cur.execute("SELECT COUNT(1) FROM MOVIE_ASSETS")
          num_movies = cur.fetchone()[0]
-     return(num_movies)
+     return(num_movies)
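
Note: the new get_num_matches helper sums MAX(RowId) over every TEMPLATE_MATCH_PEAK_LIST table belonging to a job, i.e. the total number of recorded peaks. A usage sketch, assuming these helpers are exposed from pycistem.database (the project path and job id are placeholders):

    from pycistem.database import get_num_already_processed_images, get_num_matches

    project_db = "my_project.db"   # placeholder cisTEM project database
    job_id = 1                     # placeholder template-match job id
    done = get_num_already_processed_images(project_db, job_id)
    peaks = get_num_matches(project_db, job_id)
    print(f"{done} images processed, {peaks} template-match peaks recorded")
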
@@ -3,4 +3,8 @@ from pycistem.programs.cistem_program import *
  from pycistem.programs.refine_template import *
  from pycistem.programs.refine_template_niko import *
  from pycistem.programs.refine_template_dev import *
+ from pycistem.programs.refine_ctf import *
+ from pycistem.programs.estimate_beamtilt import *
  from pycistem.programs.run_profile import *
+ from pycistem.programs.reconstruct3d import *
+ from pycistem.programs.resample import *
@@ -10,7 +10,7 @@ socket_send_next_job = b"z7PnJh=x;[b#f/6L"
  socket_time_to_die = b")[czL7$#Sg/d4-*K"
  socket_ready_to_send_single_job = b"-TDv(X*kY.:d`D5:"
  socket_i_have_an_error = b"8TU.cDc3jr,rb[SN"
- socket_i_have_info = b"+5nxvY@\t.!_R#Vn"
+ socket_i_have_info = b"+5nxvY@zt.!_R#Vn"
  socket_job_finished = b"jNA[3!VdLdkb$LwM"
  socket_number_of_connections = b"Uu6tsQ,z}M''T`7f"
  socket_all_jobs_finished = b"aL)yaH[$3s;9Ymk6"
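
Note: this looks like a length fix. The cisTEM socket codes are read with reader.readexactly(16) (see the cistem_program hunk below), but the old literal contained the escape "\t", a single tab byte, leaving it one byte short so it could never match. A quick check:

    old = b"+5nxvY@\t.!_R#Vn"    # "\t" collapses to one tab byte
    new = b"+5nxvY@zt.!_R#Vn"
    print(len(old), len(new))    # 15 16 -- only the new constant has the 16-byte length of the other codes
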
@@ -43,9 +43,9 @@ def parameters_from_database(database, image_asset_id, output_filename, **kwargs
          **kwargs)
      return(par)

- def run(parameters: Union[ApplyCtfParameters,list[ApplyCtfParameters]]):
+ def run(parameters: Union[ApplyCtfParameters,list[ApplyCtfParameters]],**kwargs):
      if not isinstance(parameters, list):
          parameters = [parameters]


-     asyncio.run(cistem_program.run("applyctf",parameters))
+     asyncio.run(cistem_program.run("applyctf",parameters,**kwargs))
@@ -112,8 +112,9 @@ async def handle_leader(reader, writer, buffers, signal_handlers,results):
      cont = True
      while cont:
          data = await reader.readexactly(16)
+         log.debug(f"In no sig {addr} sent {data}")
          result = None
-         if data != socket_job_result_queue:
+         if data != socket_job_result_queue and data != socket_i_have_info:
              cont = False
          else:
              await signal_handlers[data](reader,writer,log)
@@ -133,6 +134,7 @@ async def handle_leader(reader, writer, buffers, signal_handlers,results):
      while cont:
          if socket_send_next_job not in signal_handlers:
              data = await reader.readexactly(16)
+             log.debug(f"In cont {addr} sent {data}")
          if data != socket_send_next_job:
              if data == socket_job_result_queue:
                  res = await signal_handlers[data](reader,writer,log)
@@ -0,0 +1,60 @@
+ import asyncio
+ from dataclasses import dataclass
+ from typing import Union
+ import numpy as np
+ import struct
+ import mrcfile
+ from functools import partial
+ import pandas as pd
+
+
+ from pycistem.programs import cistem_program
+ from pycistem.programs._cistem_constants import socket_send_next_job, socket_job_result_queue
+ from pycistem.core import Image
+
+ @dataclass
+ class EstimateBeamtiltParameters:
+     input_phase_difference_image: str
+     pixel_size: float = 1.0
+     voltage_kV: float = 300.0
+     spherical_aberration_mm: float = 2.7
+     first_position_to_search: int = 0
+     last_position_to_search: int = 0
+
+ async def handle_results(reader, writer, logger):
+     #logger.info("Handling results")
+     await reader.read(4)
+     length = await reader.read(4)
+     number_of_bytes = int.from_bytes(length, byteorder="little")
+     results = await reader.read(number_of_bytes*4)
+     score = struct.unpack("<f",results[0:4])[0]
+     beam_tilt_x = struct.unpack("<f",results[4:8])[0]
+     beam_tilt_y = struct.unpack("<f",results[8:12])[0]
+     particle_shift_x = struct.unpack("<f",results[12:16])[0]
+     particle_shift_y = struct.unpack("<f",results[16:20])[0]
+     return(score,beam_tilt_x,beam_tilt_y,particle_shift_x,particle_shift_y)
+
+ async def handle_job_result_queue(reader, writer, logger):
+
+     length = await reader.readexactly(4)
+     number_of_bytes = int.from_bytes(length, byteorder="little")
+     results = await reader.readexactly(number_of_bytes)
+     return(results)
+
+ def run(parameters: Union[EstimateBeamtiltParameters,list[EstimateBeamtiltParameters]],**kwargs) -> pd.DataFrame:
+
+     if not isinstance(parameters, list):
+         parameters = [parameters]
+     signal_handlers = {
+         socket_send_next_job : handle_results,
+         socket_job_result_queue : handle_job_result_queue,
+
+     }
+     result = asyncio.run(cistem_program.run("estimate_beamtilt", parameters, signal_handlers=signal_handlers,**kwargs))
+     result = pd.DataFrame([a[1] for a in result],
+                           index = [a[0] for a in result],
+                           columns=["score","beam_tilt_x","beam_tilt_y","particle_shift_x","particle_shift_y"])
+     return(result)
+
+
+
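
Usage sketch for the new estimate_beamtilt wrapper; the phase-difference image path is a placeholder (for example one written by refine_ctf below) and the optics values are simply the dataclass defaults:

    from pycistem.programs import estimate_beamtilt

    par = estimate_beamtilt.EstimateBeamtiltParameters(
        input_phase_difference_image="phase_difference.mrc",
        pixel_size=1.0,
        voltage_kV=300.0,
    )
    df = estimate_beamtilt.run(par)  # DataFrame with score, beam_tilt_x/y and particle_shift_x/y per job
    print(df)
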
@@ -41,7 +41,7 @@ class MatchTemplateParameters:
      ctf_refinement: bool = False
      particle_radius_angstroms: float = 0.0
      phase_shift: float = 0.0
-     mip_output_file: str = "mip.mrc"
+     mip_output_file: str = "/dev/null"
      best_psi_output_file: str = "best_psi.mrc"
      best_theta_output_file: str = "best_theta.mrc"
      best_phi_output_file: str = "best_phi.mrc"
@@ -250,6 +250,8 @@ async def handle_results(reader, writer, logger, parameters, write_directly_to_d
      ]
      f.write(" ".join(str(x) for x in temp_double_array) + "\n")
      mrcfile.write(par.scaled_mip_output_file, scaled_mip.astype(np.float32), overwrite=True)
+     if par.mip_output_file != "/dev/null":
+         mrcfile.write(par.mip_output_file, mip.astype(np.float32), overwrite=True)
      peak_coordinates = peak_local_max(scaled_mip, min_distance=int(par.min_peak_radius), exclude_border=50, threshold_abs=expected_threshold)
      result = pd.DataFrame({
          "X_POSITION": peak_coordinates[:,1] * par.pixel_size,
@@ -0,0 +1,79 @@
+ import asyncio
+
+ from dataclasses import dataclass
+ from typing import Union, List
+
+
+ from pycistem.programs import cistem_program
+ from pycistem.programs._cistem_constants import socket_i_have_info, socket_job_result_queue, socket_send_next_job
+
+ @dataclass
+ class Reconstruct3dParameters:
+     input_particle_stack: str
+     input_star_filename: str
+     input_reconstruction: str = "/dev/null"
+     output_reconstruction_1: str = "my_reconstruction1.mrc"
+     output_reconstruction_2: str = "my_reconstruction2.mrc"
+     output_reconstruction_filtered: str = "my_reconstruction_filtered.mrc"
+     output_resolution_statistics: str = "my_resolution_statistics.txt"
+     my_symmetry: str = "C1"
+     first_particle: int = 1
+     last_particle: int = 0
+     pixel_size: float = 1.0
+     molecular_mass_kDa: float = 300.0
+     inner_mask_radius: float = 0.0
+     outer_mask_radius: float = 150.0
+     resolution_limit_rec: float = 0.0
+     resolution_limit_ref: float = 0.0
+     score_weight_conversion: float = 5.0
+     score_threshold: float = 1.0
+     smoothing_factor: float = 1.0
+     padding : float = 1.0
+     normalize_particles: bool = True
+     adjust_scores: bool = True
+     invert_contrast: bool = False
+     exclude_blank_edges: bool = False
+     crop_images: bool = False
+     split_even_odd: bool = True
+     center_mass: bool = False
+     use_input_reconstruction: bool = False
+     threshold_input_3d: bool = True
+     dump_arrays: bool = False
+     dump_file_1: str = "my_dump_file1.dat"
+     dump_file_2: str = "my_dump_file2.dat"
+     correct_ewald_sphere: int = 0
+     max_threads: int = 1
+
+
+ async def handle_job_result_queue(reader, writer, logger):
+
+     length = await reader.readexactly(4)
+     number_of_bytes = int.from_bytes(length, byteorder="little")
+     results = await reader.readexactly(number_of_bytes)
+     return(results)
+
+ async def handle_socket_i_have_info(reader, writer, logger):
+     data = await reader.readexactly(4)
+     length = int.from_bytes(data, byteorder="little")
+     data = await reader.readexactly(length)
+     print(f"Info: {data.decode('utf-8')}")
+
+ async def handle_results(reader, writer, logger):
+     #logger.info("Handling results")
+     await reader.readexactly(4)
+     length = await reader.readexactly(4)
+     number_of_bytes = int.from_bytes(length, byteorder="little")
+     data = await reader.readexactly(number_of_bytes*4)
+
+ def run(parameters: Union[Reconstruct3dParameters,list[Reconstruct3dParameters]],**kwargs):
+
+     if not isinstance(parameters, list):
+         parameters = [parameters]
+     signal_handlers = {
+         socket_job_result_queue : handle_job_result_queue,
+         socket_i_have_info: handle_socket_i_have_info,
+         socket_send_next_job: handle_results
+     }
+     asyncio.run(cistem_program.run("reconstruct3d", parameters, signal_handlers=signal_handlers,**kwargs))
+
+
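
Usage sketch for the new reconstruct3d wrapper; the file names are placeholders and only a few of the dataclass defaults are overridden:

    from pycistem.programs import reconstruct3d

    par = reconstruct3d.Reconstruct3dParameters(
        input_particle_stack="particles.mrc",      # e.g. a stack written by extract_particles below
        input_star_filename="particles.star",
        output_reconstruction_filtered="reconstruction_filtered.mrc",
        pixel_size=1.0,
        molecular_mass_kDa=300.0,
    )
    reconstruct3d.run(par)   # extra keyword arguments are forwarded to cistem_program.run
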
@@ -0,0 +1,106 @@
+ import asyncio
+ from dataclasses import dataclass
+ from typing import Union
+ import numpy as np
+ import struct
+ import mrcfile
+ from functools import partial
+
+
+ from pycistem.programs import cistem_program
+ from pycistem.programs._cistem_constants import socket_program_defined_result, socket_job_result_queue, socket_i_have_an_error
+ from pycistem.core import Image
+
+ @dataclass
+ class RefineCtfParameters:
+     input_particle_images: str
+     input_star_filename: str
+     input_reconstruction: str
+     input_reconstruction_statistics: str = "my_statistics.txt"
+     use_statistics: bool = False
+     ouput_star_filename: str = "/dev/null"
+     ouput_shift_filename: str = "/dev/null"
+     ouput_phase_difference_image: str = "/dev/null"
+     ouput_beamtilt_image: str = "/dev/null"
+     ouput_difference_image: str = "/dev/null"
+     first_particle: int = 0
+     last_particle: int = 0
+     pixel_size: float = 1.0
+     molecular_mass_kDa: float = 300.0
+     inner_mask_radius: float = 0.0
+     outer_mask_radius: float = 150.0
+     low_resolution_limit: float = 300.0
+     high_resolution_limit: float = 3.0
+     defocus_search_range: float = 500.0
+     defocus_step: float = 50.0
+     padding: float = 1.0
+     ctf_refinement: bool = False
+     beamtilt_refinement: bool = False
+     normalize_particles: bool = True
+     invert_contrast: bool = False
+     exclude_blank_edges: bool = True
+     normalize_input_3d: bool = True
+     threshold_input_3d: bool = False
+     job_number_from_gui: int = 0
+     expected_number_of_results_from_gui: int = 0
+     max_threads: int = 1
+
+ def get_np_arrays(bytes,o,i,x,y,numpix):
+     array = np.frombuffer(bytes,offset=o+i*numpix*4, count=numpix,dtype=np.float32).copy()
+     array = array.reshape((y,-1))
+     array = array[:,:x]
+     return array
+
+ async def handle_results(reader, writer, logger, parameters):
+     #logger.info("Handling results")
+     size_of_array= await reader.readexactly(4)
+     result_number= await reader.readexactly(4)
+     number_of_expected_results= await reader.readexactly(4)
+     number_of_floats = int.from_bytes(size_of_array, byteorder="little")
+     result_number = int.from_bytes(result_number, byteorder="little")
+     number_of_expected_results = int.from_bytes(number_of_expected_results, byteorder="little")
+     results = await reader.readexactly(number_of_floats*4)
+     x_dim = int(struct.unpack("<f",results[0:4])[0])
+     y_dim = int(struct.unpack("<f",results[4:8])[0])
+     num_pixels = int(struct.unpack("<f",results[8:12])[0])
+     images_to_process = int(struct.unpack("<f",results[12:16])[0])
+     voltage_kV = struct.unpack("<f",results[16:20])[0]
+     spherical_aberration_mm = struct.unpack("<f",results[20:24])[0]
+     phase_difference_image = get_np_arrays(results,24,0,x_dim,y_dim,num_pixels)
+     phase_difference_image_cistem = Image()
+
+     phase_difference_image_cistem.Allocate(x_dim,y_dim,1,False,True)
+     np.copyto(phase_difference_image_cistem.real_values , phase_difference_image)
+     phase_difference_image_cistem.DivideByConstant(images_to_process)
+     phase_difference_image_cistem.CosineMask(0.45, parameters[result_number].pixel_size / 20.0, False, False, 0.0)
+     phase_difference_image_cistem.QuickAndDirtyWriteSlice(parameters[result_number].ouput_phase_difference_image,1,True,0.0)
+     #mrcfile.write("test.mrc", phase_difference_image.astype(np.float32), overwrite=True)
+     return(results)
+
+ async def handle_job_result_queue(reader, writer, logger):
+
+     length = await reader.readexactly(4)
+     number_of_bytes = int.from_bytes(length, byteorder="little")
+     results = await reader.readexactly(number_of_bytes)
+     return(results)
+
+ async def handle_i_have_an_error(reader, writer, logger):
+     print("I have an error")
+     #number_of_bytes = int.from_bytes(length, byteorder="little")
+     number_of_bytes = 40
+     results = await reader.read(number_of_bytes)
+     print(results)
+     return(results)
+
+ def run(parameters: Union[RefineCtfParameters,list[RefineCtfParameters]],**kwargs):
+
+     if not isinstance(parameters, list):
+         parameters = [parameters]
+     for i, par in enumerate(parameters):
+         par.image_number_for_gui = i
+     signal_handlers = {
+         socket_program_defined_result : partial(handle_results,parameters=parameters),
+         socket_job_result_queue : handle_job_result_queue,
+         socket_i_have_an_error: handle_i_have_an_error
+     }
+     byte_results = asyncio.run(cistem_program.run("refine_ctf", parameters, signal_handlers=signal_handlers,**kwargs))
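
Usage sketch for the new refine_ctf wrapper (inputs are placeholders; note the "ouput_" spelling of the output fields in the dataclass). Enabling beamtilt_refinement and pointing ouput_phase_difference_image at a real path makes handle_results write the averaged phase-difference image, which can then feed estimate_beamtilt above:

    from pycistem.programs import refine_ctf

    par = refine_ctf.RefineCtfParameters(
        input_particle_images="particles.mrc",
        input_star_filename="particles.star",
        input_reconstruction="reference.mrc",
        beamtilt_refinement=True,
        ouput_phase_difference_image="phase_difference.mrc",
    )
    refine_ctf.run(par)
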
@@ -0,0 +1,30 @@
+ import asyncio
+
+ from dataclasses import dataclass
+ from typing import Union, List
+
+
+ from pycistem.programs import cistem_program
+ from pycistem.programs._cistem_constants import socket_send_next_job
+
+ @dataclass
+ class ResampleParameters:
+     input_filename: str
+     output_filename: str
+     is_a_volume: bool = False
+     new_x_size: int = 64
+     new_y_size: int = 64
+     new_z_size: int = 1
+
+
+
+
+
+ def run(parameters: Union[ResampleParameters,list[ResampleParameters]],**kwargs):
+
+     if not isinstance(parameters, list):
+         parameters = [parameters]
+
+     asyncio.run(cistem_program.run("resample", parameters, **kwargs))
+
+
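
Usage sketch for the new resample wrapper (file names are placeholders):

    from pycistem.programs import resample

    par = resample.ResampleParameters(
        input_filename="reference.mrc",
        output_filename="reference_64.mrc",
        is_a_volume=True,
        new_x_size=64, new_y_size=64, new_z_size=64,
    )
    resample.run(par)
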
@@ -83,7 +83,7 @@ def parameters_from_database(database, decolace=False, **kwargs):
      ) for i,movie in movie_info.iterrows()]
      return(par)

- def write_results_to_database(database, parameters, results):
+ def write_results_to_database(database, parameters, results, change_image_assets=True):
      conn = sqlite3.connect(database)
      cur = conn.cursor()
      results = sorted(results, key=lambda x: x["parameter_index"])
@@ -151,9 +151,9 @@ def write_results_to_database(database, parameters, results):
      #else:
      #    image_asset_id = existing_image_asset[0]

-
-     cur.execute("REPLACE INTO IMAGE_ASSETS (IMAGE_ASSET_ID, NAME, FILENAME, POSITION_IN_STACK, PARENT_MOVIE_ID, ALIGNMENT_ID, CTF_ESTIMATION_ID, X_SIZE, Y_SIZE, PIXEL_SIZE, VOLTAGE, SPHERICAL_ABERRATION, PROTEIN_IS_WHITE, ORIGINAL_X_SIZE, ORIGINAL_Y_SIZE, CROP_CENTER_X, CROP_CENTER_Y) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)",
-                 (image_asset_id, movie_info[3], parameters[result["parameter_index"]].output_filename, 1,movie_info[2],max_alignment_id+1, -1, int(xsize), int(ysize), actual_pixel_size, parameters[result["parameter_index"]].acceleration_voltage, movie_info[5], movie_info[4],result["orig_x"],result["orig_y"],result["crop_x"],result["crop_y"]))
+     if change_image_assets:
+         cur.execute("REPLACE INTO IMAGE_ASSETS (IMAGE_ASSET_ID, NAME, FILENAME, POSITION_IN_STACK, PARENT_MOVIE_ID, ALIGNMENT_ID, CTF_ESTIMATION_ID, X_SIZE, Y_SIZE, PIXEL_SIZE, VOLTAGE, SPHERICAL_ABERRATION, PROTEIN_IS_WHITE, ORIGINAL_X_SIZE, ORIGINAL_Y_SIZE, CROP_CENTER_X, CROP_CENTER_Y) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)",
+                     (image_asset_id, movie_info[3], parameters[result["parameter_index"]].output_filename, 1,movie_info[2],max_alignment_id+1, -1, int(xsize), int(ysize), actual_pixel_size, parameters[result["parameter_index"]].acceleration_voltage, movie_info[5], movie_info[4],result["orig_x"],result["orig_y"],result["crop_x"],result["crop_y"]))
      MOVIE_ALIGNMENT_PARAMETERS = {
          "FRAME_NUMBER": range(1,len(result["x_shifts"])+1),
          "X_SHIFT": result["x_shifts"],
@@ -0,0 +1 @@
+ from .extract_particles import extract_particles
@@ -0,0 +1,36 @@
+ from pathlib import Path
+
+ def extract_particles(starfile_filename: Path, stack_filename: Path, box_size: int = 256):
+     """Extract particles from a star file and save them as individual images.
+
+     Args:
+         starfile_filename (str): The filename of the star file.
+         box_size (int, optional): The size of the extracted particles. Defaults to 256.
+     """
+     import starfile
+     import mrcfile
+     import numpy as np
+     from itertools import groupby
+     particle_info = starfile.read(starfile_filename)
+     mrc = mrcfile.new_mmap(stack_filename, (len(particle_info), box_size, box_size), mrc_mode=2, overwrite=True)
+     # Iterate over groupby cisTEMOriginalImageFilename
+
+     for micrograph_filename, subparticles in groupby(particle_info.itertuples(), lambda x: x.cisTEMOriginalImageFilename):
+         micrograph = mrcfile.open(micrograph_filename)
+         if micrograph.data.ndim == 3:
+             micrograph_data = micrograph.data[0].copy()
+         else:
+             micrograph_data = micrograph.data.copy()
+         for particle in subparticles:
+             x = round(particle.cisTEMOriginalXPosition/particle.cisTEMPixelSize)
+             y = round(particle.cisTEMOriginalYPosition/particle.cisTEMPixelSize)
+             particle_image = micrograph_data.take(range(y-box_size//2, y+box_size//2), mode='clip', axis=0).take(range(x-box_size//2, x+box_size//2), mode='clip', axis=1)
+             if particle_image.shape != (box_size, box_size):
+                 raise ValueError(f"Particle at {x},{y} from micrograph {micrograph_filename} {micrograph_data.shape} is out of bounds {particle_image.shape}.")
+
+             particle_image -= particle_image.mean()
+             particle_image /= particle_image.std()
+             mrc.data[particle.cisTEMPositionInStack-1] = particle_image
+             yield
+     mrc.close()
+     return
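
Note: extract_particles is a generator (it yields once per written particle), so it has to be consumed for the stack to be filled. Usage sketch with placeholder paths:

    from pycistem.utils import extract_particles

    for _ in extract_particles("matches.star", "particle_stack.mrc", box_size=256):
        pass   # a progress bar could be driven from here instead
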
@@ -0,0 +1,34 @@
+ import pandas as pd
+ from typing import Union
+ import sqlite3
+ import typer
+ from pathlib import Path
+ from typing_extensions import Annotated
+ import starfile
+
+ app = typer.Typer()
+
+
+ @app.command()
+ def plot(database: Annotated[Path, typer.Argument(...,help="The database file to use")],
+          refinement_id: Annotated[int, typer.Argument(...,help="The refinement to use")],
+          class_id: Annotated[int, typer.Argument(...,help="The class to cus")],
+          input_star_file: Annotated[Path, typer.Argument(...,help="The starfile to use")],
+          output_star_file: Annotated[Path, typer.Argument(...,help="The starfile to write to")],
+          ):
+     db = sqlite3.connect(database)
+     refinements = []
+     refinment_info = pd.read_sql_query(f"SELECT * FROM REFINEMENT_LIST WHERE REFINEMENT_ID = {refinement_id}", db).iloc[0]
+     refinment_result = pd.read_sql_query(f"SELECT * FROM REFINEMENT_RESULT_{refinement_id}_{class_id}", db)
+     refinment_package_info = pd.read_sql_query(f"SELECT * FROM REFINEMENT_PACKAGE_CONTAINED_PARTICLES_{refinment_info['REFINEMENT_PACKAGE_ASSET_ID']}", db)
+
+     starfile_info = starfile.read(input_star_file)
+
+     original_ids = refinment_package_info["ORIGINAL_PARTICLE_POSITION_ASSET_ID"].to_list()
+     subset_starfile_info = starfile_info.iloc[original_ids].copy()
+     subset_starfile_info["cisTEMOccupancy"] = refinment_result["OCCUPANCY"].to_list()
+     subset_starfile_info["cisTEMScore"] = refinment_result["SCORE"].to_list()
+     starfile.write(subset_starfile_info, output_star_file)
+
+ if __name__ == "__main__":
+     app()
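
The new utils scripts are Typer CLIs whose commands can also be called as plain functions. A hedged sketch for this one (all arguments are placeholders; the command is named plot even though it writes a subset star file):

    from pycistem.utils import move_class_into_unbinned_particle_stack as mc

    mc.plot("my_project.db", 12, 1, "particles.star", "class1_particles.star")
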
@@ -0,0 +1,26 @@
+ import pandas as pd
+ from typing import Union
+ import sqlite3
+ import typer
+ from pathlib import Path
+ from typing_extensions import Annotated
+ import matplotlib.pyplot as plt
+
+ app = typer.Typer()
+
+
+ @app.command()
+ def plot(database: Annotated[Path, typer.Argument(...,help="The database file to use")],
+          start_refinement: Annotated[int, typer.Argument(...,help="The refinement to start from")]):
+     db = sqlite3.connect(database)
+     refinment_info = pd.read_sql_query(f"SELECT * FROM REFINEMENT_LIST WHERE REFINEMENT_ID = {start_refinement}", db)
+     class_info = pd.read_sql_query(f"SELECT * FROM REFINEMENT_DETAILS_{refinment_info['REFINEMENT_ID'].values[0]}", db)
+     class_info.sort_values(by="AVERAGE_OCCUPANCY", inplace=True)
+     # print as a table the CLASS_NUMBER and AVERAGE_OCCUPANCY
+     print(class_info[['CLASS_NUMBER', 'AVERAGE_OCCUPANCY']])
+
+
+
+
+ if __name__ == "__main__":
+     app()
@@ -0,0 +1,51 @@
+ import pandas as pd
+ from typing import Union
+ import sqlite3
+ import typer
+ from pathlib import Path
+ from typing_extensions import Annotated
+ import matplotlib.pyplot as plt
+
+ app = typer.Typer()
+
+ def return_class_occupancies(row, db):
+     class_info = pd.read_sql_query(f"SELECT * FROM REFINEMENT_DETAILS_{row['REFINEMENT_ID']}", db)
+     return [row['REFINEMENT_ID']] + class_info['AVERAGE_OCCUPANCY'].to_list()
+
+ def return_num_part_with_occ_higher_than(row, db, min_occ):
+     class_info = pd.read_sql_query(f"SELECT * FROM REFINEMENT_DETAILS_{row['REFINEMENT_ID']}", db)
+     result = [row['REFINEMENT_ID']]
+     for class_id in class_info['CLASS_NUMBER']:
+         particle_info = pd.read_sql_query(f"SELECT OCCUPANCY FROM REFINEMENT_RESULT_{row['REFINEMENT_ID']}_{class_id}", db)
+         result.append(len(particle_info[particle_info['OCCUPANCY'] > min_occ]))
+     print(result)
+     return result
+
+ @app.command()
+ def plot(database: Annotated[Path, typer.Argument(...,help="The database file to use")],
+          start_refinement: Annotated[int, typer.Argument(...,help="The refinement to start from")]):
+     db = sqlite3.connect(database)
+     refinements = []
+     refinment_info = pd.read_sql_query(f"SELECT * FROM REFINEMENT_LIST WHERE REFINEMENT_ID = {start_refinement}", db)
+     while len(refinment_info) == 1:
+         reference_refinement_id = refinment_info["REFINEMENT_ID"].values[0]
+         refinements.append(refinment_info)
+         refinment_info = pd.read_sql_query(f"SELECT * FROM REFINEMENT_LIST WHERE STARTING_REFINEMENT_ID = {reference_refinement_id}", db)
+     refinements = pd.concat(refinements)
+     av_occs = refinements.apply(return_class_occupancies, axis=1, result_type='expand', args=(db,))
+     av_occs.plot(x=0)
+     plt.savefig("average_occupancies.png")
+     plt.figure()
+     occ_0_5 = refinements.apply(return_num_part_with_occ_higher_than, axis=1, result_type='expand', args=(db, 50))
+     occ_0_5.plot(x=0)
+     plt.savefig("occupancies_0_5.png")
+     plt.figure()
+     occ_0_9 = refinements.apply(return_num_part_with_occ_higher_than, axis=1, result_type='expand', args=(db, 90))
+     occ_0_9.plot(x=0)
+     plt.savefig("occupancies_0_9.png")
+
+
+
+
+ if __name__ == "__main__":
+     app()
@@ -0,0 +1,90 @@
+ import pandas as pd
+ from typing import Union
+ import sqlite3
+ import typer
+ from pathlib import Path
+ from typing_extensions import Annotated
+ import matplotlib.pyplot as plt
+ import matplotlib.animation as animation
+
+ app = typer.Typer()
+
+ def return_fsc(db,refinement,classid):
+     fsc = pd.read_sql_query(f"SELECT * FROM REFINEMENT_RESOLUTION_STATISTICS_{refinement}_{classid}", db)
+     return(fsc)
+
+
+
+
+ @app.command()
+ def plot(database: Annotated[Path, typer.Argument(...,help="The database file to use")],
+          start_refinement: Annotated[int, typer.Argument(...,help="The refinement to start from")]):
+     db = sqlite3.connect(database)
+     refinements = []
+     refinment_info = pd.read_sql_query(f"SELECT * FROM REFINEMENT_LIST WHERE REFINEMENT_ID = {start_refinement}", db)
+     while len(refinment_info) == 1:
+         reference_refinement_id = refinment_info["REFINEMENT_ID"].values[0]
+         refinements.append(refinment_info)
+         refinment_info = pd.read_sql_query(f"SELECT * FROM REFINEMENT_LIST WHERE STARTING_REFINEMENT_ID = {reference_refinement_id}", db)
+     refinements = pd.concat(refinements)
+
+     fscs = []
+     for i in range(refinements.iloc[0]['NUMBER_OF_CLASSES']):
+         dat = return_fsc(db,refinements.iloc[0]['REFINEMENT_ID'],i+1)
+         dat["CLASS"] = i+1
+         fscs.append(dat)
+     dat = pd.concat(fscs)
+
+
+
+     fig, ax = plt.subplots()
+     linplts = {}
+     for key, grp in dat[dat['RESOLUTION'] > 4.0].groupby(['CLASS']):
+         linplts[key] = plt.plot(grp['SHELL'], grp['PART_FSC'],label=key)[0]
+     print(linplts)
+     plt.ylim(0,1.05)
+
+
+     def update(frame):
+         fscs = []
+         for i in range(refinements.iloc[frame]['NUMBER_OF_CLASSES']):
+             dat = return_fsc(db,refinements.iloc[frame]['REFINEMENT_ID'],i+1)
+             dat["CLASS"] = i+1
+             fscs.append(dat)
+         dat = pd.concat(fscs)
+         for key, grp in dat[dat['RESOLUTION'] > 4.0].groupby(['CLASS']):
+             print(key)
+             linplts[key].set_xdata(grp['SHELL'])
+             linplts[key].set_ydata(grp['PART_FSC'])
+
+
+
+
+         return linplts.values()
+
+
+     ani = animation.FuncAnimation(fig=fig, func=update, frames=len(refinements), interval=300)
+     plt.show()
+
+     return()
+
+
+
+
+     av_occs = refinements.apply(return_class_occupancies, axis=1, result_type='expand', args=(db,))
+     av_occs.plot(x=0)
+     plt.savefig("average_occupancies.png")
+     plt.figure()
+     occ_0_5 = refinements.apply(return_num_part_with_occ_higher_than, axis=1, result_type='expand', args=(db, 0.5))
+     occ_0_5.plot(x=0)
+     plt.savefig("occupancies_0_5.png")
+     plt.figure()
+     occ_0_9 = refinements.apply(return_num_part_with_occ_higher_than, axis=1, result_type='expand', args=(db, 0.9))
+     occ_0_9.plot(x=0)
+     plt.savefig("occupancies_0_9.png")
+
+
+
+
+ if __name__ == "__main__":
+     app()
@@ -0,0 +1,129 @@
+ import pandas as pd
+ from typing import Union
+ import sqlite3
+ import starfile
+ import typer
+ from pathlib import Path
+ from typing_extensions import Annotated
+ from collections import defaultdict
+ import matplotlib.pyplot as plt
+ import numpy as np
+ import matplotlib.colors as mcolors
+ app = typer.Typer()
+
+
+ def return_num_part_per_cond_with_occ_higher_than(row, db, min_occ, tmp):
+     class_info = pd.read_sql_query(f"SELECT * FROM REFINEMENT_DETAILS_{row['REFINEMENT_ID']}", db)
+     all_particle_info = pd.read_sql_query(f"SELECT * FROM REFINEMENT_PACKAGE_CONTAINED_PARTICLES_{row['REFINEMENT_PACKAGE_ASSET_ID']}", db)
+
+     result = []
+     for class_id in class_info['CLASS_NUMBER']:
+         particle_info = pd.read_sql_query(f"SELECT POSITION_IN_STACK,PSI,OCCUPANCY FROM REFINEMENT_RESULT_{row['REFINEMENT_ID']}_{class_id}", db)
+         particle_info = particle_info.merge(all_particle_info, on="POSITION_IN_STACK")
+         particle_info = particle_info.merge(tmp, left_on='ORIGINAL_PARTICLE_POSITION_ASSET_ID', right_index=True)
+         particle_info['experimental_condition'] = particle_info['cisTEMOriginalImageFilename'].str.split("/").str[-4]
+         particle_info = particle_info[particle_info['OCCUPANCY'] > min_occ]
+         result.append(particle_info['experimental_condition'].value_counts())
+     return result
+
+ @app.command()
+ def plot(database: Annotated[Path, typer.Argument(...,help="The database file to use")],
+          start_refinement: Annotated[int, typer.Argument(...,help="The refinement to start from")],
+          starfile_filename: Annotated[Path, typer.Argument(...,help="The starfile to use")]):
+     db = sqlite3.connect(database)
+     tmp = starfile.read(starfile_filename)
+     counts_in_cond = defaultdict(defaultdict[lambda: 0])
+     refinment_info = pd.read_sql_query(f"SELECT * FROM REFINEMENT_LIST WHERE REFINEMENT_ID = {start_refinement}", db)
+     number_per_class = return_num_part_per_cond_with_occ_higher_than(refinment_info.iloc[0], db, 0.9, tmp)
+     for class_id, class_counts in enumerate(number_per_class):
+         for cond, count in class_counts.items():
+             exp = cond
+             if class_id not in counts_in_cond[exp]:
+                 counts_in_cond[exp][class_id] = count
+             else:
+                 counts_in_cond[exp][class_id] += count
+     condition_list = sorted(list(counts_in_cond.keys()))
+     ucs = {}
+     for i, condition in enumerate(condition_list):
+         exp = condition.split("_")[2]
+         ucs[exp] = 1
+
+     #create a categorical colormap using ListedColormap
+     cmap = mcolors.ListedColormap(mcolors.TABLEAU_COLORS.values())
+
+     # get the number of colors in the colormap
+     num_colors = cmap.N
+
+     # create a dictionary mapping each label to a color index
+     color_indices = {label: i for i, label in enumerate(ucs.keys())}
+
+     cond_position = {'24hbr': 3, '48hbr': 4, 'C': 2}
+     states_this= {
+         'A/P/eEF1a' : [15,19,8,4,18,3],
+         'eEF2a': [11,14,10,20,9,12],
+         'pA/P' : [5,2],
+         'pA/P/eEF2': [6],
+
+     }
+     states_pivot = { v:k for k in states_this for v in states_this[k]}
+
+     datas = defaultdict(list)
+     state_count = {}
+     for cond in cond_position.keys():
+         state_count[cond] = defaultdict(lambda: 0)
+
+     for i, condition in enumerate(condition_list):
+         #if condition.split("_")[0] != "20230317":
+         #    continue
+         exp = condition.split("_")[2]
+
+         counts = np.zeros(20)
+         for i in range(20):
+             if i in counts_in_cond[condition]:
+                 counts[i] = counts_in_cond[condition][i]
+
+         if np.sum(counts) < 1000:
+             continue
+         #make colorkei for exp
+         for classid, frac in enumerate(counts):
+             if classid+1 in states_pivot:
+                 state_count[exp][states_pivot[classid+1]] += frac
+             datas[classid * 5 + cond_position[exp]].append(frac/np.sum(counts))
+         #plt.plot(np.arange(len(counts))+1+i*0.1,counts/np.sum(counts), 'o', color=cmap(color_indices[exp]))
+
+
+     pos = list(datas.keys())
+     data = list(datas.values())
+     state_percent = {}
+     for cond in state_count.keys():
+         state_percent[cond] = {}
+         par_sum = np.sum(list(state_count[cond].values()))
+         for state in state_count[cond].keys():
+             state_percent[cond][state] = state_count[cond][state]/par_sum
+     print(state_percent)
+     fig, ax = plt.subplots(nrows=1, ncols=1, figsize=(13, 6))
+
+     bplot = ax.boxplot(data, positions=pos, widths=0.7, patch_artist=True,)
+
+     colors = ["red","green","green","yellow","orange"]
+     for i,patch in enumerate(bplot['boxes']):
+         print(i)
+         patch.set_facecolor(colors[pos[i]%5])
+     ax.set_title('Class occupancies grouped by condition', fontsize=10)
+     from matplotlib.lines import Line2D
+     legend_elements = [Line2D([0], [0], marker='o', color='green', label='Control',
+                               markerfacecolor='green', markersize=10),
+                        Line2D([0], [0], marker='o', color='yellow', label='24h brequinar',
+                               markerfacecolor='yellow', markersize=10),
+                        Line2D([0], [0], marker='o', color='orange', label='48h brequinar',
+                               markerfacecolor='orange', markersize=10)]
+     ax.legend(handles=legend_elements, loc='upper left')
+     plt.xticks(ticks = np.arange(20)*5 +3 , labels = np.arange(20)+1)
+     plt.savefig("condition.png")
+
+
+
+
+
+ if __name__ == "__main__":
+     app()
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: pycistem
- Version: 0.3.5
+ Version: 0.4.1
  Summary: Python tools to run cisTEM programs and code
  Project-URL: Documentation, https://github.com/jojoelfe/pycistem#readme
  Project-URL: Issues, https://github.com/jojoelfe/pycistem/issues
@@ -0,0 +1,36 @@
+ pycistem/__init__.py,sha256=XZu46-aF_8gLVnOVd3lZMlY3kODX7BefoWRkjG53mno,138
+ pycistem/config.py,sha256=kmI0GlEXdj3OQF3dfG0yLcvtnfD-f3Yu9QxTaxDq2Z8,569
+ pycistem/core/__init__.py,sha256=fvT4XAcUTVWpRpzX5tJC8K5L9Jpl4EeH23M0eB-TJi4,33
+ pycistem/core/core.cpp,sha256=dI1ovIDraXetBo09mTOabyM4hAgPFIQndjT8uvayLwg,55905
+ pycistem/core/database.cpp,sha256=pJc031gr89v4wSltEiQ9dGgyZ_gZOZ2gPEOdQqkgzJ4,25174
+ pycistem/core/euler_search.cpp,sha256=bKJTSXILbXjUgoZKv63HzOscwkOJBhspetIdnoT3cmI,2986
+ pycistem/core/run_profiles.cpp,sha256=yWxplGb2lF-ck9Ov9RTN1M-Y8_0v1qpLPx7WFPQ4rV0,5019
+ pycistem/database/__init__.py,sha256=W6kWXa7rbIFGnAPixobO1ctnqZyCQE2EVXTx25cwNNY,13708
+ pycistem/programs/__init__.py,sha256=JBwtNchlzyt-sT_FCOmasIUVdOhgr2YYlxGN8HXdJB8,465
+ pycistem/programs/_cistem_constants.py,sha256=UarwJf1hG-XmzOm55gdxVpAVqVBXgDGCpwrw-fROBw0,1073
+ pycistem/programs/apply_ctf.py,sha256=T3CD8aM4EHARY3rbJEjI0mGHrN84XCS_FEgcyFsPMPE,2090
+ pycistem/programs/cistem_program.py,sha256=fuuCB0nzfyvSSx8X19xIPMVkZS08oEwWg7atDGb2N9Q,9662
+ pycistem/programs/ctffind.py,sha256=CdEyBSy9Ax6rkwUSCDo0roaXhyfLEmTOFXFNvF-z7aI,11999
+ pycistem/programs/estimate_beamtilt.py,sha256=3xU254DpVuNUWqS3kXeB8QbA_mfEay8s29QIxBmfUzM,2192
+ pycistem/programs/match_template.py,sha256=7S-jUr9ZqXCTLeDOp99Q_YERxR1EziHxIFb3QbmxfNU,15792
+ pycistem/programs/reconstruct3d.py,sha256=vXzg7gSPl34EWRsuXc21GCPjkpfwx3bC8xu7pWI2RZk,2840
+ pycistem/programs/refine_ctf.py,sha256=_e08Zpf4M8Fy9fBQ2WtS6v_C8od36SUwS4jEFnelHrE,4454
+ pycistem/programs/refine_template.py,sha256=pYcKvxhrnNVc-e_F-_SWefDRv3oc8WHPiJ2Vc6BzVNA,6850
+ pycistem/programs/refine_template_dev.py,sha256=mwapt1Xaryf7bMuoblQc0cczCIFLiSHU4FvWprVT9xY,802
+ pycistem/programs/refine_template_niko.py,sha256=4ANTZNC5JyNVRzA55wfQvjCty68c_-f6_8LePE6oca0,5730
+ pycistem/programs/resample.py,sha256=JOZHbLz3yJne6aT3zDg0c2SWac7G7Q7cC14oAwUP1wM,648
+ pycistem/programs/run_profile.py,sha256=iDeFxdhzYvGnRStVEAlu6rb5d91j96XneUKSznSR6JY,431
+ pycistem/programs/unblur.py,sha256=nEnAYvL77q4Poa88FEOX7UsQflsjujBLSz8kJYLqrRc,12242
+ pycistem/programs/unblur_patch.py,sha256=DIWLoAPHUk3XuJ8zDGEsDgKi4IJ97fw4QNEWphL6iTc,10306
+ pycistem/utils/__init__.py,sha256=kh4WR0GtPw_ljuJVvv75j8kkRF2w8hja3IYb2BregUs,48
+ pycistem/utils/extract_particles.py,sha256=QqJqmzsXosGEhDh_lflUrge3kzuOBXfQyxVlKpZhX-4,1849
+ pycistem/utils/move_class_into_unbinned_particle_stack.py,sha256=ie6b0-4GvyapAhRklqHPPt6jRiLrGY26vFtUb9Gv5tI,1565
+ pycistem/utils/order_by_class_occupancy.py,sha256=X1hOnoxtnT0-_0gZcQXJawkpVOyJATZCPoDC3fuICrs,919
+ pycistem/utils/plot_class_occupancy.py,sha256=sI7P7L2X8gULg0crc1pNxaJSwYWxORPkw5J6-BviMHQ,2217
+ pycistem/utils/plot_classification_fsc_movie.py,sha256=d81aaOVugzkhZPEY1EZX3STWiU-anmkp4sPhjj-PN9Y,2847
+ pycistem/utils/plot_occupancy_by_condition.py,sha256=tSa0LYw2HELs50PYqlI3ph37Y8krNL40MWwQGqkK-68,5478
+ pycistem/core/core.cpython-310-x86_64-linux-gnu.so,sha256=1ewsoXSquTm593JQe8u6JYV54ApY9Y5VzhmyhD2tfDk,10516016
+ pycistem-0.4.1.dist-info/METADATA,sha256=p9Yn3G2V1nR6ffhR7ou3TWdKUAdICCHCnaPV7MNxGFs,14517
+ pycistem-0.4.1.dist-info/WHEEL,sha256=MDbHf7cDPMxTZ47uu_EsVWGUAyqzhS8DTBXKmO0wMaI,109
+ pycistem-0.4.1.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ pycistem-0.4.1.dist-info/RECORD,,
@@ -1,4 +1,4 @@
  Wheel-Version: 1.0
- Generator: hatchling 1.18.0
+ Generator: hatchling 1.21.1
  Root-Is-Purelib: false
  Tag: cp310-cp310-manylinux_2_27_x86_64
@@ -1,25 +0,0 @@
- pycistem/__init__.py,sha256=Ttm0g5ElZOQuJSmY9u_dg4ne7i3zH7pg1WEunerNsO0,131
- pycistem/config.py,sha256=kmI0GlEXdj3OQF3dfG0yLcvtnfD-f3Yu9QxTaxDq2Z8,569
- pycistem/core/__init__.py,sha256=fvT4XAcUTVWpRpzX5tJC8K5L9Jpl4EeH23M0eB-TJi4,33
- pycistem/core/core.cpp,sha256=L3OWaVhZSyrgM-6e3b-1KhFHr-8ktuxYkTzTADxVnsM,55866
- pycistem/core/database.cpp,sha256=pJc031gr89v4wSltEiQ9dGgyZ_gZOZ2gPEOdQqkgzJ4,25174
- pycistem/core/euler_search.cpp,sha256=bKJTSXILbXjUgoZKv63HzOscwkOJBhspetIdnoT3cmI,2986
- pycistem/core/run_profiles.cpp,sha256=yWxplGb2lF-ck9Ov9RTN1M-Y8_0v1qpLPx7WFPQ4rV0,5019
- pycistem/database/__init__.py,sha256=vXaaxnJp5hNO69I7cC6xL6AeuHx5mEh2ryXRxD-Zs3s,13030
- pycistem/programs/__init__.py,sha256=hgNeKgKGNKC1qmHXqa3fextal740IRlpQxrgQ0aewOg,286
- pycistem/programs/_cistem_constants.py,sha256=Lzn1yV-GiUuvERoyPjpwsj6vxJBYD7coya-vuvK7VfI,1073
- pycistem/programs/apply_ctf.py,sha256=OP9jw7C0t4moc9EJeqYhb58DafUwUUJbySPFbNxJb1U,2072
- pycistem/programs/cistem_program.py,sha256=BQxR5SuCzJu6MDae3DXUEHemMuUcGkvnoDVXpKTZKC0,9515
- pycistem/programs/ctffind.py,sha256=CdEyBSy9Ax6rkwUSCDo0roaXhyfLEmTOFXFNvF-z7aI,11999
- pycistem/programs/match_template.py,sha256=p7YgJiE6sHtsyY6PMbWZbi6hw7c9CrEUACSMX-_Ov1Q,15664
- pycistem/programs/refine_template.py,sha256=pYcKvxhrnNVc-e_F-_SWefDRv3oc8WHPiJ2Vc6BzVNA,6850
- pycistem/programs/refine_template_dev.py,sha256=mwapt1Xaryf7bMuoblQc0cczCIFLiSHU4FvWprVT9xY,802
- pycistem/programs/refine_template_niko.py,sha256=4ANTZNC5JyNVRzA55wfQvjCty68c_-f6_8LePE6oca0,5730
- pycistem/programs/run_profile.py,sha256=iDeFxdhzYvGnRStVEAlu6rb5d91j96XneUKSznSR6JY,431
- pycistem/programs/unblur.py,sha256=rX11xb2kmIhv6aIr4_PZfszLqvmFq0ws4xN6QYXW-dA,12185
- pycistem/programs/unblur_patch.py,sha256=DIWLoAPHUk3XuJ8zDGEsDgKi4IJ97fw4QNEWphL6iTc,10306
- pycistem/core/core.cpython-310-x86_64-linux-gnu.so,sha256=1bRliJXBXsIgNrjETcxQbrxazWyBzIdRv_XxzWSyGdI,10486440
- pycistem-0.3.5.dist-info/METADATA,sha256=HzCtlEwoh3QV0yQkILqc6A8OFBDdgp3txckoKrLg4zY,14517
- pycistem-0.3.5.dist-info/WHEEL,sha256=nminjEtVxPptfkYsuxzltL0oCfM2Nb0dEN2IiTSPl2g,109
- pycistem-0.3.5.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
- pycistem-0.3.5.dist-info/RECORD,,