cosmic-popsynth 3.6.2__cp313-cp313-macosx_14_0_arm64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,544 @@
1
+ #!/usr/bin/env python
2
+ # Code: cosmic-pop.py
3
+ # Version: 1
4
+ # Version changes: SAMPLE FIXED POPULATION OF BINARIES AND EVOLVE WITH BSE;
5
+ # COMPUTE RATES AND NUMBERS FOR EACH POPULATION ACCORDING
6
+ # TO FLAGS SET BY USER
7
+ #
8
+ # Edited on: 8 SEP 2015
9
+
10
+
11
+ ##############################################################################
12
+ # IMPORT ALL NECESSARY PYTHON PACKAGES
13
+ ##############################################################################
14
+ import argparse
15
+ import schwimmbad
16
+
17
+ import time
18
+ from time import sleep
19
+ import json
20
+
21
+ import numpy as np
22
+ import pandas as pd
23
+ from pandas.errors import PerformanceWarning
24
+ import warnings
25
+ import h5py as h5
26
+
27
+ from cosmic.sample.initialbinarytable import InitialBinaryTable
28
+ from cosmic import Match, utils
29
+ from cosmic.evolve import Evolve
30
+ from cosmic._version import __version__
31
+
32
+ from schwimmbad import MPIPool
33
+ import sys
34
+
35
+ def str2bool(v):
36
+ if isinstance(v, bool):
37
+ return v
38
+ if v.lower() in ('yes', 'true', 't', 'y', '1'):
39
+ return True
40
+ elif v.lower() in ('no', 'false', 'f', 'n', '0'):
41
+ return False
42
+ else:
43
+ raise argparse.ArgumentTypeError('Boolean value expected.')
44
+
45
+ ###############################################################################
46
+ # DEFINE COMMANDLINE ARGUMENTS
47
+ ###############################################################################
48
+
49
+ def binfrac_type(value):
50
+ try:
51
+ return float(value)
52
+ except ValueError:
53
+ return value
54
+
55
+ def parse_commandline():
56
+ """Parse the arguments given on the command-line.
57
+ """
58
+ # Parse any inifile specification
59
+ # We make this parser with add_help=False so that
60
+ # it doesn't parse -h and print help.
61
+ conf_parser = argparse.ArgumentParser(
62
+ description=__doc__, # printed with -h/--help
63
+ # Don't mess with format of description
64
+ formatter_class=argparse.RawDescriptionHelpFormatter,
65
+ # Turn off help, so we print all options in response to -h
66
+ add_help=False
67
+ )
68
+ conf_parser.add_argument("--inifile",
69
+ help="Name of ini file of params",
70
+ metavar="FILE",)
71
+ args, remaining_argv = conf_parser.parse_known_args()
72
+
73
+ defaults = {}
74
+ if not (args.inifile is None and (('-h' in remaining_argv) or ('--help' in remaining_argv))):
75
+ BSEDict, seed_int, filters, convergence, sampling = utils.parse_inifile(args.inifile)
76
+ defaults.update(sampling)
77
+ defaults.update(filters)
78
+ defaults.update(convergence)
79
+ defaults.update({'seed' : seed_int})
80
+ defaults.update({'inifile' : args.inifile})
81
+
82
+ # Parse rest of arguments
83
+ # Don't suppress add_help here so it will handle -h
84
+ parser = argparse.ArgumentParser(
85
+ # Inherit options from config_parser
86
+ parents=[conf_parser]
87
+ )
88
+ parser.set_defaults(**defaults)
89
+ parser.add_argument("--final-kstar1",
90
+ help="Specify the final condition of kstar1 "
91
+ ", you want systems to end at for your samples",
92
+ required=True, type=int, nargs='+')
93
+ parser.add_argument("--final-kstar2",
94
+ help="Specify the final condition of kstar2, you want "
95
+ "systems to end at for your samples",
96
+ required=True, type=int, nargs='+')
97
+ parser.add_argument("--Niter",
98
+ help="Number of iterations of binaries "
99
+ "to try, will check ever Nstep for convergence",
100
+ type=int, default=10000000)
101
+ parser.add_argument("--Nstep",
102
+ help="Number of binaries to try before checking for "
103
+ "convergence, it will check ever Nstep binaries until "
104
+ "it reach Niter binaries", type=int, default=10000)
105
+ parser.add_argument("--max-wall-time", type=int, default=3155760,
106
+ help="Maximum wall time (seconds) for sampling binaries")
107
+ parser.add_argument("--binary_state", nargs='+', type=int)
108
+ parser.add_argument("--sampling_method")
109
+ parser.add_argument("--primary_model", help="Chooses the initial primary mass function from: salpeter55, kroupa93, kroupa01", type=str)
110
+ parser.add_argument("--binfrac_model", help="Chooses the binary fraction model from: a float between [0,1], vanHaaften, and offner23", type=binfrac_type)
111
+ parser.add_argument("--ecc_model", help="Chooses the initial eccentricity distribution model from: thermal, uniform, and sana12", type=str)
112
+ parser.add_argument("--porb_model", help="Chooses the initial orbital period distribution model from: log_uniform, sana12, renzo19, raghavan10, moe19, and martinez26", type=str)
113
+ parser.add_argument("--SF_start", help="Sets the time in the past when star formation initiates in Myr", type=float)
114
+ parser.add_argument("--SF_duration", help="Sets the duration of constant star formation in Myr", type=float)
115
+ parser.add_argument("--metallicity", type=float)
116
+ parser.add_argument("--convergence_params", nargs='+', help="specifies the list of parameters for which you"
117
+ " would like to track the distribution shapes for convergence")
118
+ parser.add_argument("--convergence_limits", type=json.loads, help="dictionary that can contain limits for convergence params")
119
+ parser.add_argument("--pop_select", help="Used in combination with the specified final_kstar1 and final_kstar2 values"
120
+ " to select the subpopulation of interest from the evolved population")
121
+ parser.add_argument("--match", type=float, help="provides the tolerance for the convergence calculation")
122
+ parser.add_argument("--apply_convergence_limits", type=str2bool, nargs='?',
123
+ const=True, default=False, help="filters the evolved binary population to contain"
124
+ " only the binaries that satsify the convergence limits")
125
+ parser.add_argument("--seed", type=int)
126
+ parser.add_argument("--verbose", action="store_true", default=False,
127
+ help="Run in Verbose Mode")
128
+ parser.add_argument("--complib",type=str,default="zlib",
129
+ help="HDFStore compression library")
130
+ parser.add_argument("--complevel",type=int,default=0,
131
+ help="HDFStore compression level")
132
+
133
+ group = parser.add_mutually_exclusive_group()
134
+ group.add_argument("-n", "--nproc",
135
+ help="number of processors", type=int, default=1)
136
+ group.add_argument("--mpi", dest="mpi", default=False,
137
+ action="store_true", help="Run with MPI.")
138
+
139
+ args = parser.parse_args(remaining_argv)
140
+
141
+ if len(args.final_kstar1) > 2 or len(args.final_kstar2) > 2:
142
+ raise parser.error('final kstar1 and final kstar2 '
143
+ 'must be either a single value or '
144
+ 'a range between two values.')
145
+
146
+ if (len(args.final_kstar1) == 2):
147
+ if (args.final_kstar1[0] >= args.final_kstar1[1]):
148
+ raise parser.error('Range provided for final-kstar1 invalid')
149
+
150
+ if (len(args.final_kstar2) == 2):
151
+ if (args.final_kstar2[0] >= args.final_kstar2[1]):
152
+ raise parser.error('Range provided for final-kstar2 invalid')
153
+
154
+ if (len(args.final_kstar2) == 1) and (len(args.final_kstar1) == 1):
155
+ if (args.final_kstar2 > args.final_kstar1):
156
+ raise parser.error('final-kstar1 must be greater than or equal to '
157
+ 'final-kstar2.')
158
+
159
+ return args
160
+
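+ # For reference, an illustrative invocation of this script (installed as the
+ # `cosmic-pop` console script). All flags shown are defined in the parser above;
+ # the ini file name and numeric values are placeholders, not recommendations:
+ #
+ #   cosmic-pop --inifile Params.ini --final-kstar1 13 14 --final-kstar2 13 14 \
+ #              --Nstep 5000 --Niter 1000000 -n 2
+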
161
+ ###############################################################################
162
+ # BEGIN MAIN FUNCTION
163
+ ###############################################################################
164
+ if __name__ == '__main__':
165
+
166
+ # READ COMMANDLINE ARGUMENTS
167
+ ###########################################################################
168
+ args = parse_commandline()
169
+ pool = schwimmbad.choose_pool(mpi=args.mpi, processes=args.nproc)
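+ # With an MPI pool, worker ranks block in pool.wait() below, serving evolution
+ # tasks, and exit when the master closes the pool; only the master rank runs
+ # the rest of this script.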
170
+ if isinstance(pool, MPIPool):
171
+ if not pool.is_master():
172
+ pool.wait()
173
+ sys.exit(0)
174
+ nproc = len(pool.workers)
175
+ else:
176
+ nproc = args.nproc
177
+
178
+ # SET TIME TO TRACK COMPUTATION TIME
179
+ ###########################################################################
180
+ start_time = time.time()
181
+
182
+ # READ AND PARSE INIFILE
183
+ ###########################################################################
184
+ BSEDict, seed_int, filters, convergence, sampling = utils.parse_inifile(args.inifile)
185
+
186
+ # we now overwrite the inifile values with what was specified from the command line
187
+ # (which could mean not overwriting anything at all because they are populated
188
+ # by default from the inifile).
189
+ for argument in vars(args):
190
+ if argument in filters.keys():
191
+ if filters[argument] != getattr(args, argument):
192
+ warnings.warn("You are overriding the inifile value of {0}={1} "
193
+ "with {0}={2} from the commandline".format(argument, filters[argument], getattr(args, argument)))
194
+ filters[argument] = getattr(args, argument)
195
+
196
+ if argument in convergence.keys():
197
+ if convergence[argument] != getattr(args, argument):
198
+ warnings.warn("You are overriding the inifile value of {0}={1} "
199
+ "with {0}={2} from the commandline".format(argument, convergence[argument], getattr(args, argument)))
200
+ convergence[argument] = getattr(args, argument)
201
+
202
+ if argument in sampling.keys():
203
+ if (sampling[argument] == "independent") or (getattr(args, argument) == "independent"):
204
+ for model in ["primary_model", "porb_model", "ecc_model", "binfrac_model"]:
205
+ if (model not in sampling.keys()) and not (getattr(args, model)):
206
+ raise ValueError("You have selected the {0} sampler "
207
+ "but not specified a model for {1} "
208
+ "in the inifile or command line".format(sampling[argument], model))
209
+ if sampling[argument] != getattr(args, argument):
210
+ warnings.warn("You are overriding the inifile value of {0}={1} "
211
+ "with {0}={2} from the commandline".format(argument, sampling[argument], getattr(args, argument)))
212
+ sampling[argument] = getattr(args, argument)
213
+
214
+ if argument == 'seed':
215
+ if getattr(args, argument) != seed_int:
216
+ warnings.warn("You are overriding the inifile value of {0}={1} "
217
+ "with {0}={2} from the commandline".format(argument, seed_int, getattr(args, argument)))
218
+ seed_int = getattr(args, argument)
219
+
220
+ # Check that the values in BSEDict, filters, and convergence are valid
221
+ utils.error_check(BSEDict, filters, convergence, sampling)
222
+
223
+ if seed_int != 0:
224
+ np.random.seed(seed_int)
225
+ else:
226
+ np.random.seed(0)
227
+
228
+ # Set up final_kstar1 and final_kstar2 strings for saved data files
229
+ if len(args.final_kstar1) == 2:
230
+ kstar1_range = np.arange(args.final_kstar1[0], args.final_kstar1[1]+1)
231
+ kstar1_range_string = str(int(args.final_kstar1[0]))+'_'+str(int(args.final_kstar1[1]))
232
+ else:
233
+ kstar1_range = args.final_kstar1
234
+ kstar1_range_string = str(int(args.final_kstar1[0]))
235
+
236
+ if len(args.final_kstar2) == 2:
237
+ kstar2_range = np.arange(args.final_kstar2[0], args.final_kstar2[1]+1)
238
+ kstar2_range_string = str(int(args.final_kstar2[0]))+'_'+str(int(args.final_kstar2[1]))
239
+ else:
240
+ kstar2_range = args.final_kstar2
241
+ kstar2_range_string = str(int(args.final_kstar2[0]))
242
+
243
+ dat_store_fname = 'dat_kstar1_{0}_kstar2_{1}_SFstart_{2}_SFduration_{3}_metallicity_{4}.h5'.format(kstar1_range_string, kstar2_range_string, sampling['SF_start'], sampling['SF_duration'], sampling['metallicity'])
244
+ # Open the hdf5 file to store the fixed population data
245
+ try:
246
+ with pd.HDFStore(dat_store_fname,complib=args.complib,complevel=args.complevel) as dat_store:
247
+ # If the file exists, we will read it and continue from where we left off
248
+ conv_save = pd.read_hdf(dat_store, 'conv')
249
+ log_file = open('log_kstar1_{0}_kstar2_{1}_SFstart_{2}_SFduration_{3}_metallicity_{4}.txt'.format(kstar1_range_string, kstar2_range_string, sampling['SF_start'], sampling['SF_duration'], sampling['metallicity']), 'a')
250
+ log_file.write('There are already: '+str(conv_save.shape[0])+' '+kstar1_range_string+'_'+kstar2_range_string+' binaries evolved\n')
251
+ log_file.write('\n')
252
+ total_mass_singles = np.max(pd.read_hdf(dat_store, 'mass_singles'))[0]
253
+ total_mass_binaries = np.max(pd.read_hdf(dat_store, 'mass_binaries'))[0]
254
+ total_mass_stars = np.max(pd.read_hdf(dat_store, 'mass_stars'))[0]
255
+ total_n_singles = np.max(pd.read_hdf(dat_store, 'n_singles'))[0]
256
+ total_n_binaries = np.max(pd.read_hdf(dat_store, 'n_binaries'))[0]
257
+ total_n_stars = np.max(pd.read_hdf(dat_store, 'n_stars'))[0]
258
+ idx = int(np.max(pd.read_hdf(dat_store, 'idx'))[0])
259
+ except:
260
+ #dat_store = pd.HDFStore('dat_kstar1_{0}_kstar2_{1}_SFstart_{2}_SFduration_{3}_metallicity_{4}.h5'.format(kstar1_range_string, kstar2_range_string, sampling['SF_start'], sampling['SF_duration'], sampling['metallicity']),complib=args.complib,complevel=args.complevel)
261
+ conv_save = pd.DataFrame()
262
+ total_mass_singles = 0
263
+ total_mass_binaries = 0
264
+ total_mass_stars = 0
265
+ total_n_singles = 0
266
+ total_n_binaries = 0
267
+ total_n_stars = 0
268
+ idx = 0
269
+ log_file = open('log_kstar1_{0}_kstar2_{1}_SFstart_{2}_SFduration_{3}_metallicity_{4}.txt'.format(kstar1_range_string, kstar2_range_string, sampling['SF_start'], sampling['SF_duration'], sampling['metallicity']), 'w')
270
+
271
+ # save configuration settings and COSMIC version to output file
272
+ with h5.File(dat_store_fname, 'a') as f:
273
+ f["config"] = json.dumps({'BSEDict' : BSEDict, 'filters' : filters, 'convergence' : convergence,
274
+ 'sampling' : sampling, 'rand_seed': seed_int})
275
+ f.attrs['COSMIC_version'] = __version__
276
+
277
+ # Initialize the step counter and convergence array/list
278
+ Nstep = idx - np.mod(idx, args.Nstep)
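+ # (idx - mod(idx, Nstep) rounds the restart index down to the nearest multiple
+ # of args.Nstep, so a resumed run continues on the same step boundaries as a
+ # fresh run)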
279
+ match = np.zeros(len(convergence['convergence_params']))
280
+
281
+ # Simulate the fixed population
282
+ # This process is illustrated in Fig 1 of Breivik & Larson (2018)
283
+ steps = 0
284
+ bcm_filter_match = []
285
+ bpp_filter_match = []
286
+ initC_filter_match = []
287
+
288
+ # Warn about qmin and m2_min
289
+ if (hasattr(args, 'qmin')) & (hasattr(args, 'm2_min')):
290
+ warnings.warn(f"You have specified both qmin and m2_min. COSMIC will use qmin={args.qmin} to "
291
+ "determine the secondary masses in the initial sample.")
292
+
293
+ log_file.write("You have specified both qmin and m2_min.\n")
294
+ log_file.write("COSMIC will use qmin={} to determine the secondary masses in the initial sample.\n".format(args.qmin))
295
+
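+ # Keep sampling until one of three stopping criteria is met: the match statistic
+ # for every tracked convergence parameter falls to or below the match tolerance,
+ # Niter binaries have been sampled, or the maximum wall time is exceeded.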
296
+ while (Nstep < args.Niter) & (np.max(match) > convergence['match']) & ((time.time() - start_time) < args.max_wall_time):
297
+ # Set random seed such that each iteration gets a unique, determinable seed
298
+ rand_seed = seed_int + Nstep
299
+ np.random.seed(rand_seed)
300
+
301
+ # Select the initial binary sample method from user input
302
+ if sampling['sampling_method'] == 'independent':
303
+ if hasattr(args,'qmin') and hasattr(args,'m2_min'):
304
+ raise ValueError("You cannot specify both qmin and m2_min in the inifile if you are using the independent sampler. Please choose one or the other.")
305
+ # If qmin is specified, use it to sample the initial binary table
306
+ if hasattr(args,'qmin'):
307
+ init_samp_list = InitialBinaryTable.sampler(format_ = sampling['sampling_method'],
308
+ final_kstar1 = kstar1_range,
309
+ final_kstar2 = kstar2_range,
310
+ binfrac_model = args.binfrac_model,
311
+ primary_model = args.primary_model,
312
+ ecc_model = args.ecc_model,
313
+ porb_model = args.porb_model,
314
+ keep_singles = args.keep_singles,
315
+ SF_start = sampling['SF_start'],
316
+ SF_duration = sampling['SF_duration'],
317
+ met = sampling['metallicity'],
318
+ size = args.Nstep,
319
+ qmin = args.qmin,
320
+ params = args.inifile)
321
+ # if m2_min is specified, use it to sample the initial binary table
322
+ elif hasattr(args,'m2_min'):
323
+ init_samp_list = InitialBinaryTable.sampler(format_ = sampling['sampling_method'],
324
+ final_kstar1 = kstar1_range,
325
+ final_kstar2 = kstar2_range,
326
+ binfrac_model = args.binfrac_model,
327
+ primary_model = args.primary_model,
328
+ ecc_model = args.ecc_model,
329
+ porb_model = args.porb_model,
330
+ keep_singles = args.keep_singles,
331
+ SF_start = sampling['SF_start'],
332
+ SF_duration = sampling['SF_duration'],
333
+ met = sampling['metallicity'],
334
+ size = args.Nstep,
335
+ m2_min = args.m2_min,
336
+ params = args.inifile)
337
+ else:
338
+ raise ValueError("You must specify either qmin or m2_min in the",
339
+ " inifile if you are using the independent sampler")
340
+ IBT, mass_singles, mass_binaries, n_singles, n_binaries = init_samp_list
341
+
342
+ if sampling['sampling_method'] == 'multidim':
343
+ init_samp_list = InitialBinaryTable.sampler(format_ = sampling['sampling_method'],
344
+ final_kstar1 = kstar1_range,
345
+ final_kstar2 = kstar2_range,
346
+ rand_seed = rand_seed,
347
+ nproc = args.nproc,
348
+ SF_start = sampling['SF_start'],
349
+ SF_duration = sampling['SF_duration'],
350
+ met = sampling['metallicity'],
351
+ size = args.Nstep,
352
+ pool=pool,
353
+ keep_singles = args.keep_singles
354
+ )
355
+ IBT, mass_singles, mass_binaries, n_singles, n_binaries = init_samp_list
356
+
357
+ # Log the total sampled mass from the initial binary sample
358
+ # for future Galactic occurrence rate calculation
359
+ total_mass_singles += mass_singles
360
+ total_mass_binaries += mass_binaries
361
+ total_mass_stars += mass_singles + mass_binaries
362
+ total_n_singles += n_singles
363
+ total_n_binaries += n_binaries
364
+ total_n_stars += n_singles + 2*n_binaries
365
+
366
+ # Now that we have all these initial conditions
367
+ # let's create an Evolve class and evolve these systems
368
+
369
+ # check what kind of time resolution for the bcm array the user specified
370
+
371
+ # assume none: default dtp to each system's final time, so only the last bcm timestep is kept
372
+ dtp = IBT['tphysf'].values
373
+
374
+ # check whether the user supplied a fixed dtp via timestep_conditions (e.g. 'dtp=1.0'); if so, use it and clear the conditions
375
+ if isinstance(filters['timestep_conditions'], str):
376
+ dtp_inifile = filters['timestep_conditions'].split('=')[-1]
377
+ try:
378
+ dtp = float(dtp_inifile)
379
+ except:
380
+ pass
381
+ filters['timestep_conditions'] = []
382
+
383
+ # Evolve the sampled binaries with BSE
384
+ bpp, bcm, initCond, kick_info = Evolve.evolve(initialbinarytable=IBT,
385
+ pool=pool,
386
+ BSEDict=BSEDict,
387
+ idx=idx,
388
+ dtp=dtp,
389
+ timestep_conditions=filters['timestep_conditions'])
390
+
391
+ # extract single stars
392
+
393
+ if (args.keep_singles==True):
394
+ singles_bin_num = initCond.loc[initCond.kstar_2 == 15].bin_num.unique()
395
+ # get the singles from the bcm and bpp arrays
396
+ bcm_singles = bcm.loc[bcm.bin_num.isin(singles_bin_num)]
397
+ bpp_singles = bpp.loc[bpp.bin_num.isin(singles_bin_num)]
398
+ initCond_singles = initCond.loc[initCond.bin_num.isin(singles_bin_num)]
399
+ kick_info_singles = kick_info.loc[kick_info.bin_num.isin(singles_bin_num)]
400
+
401
+ bpp = bpp.loc[~bpp.bin_num.isin(singles_bin_num)]
402
+ bcm = bcm.loc[~bcm.bin_num.isin(singles_bin_num)]
403
+ initCond = initCond.loc[~initCond.bin_num.isin(singles_bin_num)]
404
+ kick_info = kick_info.loc[~kick_info.bin_num.isin(singles_bin_num)]
405
+ # get any nans and pull them out for now
406
+ nans = np.isnan(bpp.sep)
407
+ if nans.any():
408
+ nan_bin_nums = np.unique(bpp[nans]["bin_num"].values)
409
+ initCond_nan = initCond.loc[initCond.bin_num.isin(nan_bin_nums)]
410
+ with pd.HDFStore(dat_store_fname,complib=args.complib,complevel=args.complevel) as dat_store:
411
+ if pd.__version__<="2.0.0":
412
+ dat_store.append("nan_initC", initCond_nan)
413
+ else:
414
+ dat_store["nan_initC"] = initCond_nan
415
+ log_file.write(f"There are {len(nan_bin_nums)} NaNs stored in the datfile with key: 'nan_initC'\n")
416
+ log_file.write(f"You might want to check them out carefully to see if there is something that impacts your results\n")
417
+ #log_file.write(f"These NaNs likely arise because you have pts1 = 0.001, try running with pts1 = 0.01")
418
+
419
+ bcm = bcm.loc[~bcm.bin_num.isin(nan_bin_nums)]
420
+ bpp = bpp.loc[~bpp.bin_num.isin(nan_bin_nums)]
421
+ initCond = initCond.loc[~initCond.bin_num.isin(nan_bin_nums)]
422
+ kick_info = kick_info.loc[~kick_info.bin_num.isin(nan_bin_nums)]
423
+
424
+ # Keep track of the index
425
+ idx = int(bcm.bin_num.max()+1)
426
+
427
+ # If dtp is not set, filter out first timestep in bcm
428
+ if np.all(dtp == IBT['tphysf'].values):
429
+ bcm = bcm.loc[bcm['tphys'].isin(dtp)]
430
+ if (args.keep_singles==True):
431
+ bcm_singles = bcm_singles.loc[bcm_singles['tphys'].isin(dtp)]
432
+
433
+ # Now get the converging population and bin_nums for conv systems which
434
+ # satisfy the convergence_limits
435
+ conv_filter, conv_lims_bin_num = utils.conv_select(bcm, bpp,
436
+ kstar1_range, kstar2_range,
437
+ convergence['pop_select'],
438
+ convergence['convergence_limits'])
439
+ if (args.keep_singles==True):
440
+ conv_singles_filter = utils.conv_select_singles(bcm_singles, bpp_singles, kstar1_range)
441
+
442
+ if conv_filter.empty:
443
+ warnings.warn("After filtering for desired convegence systems there were no systems matching your request. It is possible you are suggesting incompatible bin_state choices and pop_select, e.g. bin_state=[0,1], pop_select='disruption'")
444
+ log_file.write("After filtering for desired convegence systems there were no systems matching your request. It is possible you are suggesting incompatible bin_state choices and pop_select, e.g. bin_state=[0,1], pop_select='disruption'")
445
+
446
+ bcm_filter = bcm.loc[bcm.bin_num.isin(conv_filter.bin_num)]
447
+ bpp_filter = bpp.loc[bpp.bin_num.isin(conv_filter.bin_num)]
448
+ initC_filter = initCond.loc[initCond.bin_num.isin(conv_filter.bin_num)]
449
+ kick_info_filter = kick_info.loc[kick_info.bin_num.isin(conv_filter.bin_num)]
450
+
451
+ if (args.keep_singles==True):
452
+ bpp_singles_filter = bpp_singles.loc[bpp_singles.bin_num.isin(conv_singles_filter.bin_num)]
453
+ bcm_singles_filter = bcm_singles.loc[bcm_singles.bin_num.isin(conv_singles_filter.bin_num)]
454
+ initC_singles_filter = initCond_singles.loc[initCond_singles.bin_num.isin(conv_singles_filter.bin_num)]
455
+ kick_info_singles_filter = kick_info_singles.loc[kick_info_singles.bin_num.isin(conv_singles_filter.bin_num)]
456
+
457
+ # Filter the bin_state based on user specified filters
458
+ bcm_filter, bin_state_nums = utils.filter_bin_state(bcm_filter, bpp_filter, filters, kstar1_range, kstar2_range)
459
+ if bcm_filter.empty:
460
+ warnings.warn("After filtering the bcm array for desired systems there were no systems matching your request. It is possible you should up to the number of binaries provessed in each iteration, i.e. Nstep")
461
+ log_file.write("After filtering the bcm array for desired systems there were no systems matching your request. It is possible you should up to the number of binaries provessed in each iteration, i.e. Nstep\n")
462
+ initC_filter = initC_filter.loc[initC_filter.bin_num.isin(bcm_filter.bin_num)]
463
+ kick_info_filter = kick_info_filter.loc[kick_info_filter.bin_num.isin(bcm_filter.bin_num)]
464
+ bpp_filter = bpp_filter.loc[bpp_filter.bin_num.isin(bcm_filter.bin_num)]
465
+ conv_filter = conv_filter.loc[conv_filter.bin_num.isin(bcm_filter.bin_num)]
466
+
467
+ if convergence['apply_convergence_limits'] == True:
468
+ initC_filter = initC_filter.loc[initC_filter.bin_num.isin(conv_lims_bin_num)]
469
+ kick_info_filter = kick_info_filter.loc[kick_info_filter.bin_num.isin(conv_lims_bin_num)]
470
+ bpp_filter = bpp_filter.loc[bpp_filter.bin_num.isin(conv_lims_bin_num)]
471
+
472
+ conv_filter = conv_filter.loc[conv_filter.bin_num.isin(conv_lims_bin_num)]
473
+
474
+ # Filter the bcm and bpp arrays according to user specified filters
475
+ bcm_filter_match = bcm_filter.copy()
476
+ bpp_filter_match = bpp_filter.copy()
477
+ initC_filter_match = initC_filter.copy()
478
+ conv_filter_match = conv_filter.copy()
479
+ kick_info_filter_match = kick_info_filter.copy()
480
+ if (args.keep_singles==True):
481
+ conv_singles_filter_match = conv_singles_filter.copy()
482
+ bpp_singles_filter_match = bpp_singles_filter.copy()
483
+ bcm_singles_filter_match = bcm_singles_filter.copy()
484
+ initC_filter_singles_match = initC_singles_filter.copy()
485
+ kick_info_singles_filter_match = kick_info_singles_filter.copy()
486
+
487
+ if len(conv_filter_match) >= np.min([50, args.Niter]):
488
+ conv_save = pd.concat([conv_save, pd.DataFrame(conv_filter_match)], ignore_index=True)
489
+
490
+ # perform the convergence check: compare the cumulative converging sample against the same sample with the newest batch removed (on the first pass, the newest batch is compared against itself)
491
+ if len(conv_save) == len(conv_filter_match):
492
+ match = Match.perform_convergence(convergence['convergence_params'],
493
+ conv_save,
494
+ conv_filter_match,
495
+ log_file)
496
+ else:
497
+ match = Match.perform_convergence(convergence['convergence_params'],
498
+ conv_save,
499
+ conv_save.loc[~conv_save.bin_num.isin(conv_filter_match.bin_num)],
500
+ log_file)
501
+
502
+ match_save = pd.DataFrame(np.atleast_2d(match), columns = convergence['convergence_params'])
503
+
504
+ # write the data and the logs!
505
+ mass_list = [total_mass_singles, total_mass_binaries, total_mass_stars]
506
+ n_list = [total_n_singles, total_n_binaries, total_n_stars]
507
+
508
+ # write the data to the dat_store
509
+ with pd.HDFStore(dat_store_fname,complib=args.complib,complevel=args.complevel) as dat_store:
510
+ if (args.keep_singles==True):
511
+ utils.pop_write(dat_store, log_file, mass_list, n_list, bcm_filter_match,
512
+ bpp_filter_match, initC_filter_match, conv_filter_match, kick_info_filter_match,
513
+ bin_state_nums, match_save, idx,
514
+ conv_singles=conv_singles_filter_match, bcm_singles=bcm_singles_filter_match,
515
+ bpp_singles=bpp_singles_filter_match, initC_singles=initC_filter_singles_match,
516
+ kick_info_singles=kick_info_singles_filter_match)
517
+ else:
518
+ utils.pop_write(dat_store, log_file, mass_list, n_list, bcm_filter_match,
519
+ bpp_filter_match, initC_filter_match, conv_filter_match, kick_info_filter_match,
520
+ bin_state_nums, match_save, idx)
521
+
522
+ # reset the bcm_filter DataFrame
523
+ bcm_filter_match = []
524
+ bpp_filter_match = []
525
+ initC_filter_match = []
526
+ conv_filter_match = []
527
+ kick_info_filter_match = []
528
+ if (args.keep_singles==True):
529
+ conv_singles_filter_match = []
530
+ bpp_singles_filter_match = []
531
+ bcm_singles_filter_match = []
532
+ initC_filter_singles_match = []
533
+ kick_info_singles_filter_match = []
534
+ log_file.write('\n')
535
+ Nstep += args.Nstep
536
+ log_file.flush()
537
+
538
+ # close the log file and print the final message
539
+ log_file.write('All done friend!')
540
+ log_file.close()
541
+
542
+ pool.close()
543
+ pool.join()
544
+
@@ -0,0 +1,55 @@
1
+ Metadata-Version: 2.1
2
+ Name: cosmic-popsynth
3
+ Version: 3.6.2
4
+ Summary: a Python-interfaced binary population synthesis suite
5
+ Author: Scott Coughlin, Michael Zevin, Carl L. Rodriguez, Tom Wagg
6
+ Author-Email: Katelyn Breivik <katie.breivik@gmail.com>
7
+ License: MIT License
8
+ Classifier: Development Status :: 5 - Production/Stable
9
+ Classifier: Intended Audience :: Developers
10
+ Classifier: Intended Audience :: Science/Research
11
+ Classifier: License :: OSI Approved :: MIT License
12
+ Classifier: Operating System :: OS Independent
13
+ Classifier: Programming Language :: Python
14
+ Requires-Python: >=3.10
15
+ Requires-Dist: numpy
16
+ Requires-Dist: scipy
17
+ Requires-Dist: astropy
18
+ Requires-Dist: configparser
19
+ Requires-Dist: tqdm
20
+ Requires-Dist: pandas
21
+ Requires-Dist: tables
22
+ Requires-Dist: h5py
23
+ Requires-Dist: schwimmbad
24
+ Requires-Dist: matplotlib
25
+ Description-Content-Type: text/markdown
26
+
27
+ # COSMIC
28
+ COSMIC (Compact Object Synthesis and Monte Carlo Investigation Code)
29
+
30
+ COSMIC is a rapid binary population synthesis suite designed for generating realistic compact binary populations. COSMIC can also be used to evolve individual binaries with BSE in a Python environment.
31
+
32
+ COSMIC currently implements binary evolutionary processes using BSE ([Hurley+2002](https://ui.adsabs.harvard.edu/abs/2002MNRAS.329..897H/abstract)). Several modifications have been applied to BSE to account for recent updates to binary evolution that are especially important to compact binary formation (e.g. metallicity-dependent stellar winds or black hole natal kick strengths). For a detailed discussion of these modifications, see [Breivik et al 2020](https://ui.adsabs.harvard.edu/abs/2020ApJ...898...71B/abstract).
33
+
34
+ <https://cosmic-popsynth.github.io/>
35
+
36
+ # Release Status
37
+
38
+ [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.3561144.svg)](https://doi.org/10.5281/zenodo.3561144)
39
+
40
+
41
+ [![PyPI version](https://badge.fury.io/py/cosmic-popsynth.svg)](https://badge.fury.io/py/cosmic-popsynth)
42
+
43
+
44
+ # Installation
45
+
46
+ The easiest way to install COSMIC is with pip:
47
+
48
+ ```
49
+ pip install cosmic-popsynth
50
+ ```
51
+
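+ As noted above, COSMIC can also evolve individual binaries with BSE directly from Python. A minimal sketch, assuming a fully specified BSE ini file (`Params.ini` here is a placeholder name) and illustrative binary parameters:
+
+ ```
+ from cosmic.sample.initialbinarytable import InitialBinaryTable
+ from cosmic.evolve import Evolve
+ from cosmic import utils
+
+ # load the BSE flag dictionary from an ini file (same helper the cosmic-pop script uses)
+ BSEDict, seed_int, filters, convergence, sampling = utils.parse_inifile("Params.ini")
+
+ # a single binary: masses in Msun, orbital period in days, eccentricity,
+ # evolution time in Myr, initial kstar types, and metallicity
+ single_binary = InitialBinaryTable.InitialBinaries(
+     m1=10.0, m2=8.0, porb=500.0, ecc=0.1, tphysf=13700.0,
+     kstar1=1, kstar2=1, metallicity=0.02)
+
+ # evolve it: bpp records key evolutionary changes, bcm holds snapshots
+ # (by default only the final state), plus initial conditions and natal kick info
+ bpp, bcm, initC, kick_info = Evolve.evolve(
+     initialbinarytable=single_binary, BSEDict=BSEDict)
+ ```
+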
52
+ [![Unit Test COSMIC](https://github.com/COSMIC-PopSynth/COSMIC/actions/workflows/python-package.yml/badge.svg)](https://github.com/COSMIC-PopSynth/COSMIC/actions/workflows/python-package.yml)
53
+
54
+ [![codecov](https://codecov.io/gh/COSMIC-PopSynth/COSMIC/branch/develop/graph/badge.svg)](https://codecov.io/gh/COSMIC-PopSynth/COSMIC)
55
+
@@ -0,0 +1,38 @@
1
+ cosmic_popsynth-3.6.2.dist-info/RECORD,,
2
+ cosmic_popsynth-3.6.2.dist-info/WHEEL,sha256=pfpJ73u9f5n4jdRUwLfi5e7HknOIECF6VOOE9OTuurY,122
3
+ cosmic_popsynth-3.6.2.dist-info/METADATA,sha256=3waC7hjr6XTPilHD0UkDCFrchSblDn5yjQ7wfWwJ8Lg,2366
4
+ cosmic/_evolvebin.cpython-313-darwin.so,sha256=aajTT5N7LAG2RutawRrWpJGk-81Ck5u9DtgIajNQN3c,482128
5
+ cosmic/test_utils.py,sha256=zb1ukhM8XzmqC-080XVe6Qf4cdNfdQwqB1KsVyQGHMc,10991
6
+ cosmic/evolve.py,sha256=gTDOskkrleJSNYFqDtB0WQO_LcM2KN62Yo6UISaOgIE,29745
7
+ cosmic/test_sample.py,sha256=RGcxCYybhWnpzwPOkbw4QTs7hQ5UT0n5ugs-mbFGTG0,27653
8
+ cosmic/_version.py,sha256=JuDKlvRQpMKwL3bc24_MxmGe7_wOb6HCm8Kz4Ba4dac,22
9
+ cosmic/get_commit_hash.py,sha256=R7eN590I9Xvf558DrieWqYVGI6IpIO2V3GDVuH9CdwU,539
10
+ cosmic/plotting.py,sha256=yaOVbV8yTMqNZL1o-y_1IYkwCwyuEqyu4NJPMlkc-QI,18862
11
+ cosmic/checkstate.py,sha256=Eyb6KYcEEKDCI3t-epShXAqaeTq2oT0KfOlviGndxmc,4580
12
+ cosmic/__init__.py,sha256=mPlQFBqAI80Ckz-cSOOayPcD1LM_kfpMUjpFx2RqaC4,1111
13
+ cosmic/test_match.py,sha256=newjWr01eANC-9FIsAG9Gb8oOmvw4fpvZY0or76_vik,708
14
+ cosmic/_commit_hash.py,sha256=nMcaC9cNnSVBJV-E_N3PCKFi09ZGcCxufkU5BURztHc,57
15
+ cosmic/utils.py,sha256=onuTyrPqvepZsYjkdMJRZHRyKlpAnFcWizeiePs_SxE,52372
16
+ cosmic/test_evolve.py,sha256=L7Rhc9yNV536u9yzq25IQVXnGpVVaTfATYbeK9fhEgg,5661
17
+ cosmic/filter.py,sha256=uJdSuzcHavQ2mOd8vNsyMPQUA2krssz0Th5L1s3KG0A,6644
18
+ cosmic/output.py,sha256=2U3vWJTR-ceD2weJHcydWF0p5tFn51yesCYf0U45tQU,20266
19
+ cosmic/Match.py,sha256=n3AAFJdzqUCCuv6QgVZ9a710_VS0y_J_2KzJkeJcCdU,6466
20
+ cosmic/.dylibs/libgfortran.5.dylib,sha256=LlOvbI4bFMMjTzn9Ta4WPtlgyNF7pcV5ewURYJCbNFo,1901520
21
+ cosmic/.dylibs/libquadmath.0.dylib,sha256=0j2dWWRFLakacuRP24UNB0ZpT8pcqIC94DaOzGg4FQY,363936
22
+ cosmic/.dylibs/libgcc_s.1.1.dylib,sha256=w3Uk4EwM2_iu9pIa3fgVT1WgoVl6DQtWAlnX-UA6ruQ,183040
23
+ cosmic/bse_utils/__init__.py,sha256=hfPCjPwlZoPkzdxBkT-jHQ_cLf_EoMPEwYqKmhrUFCI,740
24
+ cosmic/bse_utils/zcnsts.py,sha256=-bWhm8EHX_ndOrpE9VkHV7HoHBEFxHWsE0HBtIzWygI,22271
25
+ cosmic/bse_utils/zdata.py,sha256=P-mIAgR6TkM3Y_lJJB_14Ixox8xQk_oST2qNmbWvuSE,12884
26
+ cosmic/sample/__init__.py,sha256=4brmNSsRpbUpygR_zZH643h3Ng2MZH6rtaLodQjx--s,934
27
+ cosmic/sample/initialbinarytable.py,sha256=lY1Cp8Wp4OQqEeAM1SqzYMJLvsSUllzech01tJC059s,8266
28
+ cosmic/sample/initialcmctable.py,sha256=E37UW78ZqJVVQLB82aY2SYuUFN0-OgAy48ye12ryiaw,15153
29
+ cosmic/sample/cmc/elson.py,sha256=7vUdhwseIBtjmcC81sdgErymwuefZvIgw3hB07sRNSo,13488
30
+ cosmic/sample/cmc/__init__.py,sha256=InwIN6eI6q5BnfF8-VX3Ep8gxjtsOB9QdLmssKLs1uo,740
31
+ cosmic/sample/cmc/king.py,sha256=-56VTWLcsDQaDT96Z7acIJYAHkcgOhEjixyg2Y4Mk0M,8553
32
+ cosmic/sample/sampler/__init__.py,sha256=d7kSZRoxZgEa_5-6370YVxOqRY2N5Yzk9Ql2bdVFLKs,965
33
+ cosmic/sample/sampler/independent.py,sha256=SsQSTLF9TI_dlE9dONjl2ZfVJx6l6hEiVu_wfQcDV-Y,56844
34
+ cosmic/sample/sampler/multidim.py,sha256=AwrWki8WJnAdN40teoaFHNWUW_i3AZwn4lkqlxjQNdM,35212
35
+ cosmic/sample/sampler/cmc.py,sha256=sBHk4aju2M57ddWCOjD5pgke_zW2BMsYbNffzUhLIn4,15929
36
+ cosmic/sample/sampler/sampler.py,sha256=tO8wf8IwMcoXdHur5WC4OLgYbv468rHkeR43mjkKOK4,3953
37
+ cosmic/data/cosmic-settings.json,sha256=r-Qsuov-TCrBWsnuf5doaVqa0FqoLxsi1XuO6xxBDTw,86849
38
+ cosmic_popsynth-3.6.2.data/scripts/cosmic-pop,sha256=7YJYiuyq9qp_YMWrEL7LR2nyQKu8x4rNwCq75F6SuLc,30765
@@ -0,0 +1,6 @@
1
+ Wheel-Version: 1.0
2
+ Generator: meson
3
+ Root-Is-Purelib: false
4
+ Tag: cp313-cp313-macosx_14_0_arm64
5
+ Generator: delocate 0.13.0
6
+