@@ -494,24 +494,23 @@ def append_rates(path, filename, detection_rate, formation_rate, merger_rate, re
     maxz --> [float] Maximum redshift up to where we would like to store the data
     sensitivity --> [string] Which detector sensitivity you used to calculate rates
     dco_type --> [string] Which DCO type you used to calculate rates
-    mu0 --> [float] Parameter used for calculating metallicity density dist
-    muz --> [float] Parameter used for calculating metallicity density dist
-    sigma0 --> [float] Parameter used for calculating metallicity density dist
-    sigmaz --> [float] Parameter used for calculating metallicity density dist
-    alpha --> [float] Parameter used for calculating metallicity density dist
+    mu0 --> [float] Parameter used for calculating metallicity density dist
+    muz --> [float] Parameter used for calculating metallicity density dist
+    sigma0 --> [float] Parameter used for calculating metallicity density dist
+    sigmaz --> [float] Parameter used for calculating metallicity density dist
+    alpha --> [float] Parameter used for calculating metallicity density dist
 
     remove_group --> [Bool] set to True if you want to completely remove this group from the hdf5 file
     append_binned_by_z --> [Bool] to save space, bin rates by redshift bin and append the binned rates
+    redshift_binsize --> [float] if append_binned_by_z, how big should your redshift bin be
 
     Returns:
-    h_new --> [hdf5 file] Compas output file with a new group "rates" with the same shape as DoubleCompactObjects
+    h_new --> [hdf5 file] Compas output file with a new group "rates" with the same shape as DoubleCompactObjects x redshifts
     """
     print('shape redshifts', np.shape(redshifts))
     print('shape COMPAS.sw_weights', np.shape(COMPAS.sw_weights))
     print('shape COMPAS.sw_weights[COMPAS.DCO_masks[dco_type]]', np.shape(COMPAS.sw_weights[COMPAS.DCO_masks[dco_type]]))
 
-    n_redshifts_detection = int(max_redshift_detection / redshift_step)
-
     #################################################
     # Open the hdf5 file that we will write to
     print('filename', filename)
@@ -533,10 +532,9 @@ def append_rates(path, filename, detection_rate, formation_rate, merger_rate, re
 
 
     #################################################
-    # If you just want to get rid of this point
-    print('remove_group', remove_group)
+    # If you just want to get rid of this group, remove and exit
     if remove_group:
-        print('You really want to remove this group fromm the hdf5 file, removing now..')
+        print('You want to remove this group, %s, from the hdf5 file, removing now..' % (new_rate_group))
         del h_new[new_rate_group]
         return
 
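
For orientation, here is a minimal sketch (not the COMPAS implementation itself) of the open-or-remove pattern these hunks rely on, assuming the output file is handled with h5py; the group name and helper name are illustrative, while the real new_rate_group presumably encodes dco_type and the mu0, muz, sigma0, sigmaz, alpha values:

import h5py

def open_rate_group(path, filename, new_rate_group, remove_group=False):
    # Open the COMPAS output read/write ('a' keeps all existing groups intact)
    h_new = h5py.File(path + '/' + filename, 'a')
    if remove_group and new_rate_group in h_new:
        del h_new[new_rate_group]            # drop the whole group and exit, as in the diff above
        h_new.close()
        return None
    if new_rate_group not in h_new:
        h_new.create_group(new_rate_group)   # first call for these parameters: make the group
    return h_new
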
@@ -561,7 +559,7 @@ def append_rates(path, filename, detection_rate, formation_rate, merger_rate, re
         # binned_merger_rate will be the (observed) weights, binned by redshift
         binned_merger_rate    = np.zeros((N_dco, len(redshift_bins)-1))  # create an empty array to fill
         binned_detection_rate = np.zeros((N_dco, len(redshift_bins)-1))  # create an empty array to fill
-        print('x', range(0, len(redshift_bins) - 1))
+
         # loop over all redshift bins
         for i in range(len(redshift_bins)-1):
             binned_merger_rate[:, i] = np.sum(merger_rate[:, digitized == i+1], axis=1)
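
To see what the binning above does, here is a self-contained toy example with made-up numbers (not COMPAS data): np.digitize assigns each fine-grid redshift to a coarse bin, and the loop sums the per-system rate columns that land in the same bin.

import numpy as np

redshifts     = np.array([0.05, 0.15, 0.25, 0.35])       # fine redshift grid (illustrative)
redshift_bins = np.arange(0.0, 0.5, 0.2)                 # coarse bin edges: 0.0, 0.2, 0.4
rates         = np.array([[1.0, 2.0, 3.0, 4.0],          # 2 systems x 4 redshifts
                          [0.5, 0.5, 0.5, 0.5]])

digitized = np.digitize(redshifts, redshift_bins)        # 1-based bin index per redshift
binned    = np.zeros((rates.shape[0], len(redshift_bins) - 1))
for i in range(len(redshift_bins) - 1):
    binned[:, i] = np.sum(rates[:, digitized == i + 1], axis=1)

print(binned)                                            # [[3. 7.], [1. 1.]]
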
@@ -573,10 +571,13 @@ def append_rates(path, filename, detection_rate, formation_rate, merger_rate, re
         save_detection_rate = binned_detection_rate
 
     else:
-        # We don't really wan't All the data, so we're going to save only up to some redshift
+        # To avoid huge file sizes, we don't really want all the data,
+        # so we're only going to save up to some redshift
         z_index = np.digitize(maxz, redshifts) - 1
+
         # The detection_rate is a smaller array, make sure you don't go beyond the end
         detection_index = z_index if z_index < n_redshifts_detection else n_redshifts_detection
+
         print('You will only save data up to redshift', maxz, ', i.e. index', redshifts[z_index])
         save_redshifts     = redshifts
         save_merger_rate   = merger_rate[:, :z_index]
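
A small worked example of the truncation indices above, using an artificially coarse grid so the numbers are easy to check; the values of maxz and n_redshifts_detection here are illustrative, not the script defaults:

import numpy as np

redshifts = np.arange(0.0, 10.0, 0.5)         # illustrative grid (the real redshift_step is much finer)
maxz      = 2.2                               # only save data below this redshift
n_redshifts_detection = 2                     # e.g. a max_redshift_detection of 1.0 on the same grid

z_index = np.digitize(maxz, redshifts) - 1    # index of the last grid point at or below maxz
detection_index = z_index if z_index < n_redshifts_detection else n_redshifts_detection
print(z_index, detection_index)               # 4 2, so detection_rate is sliced with the smaller index
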
@@ -587,8 +588,7 @@ def append_rates(path, filename, detection_rate, formation_rate, merger_rate, re
     #################################################
     # Write the rates as a separate dataset
     # re-arrange your list of rate parameters
-    DCO_to_rate_mask = COMPAS.DCO_masks[dco_type]
-    print('DCO_to_rate_mask', DCO_to_rate_mask)
+    DCO_to_rate_mask = COMPAS.DCO_masks[dco_type]  # save this bool mask for easy conversion between DoubleCompactObjects and CI weights
     rate_data_list  = [DCO['SEED'][DCO_to_rate_mask], DCO_to_rate_mask, save_redshifts, save_merger_rate, merger_rate[:, 0], save_detection_rate]
     rate_list_names = ['SEED', 'DCO_mask', 'redshifts', 'merger_rate', 'merger_rate_z0', 'detection_rate' + sensitivity]
     for i, data in enumerate(rate_data_list):
@@ -599,10 +599,9 @@ def append_rates(path, filename, detection_rate, formation_rate, merger_rate, re
         # write rates as a new data set
         dataNew = h_new[new_rate_group].create_dataset(rate_list_names[i], data=data)
 
-    #Always close your files again
+    #Always close your files again ;)
     h_new.close()
-    print('Done :) your new files are here: ', path + '/' + filename)
-
+    print('Done with append_rates :) your new files are here: ', path + '/' + filename)
 
 
 
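
Once append_rates has finished, the new group can be read back with the same hdf5 tooling. A hedged read-back sketch, assuming h5py, a hypothetical file path, and a hypothetical group name (the real group name is built from the rate parameters listed in the docstring):

import h5py
import numpy as np

with h5py.File('/path/to/COMPAS_Output.h5', 'r') as h5file:    # hypothetical path
    rates          = h5file['Rates_example']                   # hypothetical group name
    DCO_mask       = rates['DCO_mask'][()].astype(bool)        # maps DoubleCompactObjects rows to rated systems
    merger_rate_z0 = rates['merger_rate_z0'][()]               # merger rate of each system at the first redshift step
    print('total local merger rate:', np.sum(merger_rate_z0))
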
@@ -744,11 +743,13 @@ def plot_rates(save_dir, formation_rate, merger_rate, detection_rate, redshifts,
     #####################################
     # Append your freshly calculated merger rates to the hdf5 file
     start_append = time.time()
-    append_rates(args.path, args.fname, detection_rate, formation_rate, merger_rate, redshifts, COMPAS,
-                 maxz=args.max_redshift_detection, sensitivity=args.sensitivity, dco_type=args.dco_type, mu0=args.mu0, muz=args.muz, alpha=args.alpha, sigma0=args.sigma0, sigmaz=args.sigmaz,
-                 remove_group=False, max_redshift_detection=args.max_redshift_detection, redshift_step=args.redshift_step, append_binned_by_z=False, redshift_binsize=0.05)
+    n_redshifts_detection = int(args.max_redshift_detection / args.redshift_step)
+    append_rates(args.path, args.fname, detection_rate, formation_rate, merger_rate, redshifts, COMPAS, n_redshifts_detection,
+                 maxz=args.max_redshift_detection, sensitivity=args.sensitivity, dco_type=args.dco_type, mu0=args.mu0, muz=args.muz, sigma0=args.sigma0, sigmaz=args.sigmaz, alpha=args.alpha,
+                 remove_group=False, append_binned_by_z=False, redshift_binsize=0.05)
     end_append = time.time()
 
+
     #####################################
     # Plot your result
     start_plot = time.time()
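
The call-site change mirrors the deletion inside append_rates: n_redshifts_detection is no longer recomputed from keyword arguments inside the function; the caller derives it once and passes it explicitly. With illustrative numbers (not necessarily the script defaults):

max_redshift_detection = 1.0      # stand-in for args.max_redshift_detection
redshift_step          = 0.001    # stand-in for args.redshift_step

n_redshifts_detection = int(max_redshift_detection / redshift_step)
print(n_redshifts_detection)      # 1000, i.e. detection_rate has shape (N_dco, 1000)
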