diff --git a/binarycpython.yml b/binarycpython.yml index 16e276f658c43c42e161f689447ab41ec3055cbc..0c8f4644b7942895a43ca8f2fe82ce81314a9a51 100644 --- a/binarycpython.yml +++ b/binarycpython.yml @@ -9,4 +9,5 @@ windows: layout: tiled panes: - ls - - subl . \ No newline at end of file + - subl . + - cd $BINARYC_DATA_ROOT/ \ No newline at end of file diff --git a/binarycpython/utils/IMF.py b/binarycpython/utils/IMF.py deleted file mode 100644 index d9b2c7de3fe45efd6afa7138846bd0a2ca28a818..0000000000000000000000000000000000000000 --- a/binarycpython/utils/IMF.py +++ /dev/null @@ -1,4 +0,0 @@ -### -# File containing functions to calculate probability of existence of star. Copied and inspired by the perl module IMF.pm or Rob Izzard - -# TODO: add \ No newline at end of file diff --git a/binarycpython/utils/distribution_functions.py b/binarycpython/utils/distribution_functions.py index 60aa9f788e5bcde7a701e83e6504060a15b1278c..278bbef122b138c9927c955fb2fef5bbc4c71ec1 100644 --- a/binarycpython/utils/distribution_functions.py +++ b/binarycpython/utils/distribution_functions.py @@ -6,12 +6,13 @@ import binarycpython.utils.useful_funcs # Mostly copied from the perl modules # TODO: make some things globally present? rob does this in his module. i guess it saves calculations but not sure if im gonna do that now -# TODO: Add the stuff from the IMF file +# TODO: Add the stuff from the IMF file # TODO: call all of these functions to check whether they work # TODO: make global constants stuff # TODO: make description of module submodule -log_ln_converter = (1.0/math.log(10.0)) +log_ln_converter = 1.0 / math.log(10.0) + def flat(parameter): """ @@ -70,18 +71,35 @@ def powerlaw(min_val, max_val, k, x): # ) return y + def calculate_constants_three_part_powerlaw(m0, m1, m2, m_max, p1, p2, p3): # print("Initialising constants for the three-part powerlaw: m0={} m1={} m2={} m_max={} p1={} p2={} p3={}\n".format(m0, m1, m2, m_max, p1, p2, p3)) array_constants_three_part_powerlaw = [0, 0, 0] - array_constants_three_part_powerlaw[1] = ((m1**p2) * (m1**(-p1))) * (1.0/(1.0 + p1)) * (m1**(1.0 + p1) - m0**(1.0 + p1)) - array_constants_three_part_powerlaw[1] += ((m2 ** (1.0 + p2) - m1 ** (1.0+p2))) * (1.0/(1.0+p2)) - array_constants_three_part_powerlaw[1] += ((m2**p2) * (m2**(-p3))) * (1.0/(1.0 + p3)) * (m_max**(1.0+p3) - m2**(1.0+p3)) - array_constants_three_part_powerlaw[1] = 1.0/(array_constants_three_part_powerlaw[1] + 1e-50) - - array_constants_three_part_powerlaw[0] = array_constants_three_part_powerlaw[1] * ((m1**p2)*(m1**(-p1))) - array_constants_three_part_powerlaw[2] = array_constants_three_part_powerlaw[1] * ((m2**p2)*(m2**(-p3))) + array_constants_three_part_powerlaw[1] = ( + ((m1 ** p2) * (m1 ** (-p1))) + * (1.0 / (1.0 + p1)) + * (m1 ** (1.0 + p1) - m0 ** (1.0 + p1)) + ) + array_constants_three_part_powerlaw[1] += ( + (m2 ** (1.0 + p2) - m1 ** (1.0 + p2)) + ) * (1.0 / (1.0 + p2)) + array_constants_three_part_powerlaw[1] += ( + ((m2 ** p2) * (m2 ** (-p3))) + * (1.0 / (1.0 + p3)) + * (m_max ** (1.0 + p3) - m2 ** (1.0 + p3)) + ) + array_constants_three_part_powerlaw[1] = 1.0 / ( + array_constants_three_part_powerlaw[1] + 1e-50 + ) + + array_constants_three_part_powerlaw[0] = array_constants_three_part_powerlaw[1] * ( + (m1 ** p2) * (m1 ** (-p1)) + ) + array_constants_three_part_powerlaw[2] = array_constants_three_part_powerlaw[1] * ( + (m2 ** p2) * (m2 ** (-p3)) + ) return array_constants_three_part_powerlaw # $$array[1]=(($m1**$p2)*($m1**(-$p1)))* @@ -98,6 +116,7 @@ def 
calculate_constants_three_part_powerlaw(m0, m1, m2, m_max, p1, p2, p3): # #print "ARRAY SET @_ => @$array\n"; # $threepart_powerlaw_consts{"@_"}=[@$array]; + def three_part_powerlaw(M, M0, M1, M2, M_MAX, P1, P2, P3): """ Generalized three-part power law, usually used for mass distributions @@ -105,22 +124,25 @@ def three_part_powerlaw(M, M0, M1, M2, M_MAX, P1, P2, P3): # TODO: add check on whether the values exist - three_part_powerlaw_constants = calculate_constants_three_part_powerlaw(M0, M1, M2, M_MAX, P1, P2, P3) - - # - if (M < M0): - p = 0 # Below 0 - elif (M0 < M <= M1): - p = three_part_powerlaw_constants[0] * (M ** P1) # Between M0 and M1 - elif (M1 < M <= M2): - p = three_part_powerlaw_constants[1] * (M ** P2) # Between M1 and M2 - elif (M2 < M <= M_MAX): - p = three_part_powerlaw_constants[2] * (M ** P3) # Between M2 and M_MAX + three_part_powerlaw_constants = calculate_constants_three_part_powerlaw( + M0, M1, M2, M_MAX, P1, P2, P3 + ) + + # + if M < M0: + p = 0 # Below 0 + elif M0 < M <= M1: + p = three_part_powerlaw_constants[0] * (M ** P1) # Between M0 and M1 + elif M1 < M <= M2: + p = three_part_powerlaw_constants[1] * (M ** P2) # Between M1 and M2 + elif M2 < M <= M_MAX: + p = three_part_powerlaw_constants[2] * (M ** P3) # Between M2 and M_MAX else: - p = 0 # Above M_MAX + p = 0 # Above M_MAX return p + def const(min_bound, max_bound, val=None): """ a constant distribution function between min=$_[0] and max=$_[1] @@ -131,9 +153,10 @@ def const(min_bound, max_bound, val=None): print("out of bounds") p = 0 return p - p = (1.0/(min_bound - max_bound)) + p = 1.0 / (min_bound - max_bound) return p + def set_opts(opts, newopts): """ Function to take a default dict and override it with newer values. @@ -149,6 +172,7 @@ def set_opts(opts, newopts): return opt + def gaussian(x, mean, sigma, gmin, gmax): """ Gaussian distribution function. 
used for e..g Duquennoy + Mayor 1991 @@ -158,7 +182,7 @@ def gaussian(x, mean, sigma, gmin, gmax): # # location (X value), mean and sigma, min and max range # my ($x,$mean,$sigma,$gmin,$gmax) = @_; - if ((x < gmin) or (x > gmax)): + if (x < gmin) or (x > gmax): p = 0 else: # normalize over given range @@ -168,6 +192,7 @@ def gaussian(x, mean, sigma, gmin, gmax): return p + def gaussian_normalizing_const(mean, sigma, gmin, gmax): """ Function to calculate the normalisation constant for the gaussian @@ -176,29 +201,31 @@ def gaussian_normalizing_const(mean, sigma, gmin, gmax): # First time; calculate multipllier for given mean and sigma ptot = 0 resolution = 1000 - d = (gmax-gmin)/resolution + d = (gmax - gmin) / resolution for i in range(resolution): - y = gmin + i*d + y = gmin + i * d ptot += d * gaussian_func(y, mean, sigma) - # TODO: Set value in global return ptot + def gaussian_func(x, mean, sigma): """ Function to evaluate a gaussian at a given point """ - GAUSSIAN_PREFACTOR = (1.0/math.sqrt(2.0*math.pi)); + GAUSSIAN_PREFACTOR = 1.0 / math.sqrt(2.0 * math.pi) - r = 1.0/(sigma) + r = 1.0 / (sigma) y = (x - mean) * r - return GAUSSIAN_PREFACTOR * r * math.exp(-0.5 * y**2) + return GAUSSIAN_PREFACTOR * r * math.exp(-0.5 * y ** 2) + ##### # Mass distributions -##### +##### + def Kroupa2001(m, newopts=None): """ @@ -208,12 +235,30 @@ def Kroupa2001(m, newopts=None): default = {'m0':0.1, 'm1':0.5, 'm2':1, 'mmax':100, 'p1':-1.3, 'p2':-2.3, 'p3':-2.3} """ - # Default params and override them - default = {'m0':0.1, 'm1':0.5, 'm2':1, 'mmax':100, 'p1':-1.3, 'p2':-2.3, 'p3':-2.3} + # Default params and override them + default = { + "m0": 0.1, + "m1": 0.5, + "m2": 1, + "mmax": 100, + "p1": -1.3, + "p2": -2.3, + "p3": -2.3, + } value_dict = default.copy() if newopts: value_dict.update(newopts) - return three_part_powerlaw(m, value_dict['m0'], value_dict['m1'], value_dict['m2'], value_dict['mmax'], value_dict['p1'], value_dict['p2'], value_dict['p3']) + return three_part_powerlaw( + m, + value_dict["m0"], + value_dict["m1"], + value_dict["m2"], + value_dict["mmax"], + value_dict["p1"], + value_dict["p2"], + value_dict["p3"], + ) + def ktg93(m, newopts): """ @@ -247,11 +292,28 @@ def ktg93(m, newopts): # set options # opts = set_opts({'m0':0.1, 'm1':0.5, 'm2':1.0, 'mmax':80, 'p1':-1.3, 'p2':-2.2, 'p3':-2.7}, newopts) - defaults = {'m0':0.1, 'm1':0.5, 'm2':1.0, 'mmax':80, 'p1':-1.3, 'p2':-2.2, 'p3':-2.7} + defaults = { + "m0": 0.1, + "m1": 0.5, + "m2": 1.0, + "mmax": 80, + "p1": -1.3, + "p2": -2.2, + "p3": -2.7, + } value_dict = default.copy() if newopts: value_dict.update(newopts) - return three_part_powerlaw(m, value_dict['m0'], value_dict['m0'], value_dict['m2'], value_dict['m0'], value_dict['m0'], value_dict['m0'], value_dict['m0']) + return three_part_powerlaw( + m, + value_dict["m0"], + value_dict["m0"], + value_dict["m2"], + value_dict["m0"], + value_dict["m0"], + value_dict["m0"], + value_dict["m0"], + ) # sub ktg93_lnspace @@ -269,6 +331,7 @@ def imf_tinsley1980(m): return three_part_powerlaw(m, 0.1, 2.0, 10.0, 80.0, -2.0, -2.3, -3.3) + def imf_scalo1986(m): """ From Scalo 1986 (defined up until 80Msol) @@ -282,27 +345,29 @@ def imf_scalo1998(m): """ return three_part_powerlaw(m, 0.1, 1.0, 10.0, 80.0, -1.2, -2.7, -2.3) + def imf_chabrier2003(m): """ # IMF of Chabrier 2003 PASP 115:763-795 - """ + """ Chabrier_logmc = math.log10(0.079) - Chabrier_sigma2 = (0.69*0.69) + Chabrier_sigma2 = 0.69 * 0.69 Chabrier_A1 = 0.158 Chabrier_A2 = 4.43e-2 Chabrier_x = -1.3 - if m < 0: + if m < 0: print("below 
bounds") raise ValueError - if (0 < m < 1.0): + if 0 < m < 1.0: A = 0.158 dm = math.log10(m) - Chabrier_logmc - p = Chabrier_A1 * math.exp(-(dm**2)/(2.0*Chabrier_sigma2)) + p = Chabrier_A1 * math.exp(-(dm ** 2) / (2.0 * Chabrier_sigma2)) else: p = Chabrier_A2 * (m ** Chabrier_x) - p = p/(0.1202462 * m * math.log(10)) + p = p / (0.1202462 * m * math.log(10)) return p + ######################################################################## # Binary fractions ######################################################################## @@ -313,8 +378,11 @@ def Arenou2010_binary_fraction(m): # www.rssd.esa.int/doc_fetch.php?id=2969346 return 0.8388 * math.tanh(0.688 * m + 0.079) + + # print(Arenou2010_binary_fraction(0.4)) + def raghavan2010_binary_fraction(m): """ Fit to the Raghavan 2010 binary fraction as a function of @@ -328,17 +396,21 @@ def raghavan2010_binary_fraction(m): Rob then fitted the result """ - return min(1.0, + return min( + 1.0, max( - (m**0.1) * (5.12310e-01) + (-1.02070e-01), - (1.10450e+00) * (m**(4.93670e-01)) + (-6.95630e-01) - ) - ) + (m ** 0.1) * (5.12310e-01) + (-1.02070e-01), + (1.10450e00) * (m ** (4.93670e-01)) + (-6.95630e-01), + ), + ) + + # print(raghavan2010_binary_fraction(2)) -######################################################################## +######################################################################## # Period distributions -######################################################################## +######################################################################## + def duquennoy1991(x): """ @@ -349,7 +421,8 @@ def duquennoy1991(x): """ return gaussian(x, 4.8, 2.3, -2, 12) -def sana12(M1, M2, a, P, amin, amax, x0, x1, p): # TODO: ? wtf. vague input + +def sana12(M1, M2, a, P, amin, amax, x0, x1, p): # TODO: ? wtf. vague input """ distribution of initial orbital periods as found by Sana et al. (2012) which is a flat distribution in ln(a) and ln(P) respectively for stars @@ -365,39 +438,43 @@ def sana12(M1, M2, a, P, amin, amax, x0, x1, p): # TODO: ? wtf. vague input res = 0 - - if (M1 < 15) or (M2/M1 < 0.1): - res = (1.0/(math.log(amax)-math.log(amin))) + if (M1 < 15) or (M2 / M1 < 0.1): + res = 1.0 / (math.log(amax) - math.log(amin)) else: p1 = 1.0 + p # For more details see the LyX document of binary_c for this distribution where the variables and normalizations are given - # we use the notation x=log(P), xmin=log(Pmin), x0=log(P0), ... to determine the + # we use the notation x=log(P), xmin=log(Pmin), x0=log(P0), ... 
to determine the x = log_ln_converter * math.log(p) xmin = log_ln_converter * math.log(calc_period_from_sep(m1, m2, amin)) xmin = log_ln_converter * math.log(calc_period_from_sep(m1, m2, amax)) - #my $x0 = 0.15; - #my $x1 = 3.5; + # my $x0 = 0.15; + # my $x1 = 3.5; - A1 = 1.0/(x0**p * (x0-xmin) + (x1**p1 -x0**p1)/p1 + x1**p * (xmax-x1)) - A0 = A1 * x0**p - A2 = A1 * x1**p + A1 = 1.0 / ( + x0 ** p * (x0 - xmin) + (x1 ** p1 - x0 ** p1) / p1 + x1 ** p * (xmax - x1) + ) + A0 = A1 * x0 ** p + A2 = A1 * x1 ** p - if (x < x0): - res = 3.0/2.0 * log_ln_converter * A0 - elif (x > x1): - res = 3.0/2.0 * log_ln_converter * A2 + if x < x0: + res = 3.0 / 2.0 * log_ln_converter * A0 + elif x > x1: + res = 3.0 / 2.0 * log_ln_converter * A2 else: - res = 3.0/2.0 * log_ln_converter * A1 * x**p + res = 3.0 / 2.0 * log_ln_converter * A1 * x ** p return res + + # print(sana12(10, 2, 10, 100, 1, 1000, math.log(10), math.log(1000), 6)) ######################################################################## # Mass ratio distributions ######################################################################## + def flatsections(x, opts): """ @@ -410,14 +487,16 @@ def flatsections(x, opts): y = 0 for opt in opts: - dc = (opt['max'] - opt['min'])*opt['height'] + dc = (opt["max"] - opt["min"]) * opt["height"] # print("added flatsection ({}-{})*{} = {}\n".format(opt['max'], opt['min'], opt['height'], dc)) c += dc - if (opt['min'] <= x <= opt['max']): - y = opt['height'] + if opt["min"] <= x <= opt["max"]: + y = opt["height"] # print("Use this\n") - c = 1.0/c + c = 1.0 / c y = y * c # print("flatsections gives C={}: y={}\n",c,y) return y + + # print(flatsections(1, [{'min': 0, 'max': 2, 'height': 3}])) diff --git a/binarycpython/utils/functions.py b/binarycpython/utils/functions.py index 779f0f7a2c9a1cff884af07fb7635814790f6f1b..71590f1202f52168fcc0df66d7b56c07d9891828 100644 --- a/binarycpython/utils/functions.py +++ b/binarycpython/utils/functions.py @@ -10,6 +10,7 @@ from binarycpython.utils.custom_logging_functions import ( create_and_load_logging_function, ) + def parse_binary_c_version_info(version_info_string): version_info_dict = {} diff --git a/binarycpython/utils/grid.py b/binarycpython/utils/grid.py index baa5df9968c67fe2fe4b8e5f0d5981c2b213a9ec..fb24d0661b9496b7f611be79e72956f1e8c30371 100644 --- a/binarycpython/utils/grid.py +++ b/binarycpython/utils/grid.py @@ -193,7 +193,7 @@ class Population(object): parameter_name, condition=None, ): - """ + """ Function to add grid variables to the grid_options. TODO: Fix this complex function.
@@ -390,13 +390,14 @@ class Population(object): ) # Load memory address - self.grid_options[ - "custom_logging_func_memaddr" - ], self.grid_options["custom_logging_shared_library_file"] = create_and_load_logging_function( + ( + self.grid_options["custom_logging_func_memaddr"], + self.grid_options["custom_logging_shared_library_file"], + ) = create_and_load_logging_function( custom_logging_code, verbose=self.grid_options["verbose"] ) - elif self.grid_options["C_auto_logging"]: + elif self.grid_options["C_auto_logging"]: # Generate real logging code logging_line = autogen_C_logging_code( self.grid_options["C_auto_logging"], @@ -409,9 +410,10 @@ class Population(object): ) # Load memory address - self.grid_options[ - "custom_logging_func_memaddr" - ], self.grid_options["custom_logging_shared_library_file"] = create_and_load_logging_function( + ( + self.grid_options["custom_logging_func_memaddr"], + self.grid_options["custom_logging_shared_library_file"], + ) = create_and_load_logging_function( custom_logging_code, verbose=self.grid_options["verbose"] ) @@ -442,7 +444,7 @@ class Population(object): # TODO: add call to function that cleans up the temp customlogging dir, and unloads the loaded libraries. # TODO: make a switch to turn this off if clean_up_custom_logging_files: - self.clean_up_custom_logging(evol_type='single') + self.clean_up_custom_logging(evol_type="single") # Parse if parse_function: @@ -457,10 +459,11 @@ class Population(object): if os.path.exists(file): try: - if verbose > 0: print("Removed {}".format()) + if verbose > 0: + print("Removed {}".format(file)) os.remove(file) except: - if verbose > 0: print("Error while deleting file {}".format(file)) + print("Error while deleting file {}".format(file)) raise FileNotFoundError def clean_up_custom_logging(self, evol_type): """ @@ -475,19 +478,24 @@ class Population(object): - TODO: make this and design this """ - if evol_type=='single': - if self.grid_options['verbose'] > 0: print("Cleaning up the custom logging stuff. type: single") + if evol_type == "single": + if self.grid_options["verbose"] > 0: + print("Cleaning up the custom logging stuff. type: single") # TODO: Unset custom logging code # TODO: Unset function memory address - print(self.grid_options["custom_logging_func_memaddr"]) + print(self.grid_options["custom_logging_func_memaddr"]) # remove shared library files if self.grid_options["custom_logging_shared_library_file"]: - self.remove_file(self.grid_options["custom_logging_shared_library_file"], self.grid_options['verbose']) + self.remove_file( + self.grid_options["custom_logging_shared_library_file"], + self.grid_options["verbose"], + ) - if evol_type=='population': - if self.grid_options['verbose'] > 0: print("Cleaning up the custom logging stuffs. type: population") + if evol_type == "population": + if self.grid_options["verbose"] > 0: + print("Cleaning up the custom logging stuffs. type: population") # TODO: make sure that these also work. not fully sure if necessary tho.
whether its a single file, or a dict of files/memaddresses def evolve_population(self, parse_function, custom_arg_file=None): @@ -619,13 +627,9 @@ class Population(object): ######################################################### duration_no_mp = stop_no_mp - start_no_mp - duration_mp = stop_mp - start_mp - ratio = duration_no_mp/duration_mp - print( - "Running mp versus no mp is {} times faster!".format( - ratio - ) - ) + duration_mp = stop_mp - start_mp + ratio = duration_no_mp / duration_mp + print("Running mp versus no mp is {} times faster!".format(ratio)) return (nodes, amt, duration_no_mp, duration_mp, ratio) @@ -711,10 +715,6 @@ class Population(object): print("\n\nBinary_c output:") print(output) - - - - ################################################### # Unordered functions ################################################### @@ -724,13 +724,13 @@ class Population(object): Function to add to the total probability """ - self.grid_options['probtot'] += prob + self.grid_options["probtot"] += prob def increment_count(self): """ Function to add to the total amount of stars """ - self.grid_options['count'] += 1 + self.grid_options["count"] += 1 ################################################### # Gridcode functions @@ -739,6 +739,10 @@ class Population(object): def generate_grid_code(self, dry_run=False): """ Function that generates the code from which the population will be made. + + + The phasevol values are handled by generating a second array + # DONE: make a generator for this. # TODO: Add correct logging everywhere @@ -748,6 +752,10 @@ class Population(object): # TODO: add part to correctly set the values with the spacingsfunctions. # TODO: add phasevol correctly # TODO: add centering center left right for the spacing + # TODO: add sensible description to this function. + + Results in a generated file that contains a system_generator function. 
+ """ if self.grid_options["verbose"] > 0: @@ -802,7 +810,7 @@ class Population(object): ) ) code_string += indent * depth + "parameter_dict = {}\n" - code_string += indent * depth + "phasevol = 0\n" + code_string += indent * depth + "phasevol = 0\n" code_string += indent * depth + "\n" code_string += indent * depth + "# setting probability lists\n" @@ -823,10 +831,12 @@ class Population(object): code_string += indent * depth + "\n" # Generate code print("Generating grid code") - for loopnr, el in enumerate(sorted( - self.grid_options["grid_variables"].items(), - key=lambda x: x[1]["grid_variable_number"], - )): + for loopnr, el in enumerate( + sorted( + self.grid_options["grid_variables"].items(), + key=lambda x: x[1]["grid_variable_number"], + ) + ): print("Constructing/adding: {}".format(el[0])) grid_variable = el[1] @@ -849,7 +859,7 @@ class Population(object): + "\n" ) - # Add condition failed action: #TODO: add correct exception error + # Add condition failed action: code_string += ( indent * (depth + 1) + 'print("Condition for {} not met!")'.format( @@ -891,9 +901,10 @@ class Population(object): + "\n" ) - # Set phasevol - code_string += (indent * (depth + 1) + "if {} > 0: phasevol *= {}\n".format(grid_variable["name"], grid_variable["name"])) + code_string += indent * (depth + 1) + "if {} > 0: phasevol *= {}\n".format( + grid_variable["name"], grid_variable["name"] + ) ####################### # Probabilities @@ -957,15 +968,15 @@ class Population(object): # Add some space code_string += "\n" - # The final parts of the code, where things are returned, are within the deepest loop, + # The final parts of the code, where things are returned, are within the deepest loop, # but in some cases code from a higher loop needs to go under it again - # SO I think its better to put an ifstatement here that checks whether this is the last loop. - if loopnr == len(self.grid_options["grid_variables"]) -1: + # SO I think its better to put an ifstatement here that checks whether this is the last loop. + if loopnr == len(self.grid_options["grid_variables"]) - 1: ################################################################################# # Here are the calls to the queuing or other solution. this part is for every system # Add comment - code_string += (indent * (depth + 1) + "#" * 40 + "\n") + code_string += indent * (depth + 1) + "#" * 40 + "\n" code_string += ( indent * (depth + 1) + "# Code below will get evaluated for every generated system\n" @@ -989,20 +1000,15 @@ class Population(object): # set probability and phasevol values code_string += ( indent * (depth + 1) - + 'parameter_dict["{}"] = {}'.format( - 'probability', 'probability' - ) + + 'parameter_dict["{}"] = {}'.format("probability", "probability") + "\n" ) code_string += ( indent * (depth + 1) - + 'parameter_dict["{}"] = {}'.format( - 'phasevol', 'phasevol' - ) + + 'parameter_dict["{}"] = {}'.format("phasevol", "phasevol") + "\n" ) - # Some prints. will be removed # code_string += indent * (depth + 1) + "print(probabilities)\n" # code_string += ( @@ -1010,7 +1016,9 @@ class Population(object): # ) # Increment total probability - code_string += indent * (depth + 1) + 'self.increment_probtot(probability)\n' + code_string += ( + indent * (depth + 1) + "self.increment_probtot(probability)\n" + ) if not dry_run: # Handling of what is returned, or what is not. 
@@ -1025,21 +1033,23 @@ class Population(object): else: code_string += indent * (depth + 1) + "pass\n" - code_string += (indent * (depth + 1) + "#" * 40 + "\n") + code_string += indent * (depth + 1) + "#" * 40 + "\n" # increment depth depth += 1 - depth -= 1 code_string += "\n" # Write parts to write below the part that yield the results. this has to go in a reverse order: - # Here comes the stuff that is put after the deepest nested part that calls returns stuff. - for loopnr, el in enumerate(sorted( - self.grid_options["grid_variables"].items(), - key=lambda x: x[1]["grid_variable_number"], reverse=True - )): - code_string += (indent * (depth + 1) + "#" * 40 + "\n") + # Here comes the stuff that is put after the deepest nested part that calls returns stuff. + for loopnr, el in enumerate( + sorted( + self.grid_options["grid_variables"].items(), + key=lambda x: x[1]["grid_variable_number"], + reverse=True, + ) + ): + code_string += indent * (depth + 1) + "#" * 40 + "\n" code_string += ( indent * (depth + 1) + "# Code below is for finalising the handling of this iteration of the parameter\n" @@ -1047,20 +1057,24 @@ class Population(object): # Set phasevol # TODO: fix. this isnt supposed to be the value that we give it here. discuss - code_string += (indent * (depth + 1) + "if {} > 0: phasevol /= {}\n".format(grid_variable["name"], grid_variable["name"])) + code_string += indent * (depth + 1) + "if {} > 0: phasevol /= {}\n".format( + grid_variable["name"], grid_variable["name"] + ) depth -= 1 - ################ # Finalising print statements # - code_string += (indent * (depth + 1) + "\n") - code_string += (indent * (depth + 1) + "#" * 40 + "\n") - code_string += (indent * (depth + 1) + "print('Grid has handled {} stars'.format(total_starcount))\n") - code_string += (indent * (depth + 1) + "print('with a total probability of {}'.format(2))\n") - - + code_string += indent * (depth + 1) + "\n" + code_string += indent * (depth + 1) + "#" * 40 + "\n" + code_string += ( + indent * (depth + 1) + + "print('Grid has handled {} stars'.format(total_starcount))\n" + ) + code_string += ( + indent * (depth + 1) + "print('with a total probability of {}'.format(2))\n" + ) ################################################################################# # Stop of code generation. Here the code is saved and written @@ -1094,7 +1108,6 @@ class Population(object): pass - def load_grid_function(self): """ Test function to run grid stuff. mostly to test the import @@ -1118,7 +1131,7 @@ class Population(object): spec.loader.exec_module(grid_file) generator = grid_file.grid_code(self) - self.grid_options['system_generator'] = generator + self.grid_options["system_generator"] = generator if self.grid_options["verbose"] > 0: print("Grid code loaded") @@ -1130,7 +1143,6 @@ class Population(object): # print(next(generator)) # print(next(generator)) - def write_binary_c_calls_to_file(self, output_dir=None, output_filename=None): """ Function that loops over the gridcode and writes the generated parameters to a file. 
In the form of a commandline call @@ -1138,39 +1150,39 @@ class Population(object): On default this will write to the datadir, if it exists """ - if self.grid_options['system_generator']: - if self.custom_options.get('data_dir', None): - binary_c_calls_output_dir = self.custom_options['data_dir'] - print('yo') + if self.grid_options["system_generator"]: + if self.custom_options.get("data_dir", None): + binary_c_calls_output_dir = self.custom_options["data_dir"] + print("yo") else: if not output_dir: # if self.grid_options['verbose'] > 0: - print("Error. No data_dir configured and you gave no output_dir. Aborting") + print( + "Error. No data_dir configured and you gave no output_dir. Aborting" + ) raise ValueError else: binary_c_calls_output_dir = output_dir if output_filename: - binary_c_calls_filename = output_filename + binary_c_calls_filename = output_filename else: - binary_c_calls_filename = 'binary_c_calls.txt' + binary_c_calls_filename = "binary_c_calls.txt" print(binary_c_calls_output_dir, binary_c_calls_filename) - with open(os.path.join(binary_c_calls_output_dir, binary_c_calls_filename), 'w') as f: - for system in self.grid_options['system_generator']: + with open( + os.path.join(binary_c_calls_output_dir, binary_c_calls_filename), "w" + ) as f: + for system in self.grid_options["system_generator"]: full_system_dict = self.bse_options.copy() full_system_dict.update(system) binary_cmdline_string = self.return_argline(full_system_dict) - f.write(binary_cmdline_string + '\n') + f.write(binary_cmdline_string + "\n") else: - if self.grid_options['verbose'] > 0: - print("Error. No grid function found!") - raise KeyError - - - + print("Error. No grid function found!") + raise KeyError ################################################################################################ diff --git a/binarycpython/utils/grid_options_defaults.py b/binarycpython/utils/grid_options_defaults.py index 69acd1c61eaf78a39f8dbbeb621670c0826a3e7c..ea4119291d2d264e8b7ca5b8fedd77ce703936ce 100644 --- a/binarycpython/utils/grid_options_defaults.py +++ b/binarycpython/utils/grid_options_defaults.py @@ -8,10 +8,16 @@ grid_options_defaults_dict = { "amt_cores": 1, # total amount of cores used to evolve the population "verbose": 0, # Level of verbosity of the simulation # binary_c files - "binary_c_executable": os.path.join(os.environ["BINARY_C"], "binary_c-config") # TODO: make this more robust - "binary_c_shared_library": os.path.join(os.environ["BINARY_C"], "src", "libbinary_c.so") # TODO: make this more robust - "binary_c_config_executable": os.path.join(os.environ["BINARY_C"], "binary_c-config")# TODO: make this more robust - + "binary_c_executable": os.path.join( + os.environ["BINARY_C"], "binary_c-config" + ), # TODO: make this more robust + "binary_c_shared_library": os.path.join( + os.environ["BINARY_C"], "src", "libbinary_c.so" + ), # TODO: make this more robust + "binary_c_config_executable": os.path.join( + os.environ["BINARY_C"], "binary_c-config" + ), # TODO: make this more robust + "binary_c_dir": os.environ["BINARYC_DIR"], # Output dir # "output_dir": # Custom logging @@ -25,21 +31,19 @@ grid_options_defaults_dict = { "log_args": 0, # "log_args_dir": "/tmp/", # Grid variables: instructions to generate the values of the parameters - "grid_variables": {}, # grid variables + "grid_variables": {}, # grid variables "grid_code": None, # literal grid code - "gridcode_filename": None, # filename of gridcode + "gridcode_filename": None, # filename of gridcode "count": 0, # total count of systems - 
"probtot": 0, # total probabilit - "system_generator": None, # value that holds the function that generates the system (result of building the grid script) + "probtot": 0, # total probabilit + "system_generator": None, # value that holds the function that generates the system (result of building the grid script) # binary "binary": 0, # Locations: - "binary_c_dir": os.environ["BINARYC_DIR"], "tmp_dir": temp_dir(), # Probability: "weight": 1.0, # weighting for the probability "repeat": 1.0, # number of times to repeat each system (probability is adjusted to be 1/repeat) - ## # return_array_refs=>1, # quicker data parsing mode # sort_args=>1, diff --git a/binarycpython/utils/useful_funcs.py b/binarycpython/utils/useful_funcs.py index ac83dd89530d6c812b5609e16933e813b025dd09..cf1b5b444224bc2adad9d1e53da7e78f96d248ac 100644 --- a/binarycpython/utils/useful_funcs.py +++ b/binarycpython/utils/useful_funcs.py @@ -1,5 +1,5 @@ ############################################################ -# Collection of useful functions. +# Collection of useful functions. # Part of this is copied/inspired by Robs # Rob's binary_stars module @@ -8,13 +8,14 @@ # calc_period_from_sep($m1,$m2,$sep) calculate the period given the separation. # calc_sep_from_period($m1,$m2,per) does the inverse. # M1,M2,separation are in solar units, period in days. -# rzams($m,$z) gives you the ZAMS radius of a star +# rzams($m,$z) gives you the ZAMS radius of a star # ZAMS_collision($m1,$m2,$e,$sep,$z) returns 1 if stars collide on the ZAMS ### # TODO: check whether these are correct -AURSUN = 2.150445198804013386961742071435e+02 -YEARDY = 3.651995478818308811241877265275e+02 +AURSUN = 2.150445198804013386961742071435e02 +YEARDY = 3.651995478818308811241877265275e02 + def calc_period_from_sep(M1, M2, sep): """ @@ -22,7 +23,8 @@ def calc_period_from_sep(M1, M2, sep): args : M1, M2, separation (Rsun) """ - return YEARDY*(sep/AURSUN)*sqrt(sep/(AURSUN*(M1+M2))); + return YEARDY * (sep / AURSUN) * sqrt(sep / (AURSUN * (M1 + M2))) + def calc_sep_from_period(M1, M2, period): """ @@ -30,22 +32,26 @@ def calc_sep_from_period(M1, M2, period): args : M1, M2, period (days) """ - return AURSUN*(period*period*(M1+M2)/(YEARDY*YEARDY))**(1.0/3.0); + return AURSUN * (period * period * (M1 + M2) / (YEARDY * YEARDY)) ** (1.0 / 3.0) + def roche_lobe(q): """ A function to evaluate R_L/a(q), Eggleton 1983. 
""" - - p = q**(1.0/3.0); - return(0.49*p*p/(0.6*p*p + log(1.0+p))); + + p = q ** (1.0 / 3.0) + return 0.49 * p * p / (0.6 * p * p + log(1.0 + p)) + def ragb(m, z): """ Function to calculate radius of a star at first thermal pulse as a function of mass (z=0.02) """ # Z=0.02 only, but also good for Z=0.001 - return m*40.0+20.0; # in Rsun + return m * 40.0 + 20.0 + # in Rsun + def zams_collission(m1, m2, sep, e, z): """ @@ -53,22 +59,23 @@ def zams_collission(m1, m2, sep, e, z): # determine if two stars collide on the ZAMS """ # calculate periastron distance - peri_distance=(1.0-e) * sep - + peri_distance = (1.0 - e) * sep + # calculate star radii - r1=rzams(m1, z); - r2=rzams(m2, z); + r1 = rzams(m1, z) + r2 = rzams(m2, z) if r1 + r2 > peri_distance: return 1 else: return 0 + def rzams(m, z): """ Function to determine the radius of a ZAMS star as a function of m and z: """ - lzs = math.log10(z/0.02) + lzs = math.log10(z / 0.02) # # A function to evaluate Rzams @@ -76,18 +83,108 @@ def rzams(m, z): # p = {} - xz = (666, 0.39704170, -0.32913574, 0.34776688, 0.37470851, 0.09011915, 8.52762600,-24.41225973, 56.43597107, 37.06152575, 5.45624060, 0.00025546, -0.00123461, -0.00023246, 0.00045519, 0.00016176, 5.43288900, -8.62157806, 13.44202049, 14.51584135, 3.39793084, 5.56357900,-10.32345224, 19.44322980, 18.97361347, 4.16903097, 0.78866060, -2.90870942, 6.54713531, 4.05606657, 0.53287322, 0.00586685, -0.01704237, 0.03872348, 0.02570041, 0.00383376, 1.71535900, 0.62246212, -0.92557761, -1.16996966,-0.30631491, 6.59778800, -0.42450044,-12.13339427,-10.73509484,-2.51487077, 10.08855000, -7.11727086,-31.67119479,-24.24848322,-5.33608972, 1.01249500, 0.32699690, -0.00923418, -0.03876858,-0.00412750, 0.07490166, 0.02410413, 0.07233664, 0.03040467, 0.00197741, 0.01077422, 3.08223400, 0.94472050, -2.15200882, -2.49219496,-0.63848738, 17.84778000, -7.45345690,-48.96066856,-40.05386135,-9.09331816, 0.00022582, -0.00186899, 0.00388783, 0.00142402,-0.00007671); - - p['8'] = xz[36]+lzs*(xz[37]+lzs*(xz[38]+lzs*(xz[39]+lzs*xz[40]))); - p['9'] = xz[41]+lzs*(xz[42]+lzs*(xz[43]+lzs*(xz[44]+lzs*xz[45]))); - p['10'] = xz[46]+lzs*(xz[47]+lzs*(xz[48]+lzs*(xz[49]+lzs*xz[50]))); - p['11'] = xz[51]+lzs*(xz[52]+lzs*(xz[53]+lzs*(xz[54]+lzs*xz[55]))); - p['12'] = xz[56]+lzs*(xz[57]+lzs*(xz[58]+lzs*(xz[59]+lzs*xz[60]))); - p['13'] = xz[61]; - p['14'] = xz[62]+lzs*(xz[63]+lzs*(xz[64]+lzs*(xz[65]+lzs*xz[66]))); - p['15'] = xz[67]+lzs*(xz[68]+lzs*(xz[69]+lzs*(xz[70]+lzs*xz[71]))); - p['16'] = xz[72]+lzs*(xz[73]+lzs*(xz[74]+lzs*(xz[75]+lzs*xz[76]))); - - m195 = m**19.5 - rad = ((p['8']*(m**2.5) + p['9']*(m**6.5) + p['10']*(m**11) + p['11']*(m**19)+p['12']*m195)/(p['13'] + p['14']*(m*m) + p['15']*(m**8.5) + (m**18.5) + p['16']*m195)) - return rad \ No newline at end of file + xz = ( + 666, + 0.39704170, + -0.32913574, + 0.34776688, + 0.37470851, + 0.09011915, + 8.52762600, + -24.41225973, + 56.43597107, + 37.06152575, + 5.45624060, + 0.00025546, + -0.00123461, + -0.00023246, + 0.00045519, + 0.00016176, + 5.43288900, + -8.62157806, + 13.44202049, + 14.51584135, + 3.39793084, + 5.56357900, + -10.32345224, + 19.44322980, + 18.97361347, + 4.16903097, + 0.78866060, + -2.90870942, + 6.54713531, + 4.05606657, + 0.53287322, + 0.00586685, + -0.01704237, + 0.03872348, + 0.02570041, + 0.00383376, + 1.71535900, + 0.62246212, + -0.92557761, + -1.16996966, + -0.30631491, + 6.59778800, + -0.42450044, + -12.13339427, + -10.73509484, + -2.51487077, + 10.08855000, + -7.11727086, + -31.67119479, + -24.24848322, + -5.33608972, + 1.01249500, + 
0.32699690, + -0.00923418, + -0.03876858, + -0.00412750, + 0.07490166, + 0.02410413, + 0.07233664, + 0.03040467, + 0.00197741, + 0.01077422, + 3.08223400, + 0.94472050, + -2.15200882, + -2.49219496, + -0.63848738, + 17.84778000, + -7.45345690, + -48.96066856, + -40.05386135, + -9.09331816, + 0.00022582, + -0.00186899, + 0.00388783, + 0.00142402, + -0.00007671, + ) + + p["8"] = xz[36] + lzs * (xz[37] + lzs * (xz[38] + lzs * (xz[39] + lzs * xz[40]))) + p["9"] = xz[41] + lzs * (xz[42] + lzs * (xz[43] + lzs * (xz[44] + lzs * xz[45]))) + p["10"] = xz[46] + lzs * (xz[47] + lzs * (xz[48] + lzs * (xz[49] + lzs * xz[50]))) + p["11"] = xz[51] + lzs * (xz[52] + lzs * (xz[53] + lzs * (xz[54] + lzs * xz[55]))) + p["12"] = xz[56] + lzs * (xz[57] + lzs * (xz[58] + lzs * (xz[59] + lzs * xz[60]))) + p["13"] = xz[61] + p["14"] = xz[62] + lzs * (xz[63] + lzs * (xz[64] + lzs * (xz[65] + lzs * xz[66]))) + p["15"] = xz[67] + lzs * (xz[68] + lzs * (xz[69] + lzs * (xz[70] + lzs * xz[71]))) + p["16"] = xz[72] + lzs * (xz[73] + lzs * (xz[74] + lzs * (xz[75] + lzs * xz[76]))) + + m195 = m ** 19.5 + rad = ( + p["8"] * (m ** 2.5) + + p["9"] * (m ** 6.5) + + p["10"] * (m ** 11) + + p["11"] * (m ** 19) + + p["12"] * m195 + ) / ( + p["13"] + + p["14"] * (m * m) + + p["15"] * (m ** 8.5) + + (m ** 18.5) + + p["16"] * m195 + ) + return rad diff --git a/coverage.txt b/coverage.txt new file mode 100644 index 0000000000000000000000000000000000000000..547b8b8dbb9fe7f011bab00c2aaddb3b9b41a97a --- /dev/null +++ b/coverage.txt @@ -0,0 +1,208 @@ + +File: "binary_c-python/__init__.py" + - File is empty + Needed: 0; Found: 0; Missing: 0; Coverage: 0.0% + +File: "binary_c-python/setup.py" + - No module docstring + - No docstring for `readme` + Needed: 2; Found: 0; Missing: 2; Coverage: 0.0% + +File: "binary_c-python/examples/example_population.py" + - No module docstring + Needed: 1; Found: 0; Missing: 1; Coverage: 0.0% + +File: "binary_c-python/examples/examples.py" + - No module docstring + Needed: 6; Found: 5; Missing: 1; Coverage: 83.3% + +File: "binary_c-python/examples/example_plotting_distributions.py" + - No module docstring + Needed: 1; Found: 0; Missing: 1; Coverage: 0.0% + +File: "binary_c-python/examples/examples_custom_logging.py" + - No module docstring + Needed: 1; Found: 0; Missing: 1; Coverage: 0.0% + +File: "binary_c-python/examples/.ipynb_checkpoints/examples-checkpoint.py" + - No module docstring + Needed: 5; Found: 4; Missing: 1; Coverage: 80.0% + +File: "binary_c-python/snippets/multiprocessing_comparison.py" + - No module docstring + - No docstring for `parse_function` + - No docstring for `evolve_mp` + - No docstring for `g` + Needed: 5; Found: 1; Missing: 4; Coverage: 20.0% + +File: "binary_c-python/snippets/verbose.py" + - No module docstring + Needed: 1; Found: 0; Missing: 1; Coverage: 0.0% + +File: "binary_c-python/snippets/test.py" + - No module docstring + Needed: 1; Found: 0; Missing: 1; Coverage: 0.0% + +File: "binary_c-python/snippets/increment.py" + - No module docstring + Needed: 1; Found: 0; Missing: 1; Coverage: 0.0% + +File: "binary_c-python/snippets/phasevol.py" + - No module docstring + Needed: 1; Found: 0; Missing: 1; Coverage: 0.0% + +File: "binary_c-python/snippets/dict_merging.py" + - No module docstring + Needed: 1; Found: 0; Missing: 1; Coverage: 0.0% + +File: "binary_c-python/snippets/yield_test.py" + - No module docstring + - No docstring for `yielder` + Needed: 2; Found: 0; Missing: 2; Coverage: 0.0% + +File: "binary_c-python/snippets/montecarlo_example.py" + - No module docstring + - 
No docstring for `mass_montecarlo` + - No docstring for `calc_alpha` + - No docstring for `period_montecarlo` + - No docstring for `calc_beta` + - No docstring for `massratio_montecarlo` + - No docstring for `eccentricity_montecarlo` + Needed: 7; Found: 0; Missing: 7; Coverage: 0.0% + +File: "binary_c-python/snippets/defaultdict.py" + - No module docstring + Needed: 1; Found: 0; Missing: 1; Coverage: 0.0% + +File: "binary_c-python/snippets/multiprocessing_test.py" + - No module docstring + - No docstring for `calculate` + - No docstring for `calculate.run` + - No docstring for `run.f` + Needed: 4; Found: 0; Missing: 4; Coverage: 0.0% + +File: "binary_c-python/docs/source/conf.py" + - No module docstring + Needed: 1; Found: 0; Missing: 1; Coverage: 0.0% + +File: "binary_c-python/lib/__init__.py" + - File is empty + Needed: 0; Found: 0; Missing: 0; Coverage: 0.0% + +File: "binary_c-python/binarycpython/__init__.py" + - File is empty + Needed: 0; Found: 0; Missing: 0; Coverage: 0.0% + +File: "binary_c-python/binarycpython/core/__init__.py" + - File is empty + Needed: 0; Found: 0; Missing: 0; Coverage: 0.0% + +File: "binary_c-python/binarycpython/utils/grid_options_defaults.py" + - No module docstring + Needed: 1; Found: 0; Missing: 1; Coverage: 0.0% + +File: "binary_c-python/binarycpython/utils/distribution_functions.py" + - No module docstring + - No docstring for `calculate_constants_three_part_powerlaw` + - No docstring for `Arenou2010_binary_fraction` + Needed: 23; Found: 20; Missing: 3; Coverage: 87.0% + +File: "binary_c-python/binarycpython/utils/stellar_types.py" + - No module docstring + Needed: 1; Found: 0; Missing: 1; Coverage: 0.0% + +File: "binary_c-python/binarycpython/utils/spacing_functions.py" + - No module docstring + - No docstring for `const` + Needed: 2; Found: 0; Missing: 2; Coverage: 0.0% + +File: "binary_c-python/binarycpython/utils/useful_funcs.py" + - No module docstring + Needed: 7; Found: 6; Missing: 1; Coverage: 85.7% + +File: "binary_c-python/binarycpython/utils/__init__.py" + - File is empty + Needed: 0; Found: 0; Missing: 0; Coverage: 0.0% + +File: "binary_c-python/binarycpython/utils/custom_logging_functions.py" + - No module docstring + Needed: 9; Found: 8; Missing: 1; Coverage: 88.9% + +File: "binary_c-python/binarycpython/utils/functions.py" + - No module docstring + - No docstring for `parse_binary_c_version_info` + Needed: 13; Found: 11; Missing: 2; Coverage: 84.6% + +File: "binary_c-python/binarycpython/utils/grid.py" + - No module docstring + - No docstring for `Population` + - No docstring for `Population.set_bse_option` + - No docstring for `evolve_population_comparison.evolve_mp` + - No docstring for `evolve_population_comparison.g` + - No docstring for `evolve_population_mp.evolve_mp` + - No docstring for `evolve_population_mp.g` + Needed: 32; Found: 25; Missing: 7; Coverage: 78.1% + +File: "binary_c-python/binarycpython/utils/.ipynb_checkpoints/custom_logging_functions-checkpoint.py" + - No module docstring + Needed: 9; Found: 8; Missing: 1; Coverage: 88.9% + +File: "binary_c-python/binarycpython/utils/.ipynb_checkpoints/functions-checkpoint.py" + - No module docstring + - No docstring for `load_logfile` + Needed: 8; Found: 6; Missing: 2; Coverage: 75.0% + +File: "binary_c-python/tests/python_API_test.py" + - No module docstring + - No docstring for `test_run_binary` + - No docstring for `test_return_help` + - No docstring for `test_return_arglines` + - No docstring for `test_run_binary_with_log` + - No docstring for 
`test_run_binary_with_custom_logging` + - No docstring for `test_return_help_all` + - No docstring for `test_return_version_info` + - No docstring for `test_return_store` + - No docstring for `test_run_system` + Needed: 10; Found: 0; Missing: 10; Coverage: 0.0% + +File: "binary_c-python/tests/random_tests.py" + - No module docstring + Needed: 1; Found: 0; Missing: 1; Coverage: 0.0% + +File: "binary_c-python/tests/function_tests.py" + - No module docstring + Needed: 1; Found: 0; Missing: 1; Coverage: 0.0% + +File: "binary_c-python/tests/population/plot_scaling.py" + - No module docstring + - No docstring for `calc_mean_and_std` + Needed: 2; Found: 0; Missing: 2; Coverage: 0.0% + +File: "binary_c-python/tests/population/grid_tests_cmdline.py" + - No module docstring + Needed: 1; Found: 0; Missing: 1; Coverage: 0.0% + +File: "binary_c-python/tests/population/multiprocessing_via_population_comparison.py" + - No module docstring + - No docstring for `parse_function` + Needed: 3; Found: 1; Missing: 2; Coverage: 33.3% + +File: "binary_c-python/tests/population/grid_tests.py" + - No module docstring + Needed: 1; Found: 0; Missing: 1; Coverage: 0.0% + +File: "binary_c-python/tests/population/global_variable_for_distributions.py" + - No module docstring + - No docstring for `with_glob` + - No docstring for `without_glob` + Needed: 6; Found: 3; Missing: 3; Coverage: 50.0% + +File: "binary_c-python/.ipynb_checkpoints/python_API_test-checkpoint.py" + - No module docstring + - No docstring for `run_test_binary` + Needed: 2; Found: 0; Missing: 2; Coverage: 0.0% + + +Overall statistics for 41 files (5 files are empty): +Docstrings needed: 173; Docstrings found: 98; Docstrings missing: 75 +Total docstring coverage: 56.6%; Grade: Not bad diff --git a/examples/example_plotting_distributions.py b/examples/example_plotting_distributions.py index 15cedaf23d3681c68531a955fe23181556424ad7..49c1c9167938782d6bef395d52796dba7c262955 100644 --- a/examples/example_plotting_distributions.py +++ b/examples/example_plotting_distributions.py @@ -3,11 +3,25 @@ import math import numpy as np import matplotlib.pyplot as plt -from binarycpython.utils.distribution_functions import three_part_powerlaw, Kroupa2001, Arenou2010_binary_fraction, raghavan2010_binary_fraction,imf_scalo1998, imf_scalo1986, imf_tinsley1980, imf_scalo1998, imf_chabrier2003, flatsections, duquennoy1991, sana12 +from binarycpython.utils.distribution_functions import ( + three_part_powerlaw, + Kroupa2001, + Arenou2010_binary_fraction, + raghavan2010_binary_fraction, + imf_scalo1998, + imf_scalo1986, + imf_tinsley1980, + imf_scalo1998, + imf_chabrier2003, + flatsections, + duquennoy1991, + sana12, +) from binarycpython.utils.useful_funcs import calc_sep_from_period + ################################################ # Example script to plot the available probability distributions. 
-# TODO: +# TODO: ################################################ # mass distribution plots @@ -16,7 +30,7 @@ from binarycpython.utils.useful_funcs import calc_sep_from_period # kroupa_probability = [Kroupa2001(mass) for mass in mass_values] # scalo1986 = [imf_scalo1986(mass) for mass in mass_values] -# tinsley1980 = [imf_tinsley1980(mass) for mass in mass_values] +# tinsley1980 = [imf_tinsley1980(mass) for mass in mass_values] # scalo1998 = [imf_scalo1998(mass) for mass in mass_values] # chabrier2003 = [imf_chabrier2003(mass) for mass in mass_values] @@ -80,51 +94,66 @@ duquennoy1991_distribution = [duquennoy1991(logper) for logper in logperiod_valu # Sana12 distributions m1 = 10 m2 = 5 -period_min = 10**0.15 -period_max = 10**5.5 - -sana12_distribution_q05 = [sana12(m1, - m2, - calc_sep_from_period(m1, m2, 10**logper), - 10**logper, - calc_sep_from_period(m1, m2, period_min), - calc_sep_from_period(m1, m2, period_max), - math.log10(period_min), - math.log10(period_max), - -0.55) for logper in logperiod_values] +period_min = 10 ** 0.15 +period_max = 10 ** 5.5 + +sana12_distribution_q05 = [ + sana12( + m1, + m2, + calc_sep_from_period(m1, m2, 10 ** logper), + 10 ** logper, + calc_sep_from_period(m1, m2, period_min), + calc_sep_from_period(m1, m2, period_max), + math.log10(period_min), + math.log10(period_max), + -0.55, + ) + for logper in logperiod_values +] m1 = 10 m2 = 1 -sana12_distribution_q01 = [sana12(m1, - m2, - calc_sep_from_period(m1, m2, 10**logper), - 10**logper, - calc_sep_from_period(m1, m2, period_min), - calc_sep_from_period(m1, m2, period_max), - math.log10(period_min), - math.log10(period_max), - -0.55) for logper in logperiod_values] +sana12_distribution_q01 = [ + sana12( + m1, + m2, + calc_sep_from_period(m1, m2, 10 ** logper), + 10 ** logper, + calc_sep_from_period(m1, m2, period_min), + calc_sep_from_period(m1, m2, period_max), + math.log10(period_min), + math.log10(period_max), + -0.55, + ) + for logper in logperiod_values +] m1 = 10 m2 = 10 -sana12_distribution_q1 = [sana12(m1, - m2, - calc_sep_from_period(m1, m2, 10**logper), - 10**logper, - calc_sep_from_period(m1, m2, period_min), - calc_sep_from_period(m1, m2, period_max), - math.log10(period_min), - math.log10(period_max), - -0.55) for logper in logperiod_values] - - -plt.plot(logperiod_values, duquennoy1991_distribution, label='Duquennoy & Mayor 1991') -plt.plot(logperiod_values, sana12_distribution_q05, label='Sana 12 (q=0.5)') -plt.plot(logperiod_values, sana12_distribution_q01, label='Sana 12 (q=0.1)') -plt.plot(logperiod_values, sana12_distribution_q1, label='Sana 12 (q=1)') -plt.title('Period distributions') -plt.ylabel(r'Probability') -plt.xlabel(r'Log10(orbital period)') +sana12_distribution_q1 = [ + sana12( + m1, + m2, + calc_sep_from_period(m1, m2, 10 ** logper), + 10 ** logper, + calc_sep_from_period(m1, m2, period_min), + calc_sep_from_period(m1, m2, period_max), + math.log10(period_min), + math.log10(period_max), + -0.55, + ) + for logper in logperiod_values +] + + +plt.plot(logperiod_values, duquennoy1991_distribution, label="Duquennoy & Mayor 1991") +plt.plot(logperiod_values, sana12_distribution_q05, label="Sana 12 (q=0.5)") +plt.plot(logperiod_values, sana12_distribution_q01, label="Sana 12 (q=0.1)") +plt.plot(logperiod_values, sana12_distribution_q1, label="Sana 12 (q=1)") +plt.title("Period distributions") +plt.ylabel(r"Probability") +plt.xlabel(r"Log10(orbital period)") plt.grid() plt.legend() plt.show() diff --git a/examples/example_population.py b/examples/example_population.py index 
e466c45507c6a02c39ab23b36fd713e6196d664c..4ee06d7d7bd669dd4b9f19bb02a3180059597723 100644 --- a/examples/example_population.py +++ b/examples/example_population.py @@ -8,7 +8,6 @@ from binarycpython.utils.grid import Population from binarycpython.utils.functions import get_help_all, get_help, create_hdf5 - ######################################################### # This file serves as an example for running a population. # The use of help(<function>) is a good way to inspect what parameters are there to use @@ -52,7 +51,7 @@ example_pop.set( ) # TODO: show when reading from file -example_pop.set_custom_logging() # TODO: remove this one +example_pop.set_custom_logging() # TODO: remove this one # Adding grid variables: example_pop.add_grid_variable( @@ -90,4 +89,4 @@ example_pop.export_all_info() # Wrapping up the results to an hdf5 file can be done by using the create_hdf5(<directory containing data and settings>) # This function takes the settings file (ending in _settings.json) and the data files (ending in .dat) from the data_dir # and packing them into an hdf5 file, which is then written into the same data_dir directory -create_hdf5(example_pop.custom_options["data_dir"]) \ No newline at end of file +create_hdf5(example_pop.custom_options["data_dir"]) diff --git a/setup.py b/setup.py index dd214f0ba785135ac4cde05643b75aedc1cffd6f..086900e4bbdce83840ef56beafaf9a5681dfc834 100644 --- a/setup.py +++ b/setup.py @@ -131,6 +131,7 @@ def readme(): with open("README.md") as f: return f.read() + ############################################################ # Making the extension function ############################################################ diff --git a/tests/population/global_variable_for_distributions.py b/tests/population/global_variable_for_distributions.py index 47b5d3f8aa8cdb531b6ee04469d15750eec842e5..e9114cbc6524c015fe00abddebe5f7b662082e53 100644 --- a/tests/population/global_variable_for_distributions.py +++ b/tests/population/global_variable_for_distributions.py @@ -6,6 +6,7 @@ powerlaw_const = None # Function/test to see the speed up of making values global so we dont have to calculate them each time # Theres only a factor ~2 increase here. 
+ def with_glob(): global powerlaw_const if not powerlaw_const: @@ -15,8 +16,9 @@ def with_glob(): # print('defined') return powerlaw_const + def without_glob(): - powerlaw_const = powerlaw_constant(10, 100, -2) + powerlaw_const = powerlaw_constant(10, 100, -2) return powerlaw_const @@ -36,6 +38,7 @@ def powerlaw_constant(min_val, max_val, k): powerlaw_const = k1 / (max_val ** k1 - min_val ** k1) return powerlaw_const + def powerlaw(min_val, max_val, k, x): """ Single powerlaw with index k at x from min to max @@ -47,7 +50,7 @@ def powerlaw(min_val, max_val, k, x): raise ValueError if (x < min_val) or (x > max_val): - print('value is out of bounds') + print("value is out of bounds") return 0 else: @@ -63,6 +66,7 @@ def powerlaw(min_val, max_val, k, x): # ) return y + def powerlaw_with(min_val, max_val, k, x): """ Single powerlaw with index k at x from min to max @@ -90,6 +94,7 @@ def powerlaw_with(min_val, max_val, k, x): # ) return y + steps = 1000000 start_time_without_glob = time.time() @@ -102,7 +107,11 @@ for i in range(steps): powerlaw_with(10, 100, -2, 20) stop_time_with_glob = time.time() -total_time_without = stop_time_without_glob-start_time_without_glob +total_time_without = stop_time_without_glob - start_time_without_glob total_time_with = stop_time_with_glob - start_time_with_glob -print("without: {}\nwith: {}\nRatio: {}".format(total_time_without, total_time_with, total_time_without/total_time_with)) +print( + "without: {}\nwith: {}\nRatio: {}".format( + total_time_without, total_time_with, total_time_without / total_time_with + ) +) diff --git a/tests/population/grid_tests.py b/tests/population/grid_tests.py index e439dc33865c3999c3b22e24524383fa9fe7a331..986fdca4dd0d51f3dec4cafd821d49dc37f23a6f 100644 --- a/tests/population/grid_tests.py +++ b/tests/population/grid_tests.py @@ -205,8 +205,6 @@ test_pop.set( # test_pop.set(extra_prob_function=ding) - - ### Cleaning up custom logging code # test_pop.set(C_logging_code='Printf("MY_STELLAR_DATA time=%g mass=%g radius=%g\\n", stardata->model.time, stardata->star[0].mass, stardata->star[0].radius);') @@ -214,19 +212,6 @@ test_pop.set( # quit() - - - - - - - - - - - - - ### Grid generating testing test_pop.add_grid_variable( name="lnm1", @@ -260,7 +245,7 @@ test_pop.generate_grid_code() test_pop.load_grid_function() -test_pop.write_binary_c_calls_to_file(output_dir='/home/david/Desktop') +test_pop.write_binary_c_calls_to_file(output_dir="/home/david/Desktop") # print(test_pop.grid_options["code_string"]) diff --git a/tests/population/multiprocessing_via_population_comparison.py b/tests/population/multiprocessing_via_population_comparison.py index 1548516af0edaae4371921d5d71c4fadef91c62d..bf66c753be15e77455437625743b0c2af4471a52 100644 --- a/tests/population/multiprocessing_via_population_comparison.py +++ b/tests/population/multiprocessing_via_population_comparison.py @@ -79,12 +79,12 @@ def parse_function(self, output): test_pop = Population() # test_pop.set( # C_logging_code=""" -# if(stardata->star[0].SN_type != SN_NONE) +# if(stardata->star[0].SN_type != SN_NONE) # { # if (stardata->model.time < stardata->model.max_evolution_time) # { # Printf("DAVID_SN %30.12e %g %g %g %d\\n", -# // +# // # stardata->model.time, // 1 # stardata->star[0].mass, //2 # stardata->previous_stardata->star[0].mass, //3 diff --git a/tests/population/plot_scaling.py b/tests/population/plot_scaling.py index b11d7fcd10b18c6743824f47248008920fe6ec89..fb72b1cf4c8f384e8c4ee25e06258413b89fa49d 100644 --- a/tests/population/plot_scaling.py +++ 
b/tests/population/plot_scaling.py @@ -1,4 +1,4 @@ -import matplotlib +import matplotlib import os import matplotlib.pyplot as plt import pandas as pd @@ -7,64 +7,84 @@ import numpy as np def calc_mean_and_std(arr): return np.mean(arr), np.std(arr) + + #### # Configure -result_file = 'comparison_result.dat' -result_file = 'comparison_result_astro1.dat' -name_testcase = 'Astro1' -img_dir = 'scaling_plots' +result_file = "comparison_result.dat" +result_file = "comparison_result_astro1.dat" +name_testcase = "Astro1" +img_dir = "scaling_plots" # Readout file results = [] -with open(result_file, 'r') as f: +with open(result_file, "r") as f: for line in f: res = list(eval(line.strip())) - if len(res)==6: - res.append(res[-2]/res[-1]) + if len(res) == 6: + res.append(res[-2] / res[-1]) results.append(res) # make dataframe -headers = ['cores', 'total_systems', 'total_time_sequentially', 'total_time_multiprocessing', 'ratio'] +headers = [ + "cores", + "total_systems", + "total_time_sequentially", + "total_time_multiprocessing", + "ratio", +] df = pd.DataFrame(results) df.columns = headers # Select unique amounts -unique_amt_cores = df['cores'].unique() -unique_amt_systems = df['total_systems'].unique() +unique_amt_cores = df["cores"].unique() +unique_amt_systems = df["total_systems"].unique() # Create dictionary with calculated means and stdevs etc calculated_results = [] for i in unique_amt_cores: for j in unique_amt_systems: - total_time_sequential_list, total_time_multiprocessing_list, total_ratio_list = [], [], [] + ( + total_time_sequential_list, + total_time_multiprocessing_list, + total_ratio_list, + ) = ([], [], []) for res in results: - if (res[0]==i) & (res[1]==j): + if (res[0] == i) & (res[1] == j): total_time_sequential_list.append(res[2]) total_time_multiprocessing_list.append(res[3]) total_ratio_list.append(res[4]) - if (total_time_sequential_list) and (total_time_multiprocessing_list) and (total_ratio_list): + if ( + (total_time_sequential_list) + and (total_time_multiprocessing_list) + and (total_ratio_list) + ): # calculate stuff - mean_time_sequential, std_sequential = calc_mean_and_std(np.array(total_time_sequential_list)) - mean_time_multiprocessing, std_multiprocessing = calc_mean_and_std(np.array(total_time_multiprocessing_list)) + mean_time_sequential, std_sequential = calc_mean_and_std( + np.array(total_time_sequential_list) + ) + mean_time_multiprocessing, std_multiprocessing = calc_mean_and_std( + np.array(total_time_multiprocessing_list) + ) mean_ratio, std_ratio = calc_mean_and_std(np.array(total_ratio_list)) # make dict res_dict = { - 'cores': i, - 'systems': j, - 'mean_time_sequential': mean_time_sequential, - 'mean_time_multiprocessing':mean_time_multiprocessing, - 'mean_ratio': mean_ratio, - 'std_sequential': std_sequential, - 'std_multiprocessing': std_multiprocessing, - 'std_ratio': std_ratio, - 'total_runs': len(total_time_sequential_list) - } + "cores": i, + "systems": j, + "mean_time_sequential": mean_time_sequential, + "mean_time_multiprocessing": mean_time_multiprocessing, + "mean_ratio": mean_ratio, + "std_sequential": std_sequential, + "std_multiprocessing": std_multiprocessing, + "std_ratio": std_ratio, + "total_runs": len(total_time_sequential_list), + } calculated_results.append(res_dict) @@ -73,35 +93,50 @@ for i in unique_amt_cores: x_position_shift = 0 y_position_shift = -0.05 max_speedup = 0 -# https://stackoverflow.com/questions/46323530/matplotlib-plot-two-x-axes-one-linear-and-one-with-logarithmic-ticks +# 
https://stackoverflow.com/questions/46323530/matplotlib-plot-two-x-axes-one-linear-and-one-with-logarithmic-ticks fig, ax1 = plt.subplots() ax2 = ax1.twinx() for amt_systems in unique_amt_systems: cores = [] speedup = [] - std = [] + std = [] efficiency = [] for el in calculated_results: - if el['systems']==amt_systems: + if el["systems"] == amt_systems: - cores.append(el['cores'] + x_position_shift) - speedup.append(el['mean_ratio']) - std.append(el['std_ratio']) - efficiency.append(el['mean_ratio']/el['cores']) + cores.append(el["cores"] + x_position_shift) + speedup.append(el["mean_ratio"]) + std.append(el["std_ratio"]) + efficiency.append(el["mean_ratio"] / el["cores"]) - if el['mean_ratio'] > max_speedup: - max_speedup = el['mean_ratio'] + if el["mean_ratio"] > max_speedup: + max_speedup = el["mean_ratio"] # add number of runs its based on - ax1.text(el['cores'] + x_position_shift+0.01, el['mean_ratio']+y_position_shift, el['total_runs']) - - ax1.errorbar(cores, speedup, std, linestyle='None', marker='^', label='Speed up & efficiency of {} systems'.format(amt_systems)) + ax1.text( + el["cores"] + x_position_shift + 0.01, + el["mean_ratio"] + y_position_shift, + el["total_runs"], + ) + + ax1.errorbar( + cores, + speedup, + std, + linestyle="None", + marker="^", + label="Speed up & efficiency of {} systems".format(amt_systems), + ) ax2.plot(cores, efficiency, alpha=0.5) x_position_shift += 0.1 -ax1.set_title("Speed up ratio vs amount of cores for different amounts of systems on {}".format(name_testcase)) +ax1.set_title( + "Speed up ratio vs amount of cores for different amounts of systems on {}".format( + name_testcase + ) +) ax1.set_xlabel("Amount of cores used") ax1.set_ylabel("Speed up ratio (time_linear/time_parallel)") @@ -111,9 +146,9 @@ ax2.set_ylim(0, 1) ax1.grid() ax1.legend(loc=4) -fig.savefig(os.path.join(img_dir, 'speedup_scaling_{}.{}'.format(name_testcase, 'png'))) -fig.savefig(os.path.join(img_dir, 'speedup_scaling_{}.{}'.format(name_testcase, 'pdf'))) -fig.savefig(os.path.join(img_dir, 'speedup_scaling_{}.{}'.format(name_testcase, 'eps'))) +fig.savefig(os.path.join(img_dir, "speedup_scaling_{}.{}".format(name_testcase, "png"))) +fig.savefig(os.path.join(img_dir, "speedup_scaling_{}.{}".format(name_testcase, "pdf"))) +fig.savefig(os.path.join(img_dir, "speedup_scaling_{}.{}".format(name_testcase, "eps"))) plt.show() ##### @@ -133,7 +168,7 @@ plt.show() # mean_times_linear = [] # mean_times_multiprocessing = [] -# std_linear = [] +# std_linear = [] # std_mp = [] # for el in calculated_results: @@ -166,4 +201,4 @@ plt.show() # fig.savefig(os.path.join(img_dir, 'total_time_scaling_{}.{}'.format(name_testcase, 'pdf'))) # fig.savefig(os.path.join(img_dir, 'total_time_scaling_{}.{}'.format(name_testcase, 'eps'))) -# plt.show() \ No newline at end of file +# plt.show()