diff --git a/binarycpython/utils/functions.py b/binarycpython/utils/functions.py index 715dbc0b53bb5520f9cfe320f48ba6f57b09168b..7d54cba16837a4f05f084fa7ab9819aae73b6dfc 100644 --- a/binarycpython/utils/functions.py +++ b/binarycpython/utils/functions.py @@ -22,6 +22,7 @@ import binary_c_python_api # utility functions ######################################################## + def verbose_print(message, verbosity, minimal_verbosity): """ Function that decides whether to print a message based on the current verbosity @@ -33,6 +34,7 @@ def verbose_print(message, verbosity, minimal_verbosity): if verbosity >= minimal_verbosity: print(message) + def remove_file(file, verbosity=0): """ Function to remove files but with verbosity @@ -46,6 +48,7 @@ def remove_file(file, verbosity=0): except FileNotFoundError as inst: print("Error while deleting file {}: {}".format(file, inst)) + def temp_dir(): """ Function to return the path the custom logging library shared object @@ -62,6 +65,7 @@ def temp_dir(): return path + def create_hdf5(data_dir, name): """ Function to create an hdf5 file from the contents of a directory: @@ -125,10 +129,12 @@ def create_hdf5(data_dir, name): hdf5_file.close() + ######################################################## # version_info functions ######################################################## + def return_binary_c_version_info(parsed=False): """ Function that returns the version information of binary_c @@ -141,6 +147,7 @@ def return_binary_c_version_info(parsed=False): return version_info + def parse_binary_c_version_info(version_info_string): """ Function that parses the binary_c version info. Length function with a lot of branches @@ -234,16 +241,19 @@ def parse_binary_c_version_info(version_info_string): return version_info_dict + ######################################################## # binary_c output functions ######################################################## + def output_lines(output): """ Function that outputs the lines that were recieved from the binary_c run. """ return output.splitlines() + def parse_output(output, selected_header): """ Function that parses output of binary_c: @@ -309,10 +319,12 @@ def parse_output(output, selected_header): return final_values_dict + ######################################################## # Argument and default value functions ######################################################## + def get_defaults(filter_values=False): """ Function that calls the binaryc get args function and cast it into a dictionary. @@ -335,6 +347,7 @@ def get_defaults(filter_values=False): return default_dict + def get_arg_keys(): """ Function that return the list of possible keys to give in the arg string @@ -342,6 +355,7 @@ def get_arg_keys(): return get_defaults().keys() + def filter_arg_dict(arg_dict): """ Function to filter out keys that contain values included in ['NULL', 'Function', ''] @@ -357,6 +371,7 @@ def filter_arg_dict(arg_dict): return new_dict + def create_arg_string(arg_dict, sort=False, filter_values=False): """ Function that creates the arg string for binary_c. @@ -377,10 +392,12 @@ def create_arg_string(arg_dict, sort=False, filter_values=False): arg_string = arg_string.strip() return arg_string + ######################################################## # Help functions ######################################################## + def get_help(param_name="", print_help=True, fail_silently=False): """ Function that returns the help info for a given parameter. 
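# For reference, a minimal self-contained sketch of what filter_arg_dict / create_arg_string
# above are documented to do: values of 'NULL', 'Function' or '' are dropped and the remaining
# key/value pairs are flattened into a "key value" argument string for binary_c.  The helper
# names, the example dict and the exact output format are illustrative assumptions; the real
# functions operate on the dict returned by get_defaults().
def _filter_arg_dict(arg_dict):
    # drop the placeholder values listed in the filter_arg_dict docstring
    return {k: v for k, v in arg_dict.items() if v not in ["NULL", "Function", ""]}

def _create_arg_string(arg_dict, sort=False):
    # optionally sort the keys, then join into a space-separated argument string
    keys = sorted(arg_dict) if sort else list(arg_dict)
    return " ".join("{} {}".format(key, arg_dict[key]) for key in keys)

print(_create_arg_string(_filter_arg_dict({"M_1": 15.0, "metallicity": 0.02, "unused": "NULL"}), sort=True))
# illustrative output: M_1 15.0 metallicity 0.02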
@@ -479,6 +496,7 @@ def get_help(param_name="", print_help=True, fail_silently=False): ) return None + def get_help_all(print_help=True): """ Function that reads out the output of the help_all api call to binary_c @@ -585,6 +603,7 @@ def get_help_all(print_help=True): return help_all_dict + def get_help_super(print_help=False, fail_silently=True): """ Function that first runs get_help_all, and then per argument also run @@ -597,7 +616,7 @@ def get_help_super(print_help=False, fail_silently=True): section = help_all_dict[section_name] print(section_name) for parameter_name in section["parameters"].keys(): - print("\t",parameter_name) + print("\t", parameter_name) help_all_super_dict = help_all_dict.copy() @@ -612,9 +631,7 @@ def get_help_super(print_help=False, fail_silently=True): # Get detailed help info detailed_help = get_help( - parameter_name, - print_help=False, - fail_silently=fail_silently, + parameter_name, print_help=False, fail_silently=fail_silently, ) if detailed_help: @@ -638,16 +655,17 @@ def get_help_super(print_help=False, fail_silently=True): section["parameters"][parameter_name] = parameter - if print_help: print(json.dumps(help_all_super_dict, indent=4)) return help_all_super_dict + ######################################################## # logfile functions ######################################################## + def load_logfile(logfile): """ Experimental function that parses the generated logfile of binary_c. @@ -687,10 +705,12 @@ def load_logfile(logfile): print(event_list) + ######################################################## # Ensemble dict functions ######################################################## + def inspect_dict(dict_1, indent=0, print_structure=True): """ Function to inspect a dict. @@ -705,9 +725,9 @@ def inspect_dict(dict_1, indent=0, print_structure=True): for key, value in dict_1.items(): structure_dict[key] = type(value) if print_structure: - print("\t"*indent, key, type(value)) + print("\t" * indent, key, type(value)) if isinstance(value, dict): - structure_dict[key] = inspect_dict(value, indent=indent+1) + structure_dict[key] = inspect_dict(value, indent=indent + 1) return structure_dict @@ -760,8 +780,11 @@ def merge_dicts(dict_1, dict_2): for key in overlapping_keys: # See whether the types are actually the same if not type(dict_1[key]) is type(dict_2[key]): - print("Error {} and {} are not of the same type and cannot be merged".format( - dict_1[key], dict_2[key])) + print( + "Error {} and {} are not of the same type and cannot be merged".format( + dict_1[key], dict_2[key] + ) + ) raise ValueError # Here we check for the cases that we want to explicitly catch. Ints will be added, @@ -785,8 +808,11 @@ def merge_dicts(dict_1, dict_2): new_dict[key] = merge_dicts(dict_1[key], dict_2[key]) else: - print("Object types {},{} not supported".format( - type(dict_1[key]), type(dict_2[key]))) + print( + "Object types {},{} not supported".format( + type(dict_1[key]), type(dict_2[key]) + ) + ) # return new_dict @@ -797,7 +823,9 @@ class binarycDecoder(json.JSONDecoder): """ def decode(self, s): - result = super().decode(s) # result = super(Decoder, self).decode(s) for Python 2.x + result = super().decode( + s + ) # result = super(Decoder, self).decode(s) for Python 2.x return self._decode(result) def _decode(self, o): @@ -808,7 +836,6 @@ class binarycDecoder(json.JSONDecoder): The try except might be a somewhat rough solution but it catches all cases. 
""" - # Check if we can turn it into a float # if isinstance(o, str) or isinstance(o, unicode): if isinstance(o, str): @@ -823,6 +850,7 @@ class binarycDecoder(json.JSONDecoder): else: return o + def binaryc_json_serializer(obj): """ Custom serializer for binary_c to use when functions are present in the dictionary diff --git a/binarycpython/utils/grid.py b/binarycpython/utils/grid.py index 4d84ae5ff6e66f98e64b2237fd87872cf86ed803..ed1b651afe988f44f0719a52f4d1c54e31210c5f 100644 --- a/binarycpython/utils/grid.py +++ b/binarycpython/utils/grid.py @@ -38,7 +38,7 @@ from binarycpython.utils.functions import ( binaryc_json_serializer, verbose_print, binarycDecoder, - merge_dicts + merge_dicts, ) import binary_c_python_api @@ -88,7 +88,6 @@ class Population: # Set some memory dicts self.persistent_data_memory_dict = {} - ################################################### # Argument functions ################################################### @@ -132,23 +131,44 @@ class Population: """ # Select the params that end with %d - special_params = [el for el in list(self.defaults.keys()) if el.endswith('%d')] + special_params = [el for el in list(self.defaults.keys()) if el.endswith("%d")] # Go over all the input for key in kwargs: # Filter out keys for the bse_options if key in self.defaults.keys(): - verbose_print("adding: {}={} to BSE_options".format(key, kwargs[key]), self.grid_options["verbosity"], 1) + verbose_print( + "adding: {}={} to BSE_options".format(key, kwargs[key]), + self.grid_options["verbosity"], + 1, + ) self.bse_options[key] = kwargs[key] # Extra check to check if the key fits one of parameter names that end with %d - elif any([True if (key.startswith(param[:-2]) and len(param[:-2]) < len(key)) else False for param in special_params]): - verbose_print("adding: {}={} to BSE_options by catching the %d".format(key, kwargs[key]), self.grid_options["verbosity"], 1) + elif any( + [ + True + if (key.startswith(param[:-2]) and len(param[:-2]) < len(key)) + else False + for param in special_params + ] + ): + verbose_print( + "adding: {}={} to BSE_options by catching the %d".format( + key, kwargs[key] + ), + self.grid_options["verbosity"], + 1, + ) self.bse_options[key] = kwargs[key] # Filter out keys for the grid_options elif key in self.grid_options.keys(): - verbose_print("adding: {}={} to grid_options".format(key, kwargs[key]), self.grid_options["verbosity"], 1) + verbose_print( + "adding: {}={} to grid_options".format(key, kwargs[key]), + self.grid_options["verbosity"], + 1, + ) self.grid_options[key] = kwargs[key] # The of the keys go into a custom_options dict @@ -178,7 +198,11 @@ class Population: # How its set up now is that as input you need to give --cmdline "metallicity=0.002" # Its checked if this exists and handled accordingly. if args.cmdline: - verbose_print("Found cmdline args. Parsing them now", self.grid_options["verbosity"], 1) + verbose_print( + "Found cmdline args. Parsing them now", + self.grid_options["verbosity"], + 1, + ) # Grab the input and split them up, while accepting only non-empty entries cmdline_args = args.cmdline @@ -227,17 +251,17 @@ class Population: pass def add_grid_variable( - self, - name, - longname, - valuerange, - resolution, - spacingfunc, - probdist, - dphasevol, - parameter_name, - precode=None, - condition=None, + self, + name, + longname, + valuerange, + resolution, + spacingfunc, + probdist, + dphasevol, + parameter_name, + precode=None, + condition=None, ): """spec Function to add grid variables to the grid_options. 
@@ -295,7 +319,11 @@ class Population: # Load it into the grid_options self.grid_options["grid_variables"][grid_variable["name"]] = grid_variable - verbose_print("Added grid variable: {}".format(json.dumps(grid_variable, indent=4)), self.grid_options["verbosity"], 1) + verbose_print( + "Added grid variable: {}".format(json.dumps(grid_variable, indent=4)), + self.grid_options["verbosity"], + 1, + ) ################################################### # Return functions @@ -336,11 +364,11 @@ class Population: return self.defaults def return_all_info( - self, - include_population_settings=True, - include_binary_c_defaults=True, - include_binary_c_version_info=True, - include_binary_c_help_all=True, + self, + include_population_settings=True, + include_binary_c_defaults=True, + include_binary_c_version_info=True, + include_binary_c_help_all=True, ): """ Function that returns all the information about the population and binary_c @@ -370,13 +398,13 @@ class Population: return all_info def export_all_info( - self, - use_datadir=True, - outfile=None, - include_population_settings=True, - include_binary_c_defaults=True, - include_binary_c_version_info=True, - include_binary_c_help_all=True, + self, + use_datadir=True, + outfile=None, + include_population_settings=True, + include_binary_c_defaults=True, + include_binary_c_version_info=True, + include_binary_c_help_all=True, ): """ Function that exports the all_info to a json file @@ -419,18 +447,32 @@ class Population: self.custom_options["data_dir"], settings_name ) - verbose_print("Writing settings to {}".format(settings_fullname), self.grid_options["verbosity"], 1) + verbose_print( + "Writing settings to {}".format(settings_fullname), + self.grid_options["verbosity"], + 1, + ) # if not outfile.endswith('json'): with open(settings_fullname, "w") as file: file.write( - json.dumps(all_info_cleaned, indent=4, default=binaryc_json_serializer) + json.dumps( + all_info_cleaned, indent=4, default=binaryc_json_serializer + ) ) else: - verbose_print("Writing settings to {}".format(outfile), self.grid_options["verbosity"], 1) + verbose_print( + "Writing settings to {}".format(outfile), + self.grid_options["verbosity"], + 1, + ) # if not outfile.endswith('json'): with open(outfile, "w") as file: - file.write(json.dumps(all_info_cleaned, indent=4, default=binaryc_json_serializer)) + file.write( + json.dumps( + all_info_cleaned, indent=4, default=binaryc_json_serializer + ) + ) def set_custom_logging(self): """ @@ -439,7 +481,11 @@ class Population: """ # C_logging_code gets priority of C_autogen_code - verbose_print("Creating and loading custom logging functionality", self.grid_options["verbosity"], 1) + verbose_print( + "Creating and loading custom logging functionality", + self.grid_options["verbosity"], + 1, + ) if self.grid_options["C_logging_code"]: # Generate entire shared lib code around logging lines custom_logging_code = binary_c_log_code( @@ -486,9 +532,17 @@ class Population: """ for thread_nr in self.grid_options["amt_cores"]: - persistent_data_memaddr = binary_c_python_api.binary_c_return_persistent_data_memaddr() + persistent_data_memaddr = ( + binary_c_python_api.binary_c_return_persistent_data_memaddr() + ) self.persistent_data_memory_dict[thread_nr] = persistent_data_memaddr - verbose_print("Created the following dict with persistent memaddresses: {}".format(self.persistent_data_memory_dict), self.grid_options["verbosity"], 1) + verbose_print( + "Created the following dict with persistent memaddresses: {}".format( + 
self.persistent_data_memory_dict + ), + self.grid_options["verbosity"], + 1, + ) def free_persistent_data_memory_and_combine_results_and_output(self): """ @@ -503,22 +557,39 @@ class Population: for key in self.persistent_data_memory_dict: persistent_data_memaddr = self.persistent_data_memory_dict[key] - verbose_print("Freeing {} (thread {})and merging output to combined dict".format(persistent_data_memaddr, key), self.grid_options["verbosity"], 1) + verbose_print( + "Freeing {} (thread {})and merging output to combined dict".format( + persistent_data_memaddr, key + ), + self.grid_options["verbosity"], + 1, + ) # Get the output and decode it correctly to get the numbers correct - ensemble_json_output = binary_c_python_api.binary_c_free_persistent_data_memaddr_and_return_json_output(persistent_data_memaddr) - parsed_json = json.loads(ensemble_json_output.splitlines()[0][len("ENSEMBLE_JSON "):], cls=binarycDecoder) + ensemble_json_output = binary_c_python_api.binary_c_free_persistent_data_memaddr_and_return_json_output( + persistent_data_memaddr + ) + parsed_json = json.loads( + ensemble_json_output.splitlines()[0][len("ENSEMBLE_JSON ") :], + cls=binarycDecoder, + ) # Combine the output with the main output combined_ensemble_json = merge_dicts(combined_ensemble_json, parsed_json) # Write results to file. # TODO: Make sure everything is checked beforehand - full_output_filename = os.path.join(self.custom_options["data_dir"], self.custom_options["ensemble_output_name"]) - verbose_print("Writing ensemble output to {}".format(full_output_filename), self.grid_options["verbosity"], 1) + full_output_filename = os.path.join( + self.custom_options["data_dir"], self.custom_options["ensemble_output_name"] + ) + verbose_print( + "Writing ensemble output to {}".format(full_output_filename), + self.grid_options["verbosity"], + 1, + ) # Output to dir: - with open(full_output_filename, 'w') as output_file: + with open(full_output_filename, "w") as output_file: output_file.write(json.dumps(combined_ensemble_json, indent=4)) ################################################### @@ -559,7 +630,11 @@ class Population: ## check the settings: if self.bse_options["ensemble"] == 1: if not self.bse_options["ensemble_defer"] == 1: - verbose_print("Error, if you want to run an ensemble in a population, the output needs to be deferred", self.grid_options["verbosity"], 0) + verbose_print( + "Error, if you want to run an ensemble in a population, the output needs to be deferred", + self.grid_options["verbosity"], + 0, + ) raise ValueError ####################### @@ -628,7 +703,9 @@ class Population: # Unload functions # Unload store - binary_c_python_api.binary_c_free_store_memaddr(self.grid_options["store_memaddr"]) + binary_c_python_api.binary_c_free_store_memaddr( + self.grid_options["store_memaddr"] + ) def evolve_system_mp(self, binary_cmdline_string): """ @@ -677,7 +754,7 @@ class Population: # Get argument line argline = self.return_argline(self.bse_options) - verbose_print("Running {}".format(argline), self.grid_options['verbosity'], 1) + verbose_print("Running {}".format(argline), self.grid_options["verbosity"], 1) # Run system out = binary_c_python_api.run_system( @@ -775,8 +852,8 @@ class Population: # Evolve systems: via grid_options one can choose to do this linearly, or # multiprocessing method. 
if ( - self.grid_options["evolution_type"] - in self.grid_options["evolution_type_options"] + self.grid_options["evolution_type"] + in self.grid_options["evolution_type_options"] ): if self.grid_options["evolution_type"] == "mp": self.evolve_population_mp() @@ -880,8 +957,8 @@ class Population: code_string += indent * depth + "# setting probability lists\n" # Prepare the probability for grid_variable_el in sorted( - self.grid_options["grid_variables"].items(), - key=lambda x: x[1]["grid_variable_number"], + self.grid_options["grid_variables"].items(), + key=lambda x: x[1]["grid_variable_number"], ): # Make probabilities dict grid_variable = grid_variable_el[1] @@ -896,10 +973,10 @@ class Population: # Generate code print("Generating grid code") for loopnr, grid_variable_el in enumerate( - sorted( - self.grid_options["grid_variables"].items(), - key=lambda x: x[1]["grid_variable_number"], - ) + sorted( + self.grid_options["grid_variables"].items(), + key=lambda x: x[1]["grid_variable_number"], + ) ): print("Constructing/adding: {}".format(grid_variable_el[0])) grid_variable = grid_variable_el[1] @@ -1142,11 +1219,11 @@ class Population: # this has to go in a reverse order: # Here comes the stuff that is put after the deepest nested part that calls returns stuff. for loopnr, grid_variable_el in enumerate( - sorted( - self.grid_options["grid_variables"].items(), - key=lambda x: x[1]["grid_variable_number"], - reverse=True, - ) + sorted( + self.grid_options["grid_variables"].items(), + key=lambda x: x[1]["grid_variable_number"], + reverse=True, + ) ): grid_variable = grid_variable_el[1] code_string += indent * (depth + 1) + "#" * 40 + "\n" @@ -1185,7 +1262,9 @@ class Population: # Stop of code generation. Here the code is saved and written # Save the gridcode to the grid_options - verbose_print("Saving grid code to grid_options", self.grid_options["verbosity"], 1) + verbose_print( + "Saving grid code to grid_options", self.grid_options["verbosity"], 1 + ) self.grid_options["code_string"] = code_string @@ -1195,7 +1274,11 @@ class Population: ) self.grid_options["gridcode_filename"] = gridcode_filename - verbose_print("Writing grid code to {}".format(gridcode_filename), self.grid_options["verbosity"], 1) + verbose_print( + "Writing grid code to {}".format(gridcode_filename), + self.grid_options["verbosity"], + 1, + ) with open(gridcode_filename, "w") as file: file.write(code_string) @@ -1211,7 +1294,9 @@ class Population: message="Loading grid code function from {}".format( self.grid_options["gridcode_filename"] ), - verbosity=self.grid_options["verbosity"], minimal_verbosity=1) + verbosity=self.grid_options["verbosity"], + minimal_verbosity=1, + ) spec = importlib.util.spec_from_file_location( "binary_c_python_grid", @@ -1290,7 +1375,7 @@ class Population: ################################################### def write_binary_c_calls_to_file( - self, output_dir=None, output_filename=None, include_defaults=False + self, output_dir=None, output_filename=None, include_defaults=False ): """ Function that loops over the gridcode and writes the generated parameters to a file. @@ -1386,7 +1471,11 @@ class Population: """ if evol_type == "single": - verbose_print("Cleaning up the custom logging stuff. type: single", self.grid_options["verbosity"], 1) + verbose_print( + "Cleaning up the custom logging stuff. 
type: single", + self.grid_options["verbosity"], + 1, + ) # TODO: Unset custom logging code @@ -1401,7 +1490,11 @@ class Population: ) if evol_type == "population": - verbose_print("Cleaning up the custom logging stuffs. type: population", self.grid_options["verbosity"], 1) + verbose_print( + "Cleaning up the custom logging stuffs. type: population", + self.grid_options["verbosity"], + 1, + ) # TODO: make sure that these also work. not fully sure if necessary tho. # whether its a single file, or a dict of files/memaddresses @@ -1422,7 +1515,6 @@ class Population: """ self.grid_options["count"] += 1 - def set_loggers(self): """ Function to set the loggers for the execution of the grid @@ -1435,7 +1527,7 @@ class Population: os.makedirs(os.path.dirname(binary_c_logfile), exist_ok=True) # Set up logger - self.logger = logging.getLogger('binary_c_python_logger') + self.logger = logging.getLogger("binary_c_python_logger") self.logger.setLevel(self.grid_options["verbosity"]) # Reset handlers @@ -1443,14 +1535,12 @@ class Population: # Set formatting of output log_formatter = logging.Formatter( - '%(asctime)s - %(name)s - %(levelname)s - %(message)s' + "%(asctime)s - %(name)s - %(levelname)s - %(message)s" ) # Make and add filehandlers # make handler for output to file - handler_file = logging.FileHandler( - filename=os.path.join(binary_c_logfile) - ) + handler_file = logging.FileHandler(filename=os.path.join(binary_c_logfile)) handler_file.setFormatter(log_formatter) handler_file.setLevel(logging.INFO) @@ -1463,10 +1553,6 @@ class Population: self.logger.addHandler(handler_file) self.logger.addHandler(handler_stdout) - - - - # def join_result_dicts(self): # """ # Function to join the result dictionaries diff --git a/binarycpython/utils/grid_options_defaults.py b/binarycpython/utils/grid_options_defaults.py index bed3b9fabf5ef1a4e80d0a632bd2be389424bd63..ea33f57d41e981fb4514b7eca1025dec30e2a125 100644 --- a/binarycpython/utils/grid_options_defaults.py +++ b/binarycpython/utils/grid_options_defaults.py @@ -19,8 +19,10 @@ grid_options_defaults_dict = { ########################## # Execution log: ########################## - "verbosity": 0, # Level of verbosity of the simulation. 0=INFO, - "log_file": os.path.join(temp_dir(), 'binary_c_python.log'), # Set to None to not log to file. The directory will be created + "verbosity": 0, # Level of verbosity of the simulation. 0=INFO, + "log_file": os.path.join( + temp_dir(), "binary_c_python.log" + ), # Set to None to not log to file. The directory will be created ########################## # binary_c files ########################## @@ -47,7 +49,7 @@ grid_options_defaults_dict = { # Store pre-loading: ########################## "store_memaddr": -1, # Contains the store object memory adress, useful for preloading. - # defaults to -1 and isnt used if thats the default then. + # defaults to -1 and isnt used if thats the default then. ########################## # Log args: logging of arguments ########################## @@ -67,7 +69,7 @@ grid_options_defaults_dict = { "population_type": "grid", # "population_type_options": [ "grid", - ], # TODO: fill later with monte carlo, source file etc + ], # Available choices for type of population generation # TODO: fill later with monte carlo, source file etc "count": 0, # total count of systems "probtot": 0, # total probability "weight": 1.0, # weighting for the probability @@ -82,7 +84,7 @@ grid_options_defaults_dict = { "modulo": 1, # run modulo n of the grid. 
## Grid type evolution "grid_variables": {}, # grid variables - "grid_code": None, # literal grid code + "grid_code": None, # literal grid code: contains the whole script that'll be written to a file "gridcode_filename": None, # filename of gridcode ## Monte carlo type evolution # TODO: make MC options @@ -135,9 +137,9 @@ grid_options_defaults_dict = { # # which means it allocates all the CPUs in a node to the job # slurm_control_CPUs=>0, # if so, leave this many for Perl control (0) # slurm_array=>undef,# override for --array, useful for rerunning jobs - # ######################################## - # # Condor stuff - # ######################################## + ######################################## + # Condor stuff + ######################################## # condor=>0, # 1 to use condor, 0 otherwise # condor_command=>'',# condor command e.g. "run_flexigrid", # # "join_datafiles" diff --git a/binarycpython/utils/plot_functions.py b/binarycpython/utils/plot_functions.py index b6186bc03e1440e048fbd76b07b0f934cf10dcf4..e18d26a50e7483711a3f71e73fc1b3e5889fefc4 100644 --- a/binarycpython/utils/plot_functions.py +++ b/binarycpython/utils/plot_functions.py @@ -78,10 +78,14 @@ Printf("HR_PLOTTING %30.12e %d %d %g %g %g %g %g %g\\n", ); """ + def color_by_index(row, column, colors): return colors[int(row[column])] -def plot_HR_diagram(df, show_stellar_types=False, show_plot=True, use_astropy_values=True): + +def plot_HR_diagram( + df, show_stellar_types=False, show_plot=True, use_astropy_values=True +): """ Function to plot the HR diagram evolution of the system. Assumes its a binary system. @@ -114,14 +118,22 @@ def plot_HR_diagram(df, show_stellar_types=False, show_plot=True, use_astropy_va R_SUN = const.R_sun.cgs.value L_SUN = const.L_sun.cgs.value omega_sb = const.sigma_sb.cgs.value - print('Using astropy values: R_SUN= {} L_SUN = {} omega_sb = {}'.format(R_SUN, L_SUN, omega_sb)) + print( + "Using astropy values: R_SUN= {} L_SUN = {} omega_sb = {}".format( + R_SUN, L_SUN, omega_sb + ) + ) else: - R_SUN = 6.956600000000000000000000000000e+10 - L_SUN = 3.851500000000000274321803705319e+33 + R_SUN = 6.956600000000000000000000000000e10 + L_SUN = 3.851500000000000274321803705319e33 omega_sb = 5.670352798655924736991040813194e-05 - print('Using binary_c values: R_SUN= {} L_SUN = {} omega_sb = {}'.format(R_SUN, L_SUN, omega_sb)) + print( + "Using binary_c values: R_SUN= {} L_SUN = {} omega_sb = {}".format( + R_SUN, L_SUN, omega_sb + ) + ) - prefactor = (1/(4 * math.pi * omega_sb))**(1.0/4) + prefactor = (1 / (4 * math.pi * omega_sb)) ** (1.0 / 4) if show_stellar_types: fig, ax = plt.subplots(ncols=1, nrows=1, figsize=(20, 20)) @@ -129,46 +141,62 @@ def plot_HR_diagram(df, show_stellar_types=False, show_plot=True, use_astropy_va else: fig, ax = plt.subplots(ncols=1, nrows=1, figsize=(20, 20)) - df = df.assign(teff_1=prefactor * ((df['luminosity_1'] * L_SUN)/((df['radius_1'] * R_SUN) ** 2)) ** (1.0/4)) - df = df.assign(teff_2=prefactor * ((df['luminosity_2'] * L_SUN)/((df['radius_2'] * R_SUN) ** 2)) ** (1.0/4)) + df = df.assign( + teff_1=prefactor + * ((df["luminosity_1"] * L_SUN) / ((df["radius_1"] * R_SUN) ** 2)) ** (1.0 / 4) + ) + df = df.assign( + teff_2=prefactor + * ((df["luminosity_2"] * L_SUN) / ((df["radius_2"] * R_SUN) ** 2)) ** (1.0 / 4) + ) - # # Add colors to dataframe - # df = df.assign(colors_1=df.apply(color_by_index, axis=1, args=('stellar_type_1', colors))) - # df = df.assign(colors_2=df.apply(color_by_index, axis=1, args=('stellar_type_2', colors))) + # # Add colors to dataframe + 
# df = df.assign(colors_1=df.apply(color_by_index, axis=1, args=('stellar_type_1', colors))) + # df = df.assign(colors_2=df.apply(color_by_index, axis=1, args=('stellar_type_2', colors))) # Star 1: fig.axes[0].scatter( - df['teff_1'], - df['luminosity_1'], - label='Star 1 (M={})'.format(df['pms_mass_1'].values.tolist()[0]), - # color=df['colors_1'] - ) + df["teff_1"], + df["luminosity_1"], + label="Star 1 (M={})".format(df["pms_mass_1"].values.tolist()[0]), + # color=df['colors_1'] + ) # Star 2: fig.axes[0].scatter( - df['teff_2'], - df['luminosity_2'], - label='Star 2 (M={})'.format(df['pms_mass_2'].values.tolist()[0]), - # color=df['colors_2'] - ) + df["teff_2"], + df["luminosity_2"], + label="Star 2 (M={})".format(df["pms_mass_2"].values.tolist()[0]), + # color=df['colors_2'] + ) margin_fraction_x = 0.1 margin_fraction_y = 0.9 # Fix axes - min_y, max_y = df[['luminosity_1', 'luminosity_2']].min().min(), df[['luminosity_1', 'luminosity_2']].max().max() - min_x, max_x = df[['teff_1', 'teff_2']].min().min(), df[['teff_1', 'teff_2']].max().max() - fig.axes[0].set_xlim(min_x * (1-margin_fraction_x), max_x * (1 + margin_fraction_x)) - fig.axes[0].set_ylim(min_y * (1-margin_fraction_y), max_y * (1 + margin_fraction_y)) - fig.axes[0].set_yscale('log') - fig.axes[0].set_xscale('log') + min_y, max_y = ( + df[["luminosity_1", "luminosity_2"]].min().min(), + df[["luminosity_1", "luminosity_2"]].max().max(), + ) + min_x, max_x = ( + df[["teff_1", "teff_2"]].min().min(), + df[["teff_1", "teff_2"]].max().max(), + ) + fig.axes[0].set_xlim( + min_x * (1 - margin_fraction_x), max_x * (1 + margin_fraction_x) + ) + fig.axes[0].set_ylim( + min_y * (1 - margin_fraction_y), max_y * (1 + margin_fraction_y) + ) + fig.axes[0].set_yscale("log") + fig.axes[0].set_xscale("log") fig.axes[0].invert_xaxis() # Other stuff - fig.axes[0].set_title('HR diagram') - fig.axes[0].legend(loc='best') - fig.axes[0].set_ylabel(r'Luminosity [$L_{star}$/$L_{\odot}$]') - fig.axes[0].set_xlabel(r'$T_{eff}$ (K)') + fig.axes[0].set_title("HR diagram") + fig.axes[0].legend(loc="best") + fig.axes[0].set_ylabel(r"Luminosity [$L_{star}$/$L_{\odot}$]") + fig.axes[0].set_xlabel(r"$T_{eff}$ (K)") # Show or return if show_plot: @@ -176,6 +204,7 @@ def plot_HR_diagram(df, show_stellar_types=False, show_plot=True, use_astropy_va else: return fig + def plot_orbit(df, show_stellar_types=False, show_plot=True): """ Function to plot the orbital elements of the system @@ -190,33 +219,31 @@ def plot_orbit(df, show_stellar_types=False, show_plot=True): fig, ax = plt.subplots(ncols=1, nrows=4, figsize=(20, 10)) fig.subplots_adjust(hspace=0) else: - fig, ax = plt.subplots(ncols=1, nrows=3, figsize=(20, 10), sharex=True) + fig, ax = plt.subplots(ncols=1, nrows=3, figsize=(20, 10), sharex=True) fig.subplots_adjust(hspace=0) + # + fig.axes[0].plot(df["time"], df["orbital_period"], label="Orbital period") - # - fig.axes[0].plot(df['time'], df['orbital_period'], label='Orbital period') - - fig.axes[1].plot(df['time'], df['separation'], label='Separation orbit') - - fig.axes[2].plot(df['time'], df['eccentricity'], label='Eccentricity orbit') + fig.axes[1].plot(df["time"], df["separation"], label="Separation orbit") + fig.axes[2].plot(df["time"], df["eccentricity"], label="Eccentricity orbit") # Make up - fig.axes[0].set_title('Orbital elements evolution') + fig.axes[0].set_title("Orbital elements evolution") - fig.axes[0].legend(loc='best') - fig.axes[1].legend(loc='best') - fig.axes[2].legend(loc='best') + fig.axes[0].legend(loc="best") + 
fig.axes[1].legend(loc="best") + fig.axes[2].legend(loc="best") # fig.axes[0].set_ylim(0, 1.1*max_total_mass) - fig.axes[0].set_ylabel(r'Orbital period') - fig.axes[1].set_ylabel(r'Separation') - fig.axes[2].set_ylabel(r'Eccentricity') - fig.axes[0].set_yscale('log') - fig.axes[1].set_yscale('log') + fig.axes[0].set_ylabel(r"Orbital period") + fig.axes[1].set_ylabel(r"Separation") + fig.axes[2].set_ylabel(r"Eccentricity") + fig.axes[0].set_yscale("log") + fig.axes[1].set_yscale("log") - fig.axes[2].set_xlabel(r'Time (Myr)') + fig.axes[2].set_xlabel(r"Time (Myr)") # Show or return if show_plot: @@ -224,6 +251,7 @@ def plot_orbit(df, show_stellar_types=False, show_plot=True): else: return fig + def plot_masses(df, show_stellar_types=False, show_plot=True): """ Function to plot the masses of the system. @@ -251,60 +279,50 @@ def plot_masses(df, show_stellar_types=False, show_plot=True): else: fig, ax = plt.subplots(ncols=1, nrows=1, figsize=(20, 10)) - max_total_mass = df['pms_mass_1'].values.tolist()[0] + df['pms_mass_2'].values.tolist()[0] + max_total_mass = ( + df["pms_mass_1"].values.tolist()[0] + df["pms_mass_2"].values.tolist()[0] + ) - df['total_mass_temp'] = df['mass_1'] + df['mass_2'] - df['pms_total_mass_temp'] = max_total_mass + df["total_mass_temp"] = df["mass_1"] + df["mass_2"] + df["pms_total_mass_temp"] = max_total_mass # total mass - fig.axes[0].plot( - df['time'], - df['total_mass_temp'], - label='Total mass' - ) + fig.axes[0].plot(df["time"], df["total_mass_temp"], label="Total mass") fig.axes[0].axhline( - df['pms_total_mass_temp'].values.tolist()[0], - label='Initial total mass', - linestyle='--', - alpha=0.5 + df["pms_total_mass_temp"].values.tolist()[0], + label="Initial total mass", + linestyle="--", + alpha=0.5, ) # Mass 1 - fig.axes[0].plot( - df['time'], - df['mass_1'], - label='star 1' - ) + fig.axes[0].plot(df["time"], df["mass_1"], label="star 1") fig.axes[0].axhline( - df['pms_mass_1'].values.tolist()[0], - color='red', - linestyle='--', + df["pms_mass_1"].values.tolist()[0], + color="red", + linestyle="--", linewidth=2, - label='Initial mass 1', - alpha=0.5 + label="Initial mass 1", + alpha=0.5, ) # mass 2 - fig.axes[0].plot( - df['time'], - df['mass_2'], - color='orange', - label='star 2') + fig.axes[0].plot(df["time"], df["mass_2"], color="orange", label="star 2") fig.axes[0].axhline( - df['pms_mass_2'].values.tolist()[0], - color='red', - linestyle='--', + df["pms_mass_2"].values.tolist()[0], + color="red", + linestyle="--", linewidth=2, - label='Initial mass 2', - alpha=0.5 + label="Initial mass 2", + alpha=0.5, ) # Make up - fig.axes[0].set_title('Stellar mass evolution') - fig.axes[0].legend(loc='best') - fig.axes[0].set_ylim(0, 1.1*max_total_mass) - fig.axes[0].set_ylabel(r'Mass [$M_{\odot}$]') - fig.axes[0].set_xlabel(r'Time (Myr)') + fig.axes[0].set_title("Stellar mass evolution") + fig.axes[0].legend(loc="best") + fig.axes[0].set_ylim(0, 1.1 * max_total_mass) + fig.axes[0].set_ylabel(r"Mass [$M_{\odot}$]") + fig.axes[0].set_xlabel(r"Time (Myr)") # Show or return if show_plot: @@ -318,6 +336,7 @@ def dummy(): """Placeholder""" pass + def parse_function_hr_diagram(output): """ Parsing function for the HR plotting routine @@ -357,6 +376,7 @@ def parse_function_hr_diagram(output): return df + def parse_function_orbit(output): """ Parsing function for the orbit plotting routine @@ -387,6 +407,7 @@ def parse_function_orbit(output): return df + def parse_function_masses(output): """ Parsing function for the orbit plotting routine @@ -517,6 +538,7 @@ def 
plot_system(plot_type, **kwargs): if not show_plot: return fig + # from david_phd_functions.plotting.custom_mpl_settings import load_mpl_rc # load_mpl_rc() @@ -531,4 +553,4 @@ fig = plot_system( ) # fig.axes[0].set_xlim(0, 150) -# plt.show() \ No newline at end of file +# plt.show() diff --git a/setup.py b/setup.py index 349c368029212989033b7f0f926b6fd7b2e26f55..6d1e39302208f78c9ae82afeece6741603526730 100644 --- a/setup.py +++ b/setup.py @@ -16,21 +16,26 @@ def readme(): with open("README.md") as file: return file.read() + def license(): """Opens license file and returns the content""" with open("LICENSE.md") as file: return file.read() + def check_version(installed_binary_c_version, required_binary_c_versions): """Function to check the installed version and compare it to the required version""" message = """ The binary_c version that is installed ({}) does not match the binary_c versions ({}) that this release of the binary_c python module requires. - """.format(installed_binary_c_version, required_binary_c_versions) + """.format( + installed_binary_c_version, required_binary_c_versions + ) assert installed_binary_c_version in required_binary_c_versions, message + ### -REQUIRED_BINARY_C_VERSIONS = ['2.1.7'] +REQUIRED_BINARY_C_VERSIONS = ["2.1.7"] #### GSL_DIR = os.getenv("GSL_DIR", None) @@ -55,9 +60,7 @@ CWD = os.getcwd() BINARY_C_CONFIG = os.path.join(BINARY_C_DIR, "binary_c-config") BINARY_C_VERSION = ( - subprocess.run( - [BINARY_C_CONFIG, "version"], stdout=subprocess.PIPE, check=True - ) + subprocess.run([BINARY_C_CONFIG, "version"], stdout=subprocess.PIPE, check=True) .stdout.decode("utf-8") .split() ) @@ -189,8 +192,9 @@ setup( It is tested and designed to work for versions {}, we can't guarantee proper functioning for other versions If you want to use a different version of binary_c, download and install a different version of this package - """.format(str(REQUIRED_BINARY_C_VERSIONS), str(REQUIRED_BINARY_C_VERSIONS)), - + """.format( + str(REQUIRED_BINARY_C_VERSIONS), str(REQUIRED_BINARY_C_VERSIONS) + ), author="David Hendriks, Robert Izzard and Jeff Andrews", author_email="davidhendriks93@gmail.com/d.hendriks@surrey.ac.uk,\ r.izzard@surrey.ac.uk/rob.izzard@gmail.com andrews@physics.uoc.gr", diff --git a/tests/core/test_persistent_data.py b/tests/core/test_persistent_data.py index 9d20bad4c29781b7bc4092d6b5ad3633d283955f..4ca955cccd0b3882d20a7f49f463225627b0389e 100644 --- a/tests/core/test_persistent_data.py +++ b/tests/core/test_persistent_data.py @@ -10,18 +10,29 @@ import textwrap import binary_c_python_api from binarycpython.utils.functions import ( - binarycDecoder, + binarycDecoder, temp_dir, inspect_dict, merge_dicts, ) TMP_DIR = temp_dir() -os.makedirs(os.path.join(TMP_DIR, "test"), exist_ok=True) +os.makedirs(os.path.join(TMP_DIR, "test"), exist_ok=True) #### -def return_argstring(m1=15.0, m2=14.0, separation=0, orbital_period=453000000000, eccentricity=0.0, metallicity=0.02, max_evolution_time=15000, defer_ensemble=0, ensemble_filters_off=1, ensemble_filter='SUPERNOVAE'): +def return_argstring( + m1=15.0, + m2=14.0, + separation=0, + orbital_period=453000000000, + eccentricity=0.0, + metallicity=0.02, + max_evolution_time=15000, + defer_ensemble=0, + ensemble_filters_off=1, + ensemble_filter="SUPERNOVAE", +): """ Function to make a argstring that we can use in these tests """ @@ -31,11 +42,22 @@ def return_argstring(m1=15.0, m2=14.0, separation=0, orbital_period=453000000000 eccentricity {4:g} metallicity {5:g} max_evolution_time {6:g} ensemble 1 ensemble_defer {7} \ 
ensemble_filters_off {8} ensemble_filter_{9} 1 probability 0.1" - argstring = argstring_template.format(m1, m2, separation, orbital_period, eccentricity, - metallicity, max_evolution_time, defer_ensemble, ensemble_filters_off, ensemble_filter) + argstring = argstring_template.format( + m1, + m2, + separation, + orbital_period, + eccentricity, + metallicity, + max_evolution_time, + defer_ensemble, + ensemble_filters_off, + ensemble_filter, + ) return argstring + def test_return_persistent_data_memaddr(): output = binary_c_python_api.return_persistent_data_memaddr() @@ -43,6 +65,7 @@ def test_return_persistent_data_memaddr(): print("Binary_c output:") print(textwrap.indent(str(output), "\t")) + def test_passing_persistent_data_to_run_system(): # Function to test the passing of the persistent data memoery adress, and having ensemble_defer = True # We should see that the results of multiple systems have been added to the one output json @@ -52,26 +75,41 @@ def test_passing_persistent_data_to_run_system(): argstring_1_deferred = return_argstring(defer_ensemble=1) argstring_2 = return_argstring(defer_ensemble=0) - # + # persistent_data_memaddr = binary_c_python_api.return_persistent_data_memaddr() output_1 = binary_c_python_api.run_system(argstring=argstring_1) - ensemble_jsons_1 = [line for line in output_1.splitlines() if line.startswith("ENSEMBLE_JSON")] - json_1 = json.loads(ensemble_jsons_1[0][len("ENSEMBLE_JSON "):]) + ensemble_jsons_1 = [ + line for line in output_1.splitlines() if line.startswith("ENSEMBLE_JSON") + ] + json_1 = json.loads(ensemble_jsons_1[0][len("ENSEMBLE_JSON ") :]) # Doing 2 systems in a row. - output_1_deferred = binary_c_python_api.run_system(argstring=argstring_1_deferred, persistent_data_memaddr=persistent_data_memaddr) - output_2 = binary_c_python_api.run_system(argstring=argstring_2, persistent_data_memaddr=persistent_data_memaddr) - ensemble_jsons_2 = [line for line in output_2.splitlines() if line.startswith("ENSEMBLE_JSON")] - json_2 = json.loads(ensemble_jsons_2[0][len("ENSEMBLE_JSON "):]) + output_1_deferred = binary_c_python_api.run_system( + argstring=argstring_1_deferred, persistent_data_memaddr=persistent_data_memaddr + ) + output_2 = binary_c_python_api.run_system( + argstring=argstring_2, persistent_data_memaddr=persistent_data_memaddr + ) + ensemble_jsons_2 = [ + line for line in output_2.splitlines() if line.startswith("ENSEMBLE_JSON") + ] + json_2 = json.loads(ensemble_jsons_2[0][len("ENSEMBLE_JSON ") :]) # Doing system one again. 
output_1_again = binary_c_python_api.run_system(argstring=argstring_1) - ensemble_jsons_1 = [line for line in output_1_again.splitlines() if line.startswith("ENSEMBLE_JSON")] - json_1_again = json.loads(ensemble_jsons_1[0][len("ENSEMBLE_JSON "):]) + ensemble_jsons_1 = [ + line for line in output_1_again.splitlines() if line.startswith("ENSEMBLE_JSON") + ] + json_1_again = json.loads(ensemble_jsons_1[0][len("ENSEMBLE_JSON ") :]) + + assert ( + json_1 == json_1_again + ), "The system with the same initial settings did not give the same output" + assert ( + json_1 != json_2 + ), "The output of the deferred two systems should not be the same as the first undeferred output" - assert json_1 == json_1_again, "The system with the same initial settings did not give the same output" - assert json_1 != json_2, "The output of the deferred two systems should not be the same as the first undeferred output" def test_full_ensemble_output(): """ @@ -80,16 +118,20 @@ def test_full_ensemble_output(): argstring_1 = return_argstring(defer_ensemble=0, ensemble_filters_off=0) output_1 = binary_c_python_api.run_system(argstring=argstring_1) - ensemble_jsons_1 = [line for line in output_1.splitlines() if line.startswith("ENSEMBLE_JSON")] + ensemble_jsons_1 = [ + line for line in output_1.splitlines() if line.startswith("ENSEMBLE_JSON") + ] start = time.time() - json_1 = json.loads(ensemble_jsons_1[0][len("ENSEMBLE_JSON "):], cls=binarycDecoder) + json_1 = json.loads( + ensemble_jsons_1[0][len("ENSEMBLE_JSON ") :], cls=binarycDecoder + ) stop = time.time() - with open(os.path.join(TMP_DIR, "test", "json_full_ensemble.json"), 'w') as f: + with open(os.path.join(TMP_DIR, "test", "json_full_ensemble.json"), "w") as f: f.write(json.dumps(json_1, indent=4)) - print("took {}s to decode".format(stop-start)) + print("took {}s to decode".format(stop - start)) print("Size of the json in memory: {}".format(sys.getsizeof(json_1))) # assert statements: @@ -100,6 +142,7 @@ def test_full_ensemble_output(): assert "distributions" in json_1.keys() assert "scalars" in json_1.keys() + def test_adding_ensemble_output(): """ Function that adds the output of 2 ensembles and compares it to the output that we get by deferring the first output @@ -114,10 +157,15 @@ def test_adding_ensemble_output(): # results are returned directly after the run # Direct output commands - argstring_1 = return_argstring(m1=m1, m2=m2, - ensemble_filter="STELLAR_TYPE_COUNTS", defer_ensemble=0) - argstring_2 = return_argstring(m1=m1 + extra_mass, m2=m2, - ensemble_filter="STELLAR_TYPE_COUNTS", defer_ensemble=0) + argstring_1 = return_argstring( + m1=m1, m2=m2, ensemble_filter="STELLAR_TYPE_COUNTS", defer_ensemble=0 + ) + argstring_2 = return_argstring( + m1=m1 + extra_mass, + m2=m2, + ensemble_filter="STELLAR_TYPE_COUNTS", + defer_ensemble=0, + ) # Get outputs output_1 = binary_c_python_api.run_system(argstring=argstring_1) @@ -131,17 +179,19 @@ def test_adding_ensemble_output(): ] test_1_json_1 = json.loads( - test_1_ensemble_jsons_1[0][len("ENSEMBLE_JSON "):], cls=binarycDecoder) + test_1_ensemble_jsons_1[0][len("ENSEMBLE_JSON ") :], cls=binarycDecoder + ) test_1_json_2 = json.loads( - test_1_ensemble_jsons_2[0][len("ENSEMBLE_JSON "):], cls=binarycDecoder) + test_1_ensemble_jsons_2[0][len("ENSEMBLE_JSON ") :], cls=binarycDecoder + ) test_1_merged_dict = merge_dicts(test_1_json_1, test_1_json_2) - with open(os.path.join(TMP_DIR, "test", "adding_json_1.json"), 'w') as file: + with open(os.path.join(TMP_DIR, "test", "adding_json_1.json"), "w") as file: 
file.write(json.dumps(test_1_json_1, indent=4)) - with open(os.path.join(TMP_DIR, "test", "adding_json_2.json"), 'w') as file: + with open(os.path.join(TMP_DIR, "test", "adding_json_2.json"), "w") as file: file.write(json.dumps(test_1_json_2, indent=4)) - with open(os.path.join(TMP_DIR, "test", "adding_json_merged.json"), 'w') as file: + with open(os.path.join(TMP_DIR, "test", "adding_json_merged.json"), "w") as file: file.write(json.dumps(test_1_json_2, indent=4)) print("Single runs done\n") @@ -153,30 +203,42 @@ def test_adding_ensemble_output(): # Deferred commands argstring_1_deferred = return_argstring( - m1=m1, m2=m2, ensemble_filter="STELLAR_TYPE_COUNTS", defer_ensemble=1) + m1=m1, m2=m2, ensemble_filter="STELLAR_TYPE_COUNTS", defer_ensemble=1 + ) argstring_2_deferred = return_argstring( - m1=m1 + extra_mass, m2=m2, ensemble_filter="STELLAR_TYPE_COUNTS", defer_ensemble=1) + m1=m1 + extra_mass, + m2=m2, + ensemble_filter="STELLAR_TYPE_COUNTS", + defer_ensemble=1, + ) # Get a memory location - test_2_persistent_data_memaddr = binary_c_python_api.return_persistent_data_memaddr() + test_2_persistent_data_memaddr = ( + binary_c_python_api.return_persistent_data_memaddr() + ) # Run the systems and defer the output each time _ = binary_c_python_api.run_system( argstring=argstring_1_deferred, - persistent_data_memaddr=test_2_persistent_data_memaddr + persistent_data_memaddr=test_2_persistent_data_memaddr, ) _ = binary_c_python_api.run_system( argstring=argstring_2_deferred, - persistent_data_memaddr=test_2_persistent_data_memaddr + persistent_data_memaddr=test_2_persistent_data_memaddr, ) # Have the persistent_memory adress be released and have the json outputted test_2_output = binary_c_python_api.free_persistent_data_memaddr_and_return_json_output( - test_2_persistent_data_memaddr) - test_2_ensemble_json = [line for line in test_2_output.splitlines() if line.startswith("ENSEMBLE_JSON")] - test_2_json = json.loads(test_2_ensemble_json[0][len("ENSEMBLE_JSON "):], cls=binarycDecoder) + test_2_persistent_data_memaddr + ) + test_2_ensemble_json = [ + line for line in test_2_output.splitlines() if line.startswith("ENSEMBLE_JSON") + ] + test_2_json = json.loads( + test_2_ensemble_json[0][len("ENSEMBLE_JSON ") :], cls=binarycDecoder + ) - with open(os.path.join(TMP_DIR, "test", "adding_json_deferred.json"), 'w') as file: + with open(os.path.join(TMP_DIR, "test", "adding_json_deferred.json"), "w") as file: file.write(json.dumps(test_2_json, indent=4)) print("Double deferred done\n") @@ -186,29 +248,36 @@ def test_adding_ensemble_output(): # Then the second one uses that memory to combine its results with, but doesn't defer the # data after that, so it will print it after the second run is done - test_3_persistent_data_memaddr = binary_c_python_api.return_persistent_data_memaddr() + test_3_persistent_data_memaddr = ( + binary_c_python_api.return_persistent_data_memaddr() + ) # Run the systems and defer the output once and the second time not, so that the second run # automatically prints out the results _ = binary_c_python_api.run_system( argstring=argstring_1_deferred, - persistent_data_memaddr=test_3_persistent_data_memaddr + persistent_data_memaddr=test_3_persistent_data_memaddr, ) test_3_output_2 = binary_c_python_api.run_system( - argstring=argstring_2, - persistent_data_memaddr=test_3_persistent_data_memaddr + argstring=argstring_2, persistent_data_memaddr=test_3_persistent_data_memaddr ) test_3_ensemble_jsons = [ - line for line in test_3_output_2.splitlines() if 
line.startswith("ENSEMBLE_JSON") + line + for line in test_3_output_2.splitlines() + if line.startswith("ENSEMBLE_JSON") ] - test_3_json = json.loads(test_3_ensemble_jsons[0][len("ENSEMBLE_JSON "):], cls=binarycDecoder) + test_3_json = json.loads( + test_3_ensemble_jsons[0][len("ENSEMBLE_JSON ") :], cls=binarycDecoder + ) - with open(os.path.join(TMP_DIR, "test", "adding_json_deferred_and_output.json"), 'w') as f: + with open( + os.path.join(TMP_DIR, "test", "adding_json_deferred_and_output.json"), "w" + ) as f: f.write(json.dumps(test_3_json, indent=4)) print("Single deferred done\n") - # + # assert_message_1 = """ The structure of the manually merged is not the same as the merged by double deferring """ @@ -218,18 +287,28 @@ def test_adding_ensemble_output(): """ # - assert inspect_dict(test_1_merged_dict, print_structure=False) == inspect_dict(test_2_json, print_structure=False), assert_message_1 + assert inspect_dict(test_1_merged_dict, print_structure=False) == inspect_dict( + test_2_json, print_structure=False + ), assert_message_1 # assert inspect_dict(test_1_merged_dict, print_structure=False) == inspect_dict(test_3_json, print_structure=False), assert_message_2 + def combine_with_empty_json(): argstring_1 = return_argstring(defer_ensemble=0) output_1 = binary_c_python_api.run_system(argstring=argstring_1) - ensemble_jsons_1 = [line for line in output_1.splitlines() if line.startswith("ENSEMBLE_JSON")] - json_1 = json.loads(ensemble_jsons_1[0][len("ENSEMBLE_JSON "):], cls=binarycDecoder) + ensemble_jsons_1 = [ + line for line in output_1.splitlines() if line.startswith("ENSEMBLE_JSON") + ] + json_1 = json.loads( + ensemble_jsons_1[0][len("ENSEMBLE_JSON ") :], cls=binarycDecoder + ) - assert_message = "combining output json with empty dict should give same result as initial json" + assert_message = ( + "combining output json with empty dict should give same result as initial json" + ) assert merge_dicts(json_1, {}) == json_1, assert_message + def test_free_and_json_output(): """ Function that tests the freeing of the memory adress and the output of the json @@ -239,27 +318,35 @@ def test_free_and_json_output(): m2 = 0.1 # Msun # Get argstring: - argstring_1 = return_argstring(m1=m2, m2=m2, ensemble_filter="STELLAR_TYPE_COUNTS", defer_ensemble=1) + argstring_1 = return_argstring( + m1=m2, m2=m2, ensemble_filter="STELLAR_TYPE_COUNTS", defer_ensemble=1 + ) # Get a memory adress: persistent_data_memaddr = binary_c_python_api.return_persistent_data_memaddr() # Evolve and defer output print("evolving") - output_1_deferred = binary_c_python_api.run_system(argstring=argstring_1, persistent_data_memaddr=persistent_data_memaddr) + output_1_deferred = binary_c_python_api.run_system( + argstring=argstring_1, persistent_data_memaddr=persistent_data_memaddr + ) print("Evolved") print("Output:") print(textwrap.indent(str(output_1_deferred), "\t")) # Free memory adress print("freeing") - json_output_by_freeing = binary_c_python_api.free_persistent_data_memaddr_and_return_json_output(persistent_data_memaddr) + json_output_by_freeing = binary_c_python_api.free_persistent_data_memaddr_and_return_json_output( + persistent_data_memaddr + ) print("Freed") print("Output:") print(textwrap.indent(str(json_output_by_freeing), "\t")) - - parsed_json = json.loads(json_output_by_freeing.splitlines()[0][len("ENSEMBLE_JSON "):], cls=binarycDecoder) + parsed_json = json.loads( + json_output_by_freeing.splitlines()[0][len("ENSEMBLE_JSON ") :], + cls=binarycDecoder, + ) print(parsed_json) # ensemble_jsons_1 = [line 
for line in output_1.splitlines() if line.startswith("ENSEMBLE_JSON")] # json_1 = json.loads(ensemble_jsons_1[0][len("ENSEMBLE_JSON "):], cls=binarycDecoder) diff --git a/tests/core/test_return_store_memaddr.py b/tests/core/test_return_store_memaddr.py index ad2025697e5bf5c529b16a3f385c9e943e8d3e41..1e9083336cdb6a28cc1613c5260589dec1b97c4f 100644 --- a/tests/core/test_return_store_memaddr.py +++ b/tests/core/test_return_store_memaddr.py @@ -1,6 +1,7 @@ import binary_c_python_api import textwrap + def test_return_store_memaddr(): output = binary_c_python_api.return_store_memaddr("") @@ -8,6 +9,7 @@ def test_return_store_memaddr(): print("store memory adress:") print(textwrap.indent(str(output), "\t")) + #### if __name__ == "__main__": test_return_store_memaddr() diff --git a/tests/extra_tests.py b/tests/extra_tests.py index 279e9d970650192dca306dc346c55b45bb33e2a4..85029ded6b366d40672ca53399b66f83854bced8 100644 --- a/tests/extra_tests.py +++ b/tests/extra_tests.py @@ -4,22 +4,26 @@ import os # Script containing extra tests # TODO: store the version somewhere + def test_binary_c_installed(): binary_c_dir = os.getenv("BINARY_C", None) - assert binary_c_dir is not None, "Error: the BINARY_C environment variable is not set." - assert os.path.isfile(os.path.join(binary_c_dir, 'binary_c')), "binary_c doesn't exist!" + assert ( + binary_c_dir is not None + ), "Error: the BINARY_C environment variable is not set." + assert os.path.isfile( + os.path.join(binary_c_dir, "binary_c") + ), "binary_c doesn't exist!" + def test_binary_c_version(): - required_binary_c_versions = ['2.1.7'] + required_binary_c_versions = ["2.1.7"] binary_c_dir = os.getenv("BINARY_C", None) binary_c_config = os.path.join(binary_c_dir, "binary_c-config") installed_binary_c_version = ( - subprocess.run( - [binary_c_config, "version"], stdout=subprocess.PIPE, check=True - ) + subprocess.run([binary_c_config, "version"], stdout=subprocess.PIPE, check=True) .stdout.decode("utf-8") .split() )[0] @@ -27,10 +31,14 @@ def test_binary_c_version(): message = """ The binary_c version that is installed ({}) does not match the binary_c versions ({}) that this release of the binary_c python module requires. 
- """.format(installed_binary_c_version, required_binary_c_versions) + """.format( + installed_binary_c_version, required_binary_c_versions + ) assert installed_binary_c_version in required_binary_c_versions, message + + ### if __name__ == "__main__": test_binary_c_version() - test_binary_c_installed() \ No newline at end of file + test_binary_c_installed() diff --git a/tests/function_tests.py b/tests/function_tests.py index a01d1ad6491cb06d45bea9e13c8c956325ed788a..a440492da98272e4ffdcf4ee6b7c27595de3b0b3 100644 --- a/tests/function_tests.py +++ b/tests/function_tests.py @@ -1,12 +1,13 @@ from binarycpython.utils.functions import ( - get_help_super, - get_help_all, + get_help_super, + get_help_all, get_help, ) ############################# # Script that contains unit tests for functions from the binarycpython.utils.functions file + def test_get_help_super(): """ Function to test the get_help_super function @@ -15,13 +16,14 @@ def test_get_help_super(): get_help_super_output = get_help_super() get_help_super_keys = get_help_super_output.keys() - assert 'stars' in get_help_super_keys, "missing section" - assert 'binary' in get_help_super_keys, "missing section" - assert 'nucsyn' in get_help_super_keys, "missing section" - assert 'output' in get_help_super_keys, "missing section" - assert 'i/o' in get_help_super_keys, "missing section" - assert 'algorithms' in get_help_super_keys, "missing section" - assert 'misc' in get_help_super_keys, "missing section" + assert "stars" in get_help_super_keys, "missing section" + assert "binary" in get_help_super_keys, "missing section" + assert "nucsyn" in get_help_super_keys, "missing section" + assert "output" in get_help_super_keys, "missing section" + assert "i/o" in get_help_super_keys, "missing section" + assert "algorithms" in get_help_super_keys, "missing section" + assert "misc" in get_help_super_keys, "missing section" + def test_get_help_all(): """ @@ -31,20 +33,24 @@ def test_get_help_all(): get_help_all_output = get_help_all(print_help=False) get_help_all_keys = get_help_all_output.keys() - assert 'stars' in get_help_all_keys, "missing section" - assert 'binary' in get_help_all_keys, "missing section" - assert 'nucsyn' in get_help_all_keys, "missing section" - assert 'output' in get_help_all_keys, "missing section" - assert 'i/o' in get_help_all_keys, "missing section" - assert 'algorithms' in get_help_all_keys, "missing section" - assert 'misc' in get_help_all_keys, "missing section" + assert "stars" in get_help_all_keys, "missing section" + assert "binary" in get_help_all_keys, "missing section" + assert "nucsyn" in get_help_all_keys, "missing section" + assert "output" in get_help_all_keys, "missing section" + assert "i/o" in get_help_all_keys, "missing section" + assert "algorithms" in get_help_all_keys, "missing section" + assert "misc" in get_help_all_keys, "missing section" + def test_get_help(): """ Function to test the get_help function """ - assert get_help("M_1", print_help=False)['parameter_name'] == 'M_1', "get_help('M_1') should return the correct parameter name" + assert ( + get_help("M_1", print_help=False)["parameter_name"] == "M_1" + ), "get_help('M_1') should return the correct parameter name" + def test_all(): test_get_help() @@ -53,4 +59,4 @@ def test_all(): if __name__ == "__main__": - test_all() \ No newline at end of file + test_all() diff --git a/tests/main.py b/tests/main.py index 79fda3743e76e0fcbd9473ac00c23337a8049311..20b1a1fdefb05c5aa426367c2aab3a4289f9b33d 100644 --- a/tests/main.py +++ b/tests/main.py @@ -3,4 
+3,4 @@ from population.grid_tests import test_all as test_all_grid_tests from function_tests import test_all as test_all_function_tests test_all_grid_tests() -test_all_function_tests() \ No newline at end of file +test_all_function_tests() diff --git a/tests/population/grid_tests.py b/tests/population/grid_tests.py index 1cb113dd6d98b2a84e8dcc97e19a3eaf1abd4d2c..e4ca1a1fa950f912c2eeeb6e343dda3362f63bee 100644 --- a/tests/population/grid_tests.py +++ b/tests/population/grid_tests.py @@ -13,6 +13,7 @@ from binarycpython.utils.functions import ( output_lines, ) + def test_setup_population(): """ Unit test for setting up the population object @@ -22,6 +23,7 @@ def test_setup_population(): assert isinstance(test_pop, Population), "Population object not created properly" + def test_set_value_population(): """ Unit test for setting values in the population object @@ -29,20 +31,21 @@ def test_set_value_population(): test_pop = Population() - test_pop.set( - verbosity=1, - ) + test_pop.set(verbosity=1,) test_pop.set( - M_1=10, - data_dir='/tmp/binary-c/output', - ensemble_filter_SUPERNOVAE=1, + M_1=10, data_dir="/tmp/binary-c/output", ensemble_filter_SUPERNOVAE=1, ) - assert test_pop.bse_options['M_1'] == 10, 'BSE options not correctly set' - assert test_pop.grid_options['verbosity'] == 1, 'Grid options not correctly set' - assert test_pop.custom_options['data_dir'] == '/tmp/binary-c/output', 'Custom options not correctly set' - assert test_pop.bse_options['ensemble_filter_SUPERNOVAE'] == 1, "Parameters are not getting caught correctly (The wildcards of parameters with %d in their name)" + assert test_pop.bse_options["M_1"] == 10, "BSE options not correctly set" + assert test_pop.grid_options["verbosity"] == 1, "Grid options not correctly set" + assert ( + test_pop.custom_options["data_dir"] == "/tmp/binary-c/output" + ), "Custom options not correctly set" + assert ( + test_pop.bse_options["ensemble_filter_SUPERNOVAE"] == 1 + ), "Parameters are not getting caught correctly (The wildcards of parameters with %d in their name)" + def test_set_argline_output_population(): """ @@ -53,17 +56,21 @@ def test_set_argline_output_population(): test_pop.set( M_1=10, - M_2 = 14.0, # Msun - separation = 0, # 0 = ignored, use period - orbital_period = 4530.0, # days - eccentricity = 0.0, - metallicity = 0.02, - max_evolution_time = 15000, + M_2=14.0, # Msun + separation=0, # 0 = ignored, use period + orbital_period=4530.0, # days + eccentricity=0.0, + metallicity=0.02, + max_evolution_time=15000, verbosity=1, ) argline = test_pop.return_argline() - assert argline == "binary_c M_1 10 M_2 14.0 eccentricity 0.0 max_evolution_time 15000 metallicity 0.02 orbital_period 4530.0 separation 0", "Argline not constructed correctly. Check if values are set properly." + assert ( + argline + == "binary_c M_1 10 M_2 14.0 eccentricity 0.0 max_evolution_time 15000 metallicity 0.02 orbital_period 4530.0 separation 0" + ), "Argline not constructed correctly. Check if values are set properly." 
+ def test_version_info_dict_population(): """ @@ -83,6 +90,7 @@ def test_version_info_dict_population(): assert "GSL_VERSION" in version_info_keys, "Missing item in version info" assert "git_revision" in version_info_keys, "Missing item in version info" + def test_settings_output_population(): """ Unit test for outputting the settings of the population dict object @@ -94,9 +102,16 @@ def test_settings_output_population(): population_settings = test_pop.return_population_settings() population_settings_keys = population_settings.keys() - assert 'bse_options' in population_settings_keys, "Missing information in the population_settings dict" - assert 'grid_options' in population_settings_keys, "Missing information in the population_settings dict" - assert 'custom_options' in population_settings_keys, "Missing information in the population_settings dict" + assert ( + "bse_options" in population_settings_keys + ), "Missing information in the population_settings dict" + assert ( + "grid_options" in population_settings_keys + ), "Missing information in the population_settings dict" + assert ( + "custom_options" in population_settings_keys + ), "Missing information in the population_settings dict" + def test_all_info_population(): """ @@ -109,10 +124,19 @@ def test_all_info_population(): all_info_dict = test_pop.return_all_info() all_info_keys = all_info_dict.keys() - assert 'population_settings' in all_info_keys, "Missing information in the all_info settings dict" - assert 'binary_c_defaults' in all_info_keys, "Missing information in the all_info settings dict" - assert 'binary_c_version_info' in all_info_keys, "Missing information in the all_info settings dict" - assert 'binary_c_help_all' in all_info_keys, "Missing information in the all_info settings dict" + assert ( + "population_settings" in all_info_keys + ), "Missing information in the all_info settings dict" + assert ( + "binary_c_defaults" in all_info_keys + ), "Missing information in the all_info settings dict" + assert ( + "binary_c_version_info" in all_info_keys + ), "Missing information in the all_info settings dict" + assert ( + "binary_c_help_all" in all_info_keys + ), "Missing information in the all_info settings dict" + def test_evolve_single_system_population(): """ @@ -123,12 +147,12 @@ def test_evolve_single_system_population(): test_pop.set( M_1=10, - M_2 = 14.0, # Msun - separation = 0, # 0 = ignored, use period - orbital_period = 4530.0, # days - eccentricity = 0.0, - metallicity = 0.02, - max_evolution_time = 15000, + M_2=14.0, # Msun + separation=0, # 0 = ignored, use period + orbital_period=4530.0, # days + eccentricity=0.0, + metallicity=0.02, + max_evolution_time=15000, verbosity=0, ) @@ -136,8 +160,10 @@ def test_evolve_single_system_population(): assert "SINGLE_STAR_LIFETIME" in output, "Failed to evolve a system" + # def test_custom_logging_memory_adress(): + def test_C_auto_logging_population(): """ Unit test for the creating a custom logging output by setting the @@ -147,27 +173,39 @@ def test_C_auto_logging_population(): test_pop = Population() test_pop.set( - M_1 = 14, - M_2 = 10.0, # Msun - separation = 0, # 0 = ignored, use period - orbital_period = 4530.0, # days - eccentricity = 0.0, - metallicity = 0.02, - max_evolution_time = 15000, + M_1=14, + M_2=10.0, # Msun + separation=0, # 0 = ignored, use period + orbital_period=4530.0, # days + eccentricity=0.0, + metallicity=0.02, + max_evolution_time=15000, verbosity=0, ) - - test_pop.set(C_auto_logging={'MY_HEADER_LINE': ['star[0].mass', 'star[1].mass', 
'model.probability']}) + test_pop.set( + C_auto_logging={ + "MY_HEADER_LINE": ["star[0].mass", "star[1].mass", "model.probability"] + } + ) output = test_pop.evolve_single() first_line = output.splitlines()[0].split() - assert first_line[0] == "MY_HEADER_LINE" , "Failed to set the custom logging correctly" - assert first_line[1] == "14" , "Failed to set the custom logging correctly. First mass should be 14" - assert first_line[2] == "10" , "Failed to set the custom logging correctly. Second mass should be 10" - assert first_line[3] == "1" , "Failed to set the custom logging correctly. Probability should be 1" + assert ( + first_line[0] == "MY_HEADER_LINE" + ), "Failed to set the custom logging correctly" + assert ( + first_line[1] == "14" + ), "Failed to set the custom logging correctly. First mass should be 14" + assert ( + first_line[2] == "10" + ), "Failed to set the custom logging correctly. Second mass should be 10" + assert ( + first_line[3] == "1" + ), "Failed to set the custom logging correctly. Probability should be 1" + def test_C_logging_code_population(): """ @@ -179,49 +217,63 @@ def test_C_logging_code_population(): test_pop = Population() test_pop.set( - M_1 = 14, - M_2 = 10.0, # Msun - separation = 10000, # 0 = ignored, use period + M_1=14, + M_2=10.0, # Msun + separation=10000, # 0 = ignored, use period # orbital_period = 4530.0, # days - eccentricity = 0.0, - metallicity = 0.02, - max_evolution_time = 15000, + eccentricity=0.0, + metallicity=0.02, + max_evolution_time=15000, verbosity=0, ) - test_pop.set(C_logging_code='Printf("MY_STELLAR_DATA mass=%g separation=%g probability=%g\\n", stardata->star[0].mass, stardata->common.orbit.separation, stardata->model.probability);') + test_pop.set( + C_logging_code='Printf("MY_STELLAR_DATA mass=%g separation=%g probability=%g\\n", stardata->star[0].mass, stardata->common.orbit.separation, stardata->model.probability);' + ) output = test_pop.evolve_single() first_line = output.splitlines()[0].split() - assert first_line[0] == "MY_STELLAR_DATA" , "Failed to set the custom logging correctly. Headerline should be MY_STELLAR_DATA" - assert first_line[1] == "mass=14" , "Failed to set the custom logging correctly. First mass should be 14" - assert first_line[2] == "separation=10000" , "Failed to set the custom logging correctly. Separation should be 10000" - assert first_line[3] == "probability=1" , "Failed to set the custom logging correctly. Probability should be 1" + assert ( + first_line[0] == "MY_STELLAR_DATA" + ), "Failed to set the custom logging correctly. Headerline should be MY_STELLAR_DATA" + assert ( + first_line[1] == "mass=14" + ), "Failed to set the custom logging correctly. First mass should be 14" + assert ( + first_line[2] == "separation=10000" + ), "Failed to set the custom logging correctly. Separation should be 10000" + assert ( + first_line[3] == "probability=1" + ), "Failed to set the custom logging correctly. 
Probability should be 1" + def parse_function(self, output): """ Dummy parse function for handling the output of a system """ - parameters = ['mass_1', 'separation', 'probability'] - separator='\t' + parameters = ["mass_1", "separation", "probability"] + separator = "\t" - outfilename = os.path.join(self.grid_options['tmp_dir'], 'output_parse_function_test.txt') + outfilename = os.path.join( + self.grid_options["tmp_dir"], "output_parse_function_test.txt" + ) if os.path.isfile(outfilename): os.remove(outfilename) for el in output_lines(output): headerline = el.split()[0] - if (headerline=='MY_STELLAR_DATA'): + if headerline == "MY_STELLAR_DATA": values = el.split()[1:] if not os.path.exists(outfilename): - with open(outfilename, 'w') as f: - f.write(separator.join(parameters)+'\n') + with open(outfilename, "w") as f: + f.write(separator.join(parameters) + "\n") + + with open(outfilename, "a") as f: + f.write(separator.join(values) + "\n") - with open(outfilename, 'a') as f: - f.write(separator.join(values)+'\n') def test_parse_function_population(): """ @@ -232,12 +284,12 @@ def test_parse_function_population(): test_pop.set( M_1=10, - M_2 = 14.0, # Msun - separation = 0, # 0 = ignored, use period - orbital_period = 4530.0, # days - eccentricity = 0.0, - metallicity = 0.02, - max_evolution_time = 15000, + M_2=14.0, # Msun + separation=0, # 0 = ignored, use period + orbital_period=4530.0, # days + eccentricity=0.0, + metallicity=0.02, + max_evolution_time=15000, verbosity=0, ) @@ -245,25 +297,26 @@ def test_parse_function_population(): C_logging_code='Printf("MY_STELLAR_DATA mass=%g separation=%g probability=%g\\n", stardata->star[0].mass, stardata->common.orbit.separation, stardata->model.probability);' ) - test_pop.set( - parse_function=parse_function - ) + test_pop.set(parse_function=parse_function) # Run with parse function test_pop.evolve_single(parse_function) - outfilename = os.path.join(test_pop.grid_options['tmp_dir'], 'output_parse_function_test.txt') + outfilename = os.path.join( + test_pop.grid_options["tmp_dir"], "output_parse_function_test.txt" + ) assert os.path.isfile(outfilename), "Output file not created!" 
- with open(outfilename, 'r') as outfile: + with open(outfilename, "r") as outfile: output = outfile.readlines() first_line_split = output[0].strip().split("\t") - assert first_line_split[0] == 'mass_1', "Output header not created correctly" - assert first_line_split[1] == 'separation', "Output header not created correctly" - assert first_line_split[2] == 'probability', "Output header not created correctly" + assert first_line_split[0] == "mass_1", "Output header not created correctly" + assert first_line_split[1] == "separation", "Output header not created correctly" + assert first_line_split[2] == "probability", "Output header not created correctly" assert len(output) > 1, "File doesn't seem to contain any real data" + def test_all(): test_setup_population() test_set_value_population() @@ -276,9 +329,10 @@ def test_all(): test_C_logging_code_population() test_parse_function_population() + if __name__ == "__main__": test_all() - print('yo') + print("yo") # quit() # print(len(test_pop.return_binary_c_defaults())) diff --git a/tests/scaling/argparse_setup.py b/tests/scaling/argparse_setup.py index 9a245dd05aba98443b536c3b69ac06f9f154e271..0765c7f77d0f62e94cb6f9aa300855f0ee9a11a0 100644 --- a/tests/scaling/argparse_setup.py +++ b/tests/scaling/argparse_setup.py @@ -35,4 +35,4 @@ args = parser.parse_args() res_m_1 = int(args.resolution_M_1) res_per = int(args.resolution_per) AMT_CORES = int(args.amt_cores) -name_testcase = args.name_testcase \ No newline at end of file +name_testcase = args.name_testcase diff --git a/tests/scaling/plot_scaling.py b/tests/scaling/plot_scaling.py index be3c1d5e79bfe3668621d75eb97b137c45b1826d..e1b0be376d112a1b7c59dc08ed195b06ca0d8687 100644 --- a/tests/scaling/plot_scaling.py +++ b/tests/scaling/plot_scaling.py @@ -85,9 +85,11 @@ def plot_runtime(calculated_results, unique_amt_cores, unique_amt_systems): plt.show() + def amdahl(f, n): return 1.0 / ((1 - f) + (f / n)) + def plot_speedup_and_efficiency(result_json_filenames, plot_output_dir, name_testcase): """ Plotting routine to plot the speedup and efficiency of scaling @@ -124,7 +126,9 @@ def plot_speedup_and_efficiency(result_json_filenames, plot_output_dir, name_tes amt_cpus = int(amt_cpus) speedup = linear_mean / mp_mean stddev_speedup = ( - math.sqrt((linear_stdev / linear_mean) ** 2 + (mp_stdev / mp_mean) ** 2) + math.sqrt( + (linear_stdev / linear_mean) ** 2 + (mp_stdev / mp_mean) ** 2 + ) * speedup ) efficiency = speedup / int(amt_cpus) @@ -149,18 +153,17 @@ def plot_speedup_and_efficiency(result_json_filenames, plot_output_dir, name_tes ) # Plot the efficiencies - ax2.plot(cpus, efficiencies, alpha=0.5, linestyle='dotted') + ax2.plot(cpus, efficiencies, alpha=0.5, linestyle="dotted") # x_position_shift += 0.1 ##################### # Extra plots # 100 % scaling line - ax1.plot([1, max(cpus)], [1, max(cpus)], '--', alpha=0.25, label='100% scaling') - ax2.axhline(y=1, linestyle='--', alpha=0.25, label='100% efficient') + ax1.plot([1, max(cpus)], [1, max(cpus)], "--", alpha=0.25, label="100% scaling") + ax2.axhline(y=1, linestyle="--", alpha=0.25, label="100% efficient") # ax1.plot([1, max(cpus)], [1, max(cpus)], '--', alpha=0.25, label='100% scaling') - # Amdahls law fitting # Old stuff # Do Amdahls law fitting @@ -192,7 +195,14 @@ def plot_speedup_and_efficiency(result_json_filenames, plot_output_dir, name_tes ax2.set_ylim(ymin=0, ymax=None) - - fig.savefig(os.path.join(plot_output_dir, "speedup_scaling_{}.{}".format(name_testcase, "png"))) - fig.savefig(os.path.join(plot_output_dir, 
"speedup_scaling_{}.{}".format(name_testcase, "pdf"))) + fig.savefig( + os.path.join( + plot_output_dir, "speedup_scaling_{}.{}".format(name_testcase, "png") + ) + ) + fig.savefig( + os.path.join( + plot_output_dir, "speedup_scaling_{}.{}".format(name_testcase, "pdf") + ) + ) plt.show() diff --git a/tests/scaling/scaling_functions.py b/tests/scaling/scaling_functions.py index 0763dc3b07b7f6d7a60480708dca72a13aee0e41..97a5779576a1235c3ca2829251c40dea6cd0455f 100644 --- a/tests/scaling/scaling_functions.py +++ b/tests/scaling/scaling_functions.py @@ -12,12 +12,14 @@ import numpy as np from binarycpython.utils.grid import Population + def dummy_parsefunction(self, output): """ Dummy parsing function """ pass + def get_mp_results(population, cpu_list, amt_repeats, total_systems): """ Function that runs a population <amt_repeats> for each @@ -49,6 +51,7 @@ def get_mp_results(population, cpu_list, amt_repeats, total_systems): return mp_dict + def get_linear_results(population, amt_repeats, total_systems): """ Function that runs the population linearly (using 1 core) <amt_repeats> times @@ -61,38 +64,35 @@ def get_linear_results(population, amt_repeats, total_systems): population.evolve_population() total_lin = time.time() - total_lin_start - print( - "linear run with {} systems: {}".format( - total_systems, total_lin - ) - ) + print("linear run with {} systems: {}".format(total_systems, total_lin)) linear_times.append(total_lin) return linear_times + def run_systems_for_scaling_comparison(settings_dict): """ Function that runs the systems for the scaling comparison """ - amount_of_cpus = settings_dict['amount_of_cpus'] - amount_of_cores = settings_dict['amount_of_cores'] - amt_repeats = settings_dict['amt_repeats'] - stepsize_cpus = settings_dict['stepsize_cpus'] - testcase = settings_dict['testcase'] - plot_dir = settings_dict['plot_dir'] - result_dir = settings_dict['result_dir'] + amount_of_cpus = settings_dict["amount_of_cpus"] + amount_of_cores = settings_dict["amount_of_cores"] + amt_repeats = settings_dict["amt_repeats"] + stepsize_cpus = settings_dict["stepsize_cpus"] + testcase = settings_dict["testcase"] + plot_dir = settings_dict["plot_dir"] + result_dir = settings_dict["result_dir"] - resolutions = settings_dict['resolutions'] + resolutions = settings_dict["resolutions"] - # For each set of resolutions + # For each set of resolutions for resolution in resolutions: # Some calculated values total_systems = int(np.prod([el for el in resolution.values()])) hostname = socket.gethostname() # Generate the range of cpu numbers - cpu_list = np.arange(1, amount_of_cpus+1, stepsize_cpus) + cpu_list = np.arange(1, amount_of_cpus + 1, stepsize_cpus) if not cpu_list[-1] == amount_of_cpus: cpu_list = np.append(cpu_list, np.array([amount_of_cpus])) @@ -120,7 +120,9 @@ def run_systems_for_scaling_comparison(settings_dict): longname="Primary mass", valuerange=[1, 100], resolution="{}".format(resolution["M_1"]), - spacingfunc="const(math.log(1), math.log(100), {})".format(resolution["M_1"]), + spacingfunc="const(math.log(1), math.log(100), {})".format( + resolution["M_1"] + ), precode="M_1=math.exp(lnm1)", probdist="three_part_powerlaw(M_1, 0.1, 0.5, 1.0, 100, -1.3, -2.3, -2.3)*M_1", dphasevol="dlnm1", @@ -132,13 +134,13 @@ def run_systems_for_scaling_comparison(settings_dict): name="q", longname="Mass ratio", valuerange=["0.1/M_1", 1], - resolution="{}".format(resolution['q']), - spacingfunc="const(0.1/M_1, 1, {})".format(resolution['q']), + resolution="{}".format(resolution["q"]), + 
spacingfunc="const(0.1/M_1, 1, {})".format(resolution["q"]), probdist="flatsections(q, [{'min': 0.1/M_1, 'max': 0.8, 'height': 1}, {'min': 0.8, 'max': 1.0, 'height': 1.0}])", dphasevol="dq", precode="M_2 = q * M_1", parameter_name="M_2", - condition="", # Impose a condition on this grid variable. Mostly for a check for yourself + condition="", # Impose a condition on this grid variable. Mostly for a check for yourself ) test_pop.add_grid_variable( @@ -147,7 +149,7 @@ def run_systems_for_scaling_comparison(settings_dict): valuerange=[-2, 12], resolution="{}".format(resolution["per"]), spacingfunc="np.linspace(-2, 12, {})".format(resolution["per"]), - precode="orbital_period = 10** logper\n", # TODO: + precode="orbital_period = 10** logper\n", # TODO: probdist="gaussian(logper,4.8, 2.3, -2.0, 12.0)", parameter_name="orbital_period", dphasevol="dln10per", @@ -169,5 +171,10 @@ def run_systems_for_scaling_comparison(settings_dict): # Write to file and make sure the directory exists. os.makedirs(result_dir, exist_ok=True) - with open(os.path.join(result_dir, "{}_{}_systems.json".format(hostname, total_systems)), "w") as f: + with open( + os.path.join( + result_dir, "{}_{}_systems.json".format(hostname, total_systems) + ), + "w", + ) as f: f.write(json.dumps(result_dict, indent=4)) diff --git a/tests/scaling/scaling_script.py b/tests/scaling/scaling_script.py index 0e3fb46097039b5911a496f2d063bf2f9394dc4f..6f578243b85ee66dcbf28b268e4311f773bc9174 100644 --- a/tests/scaling/scaling_script.py +++ b/tests/scaling/scaling_script.py @@ -22,34 +22,54 @@ import psutil import numpy as np from binarycpython.utils.grid import Population -from scaling_functions import get_mp_results, get_linear_results, run_systems_for_scaling_comparison +from scaling_functions import ( + get_mp_results, + get_linear_results, + run_systems_for_scaling_comparison, +) from plot_scaling import plot_speedup_and_efficiency settings_dict = {} -settings_dict['amt_repeats'] = 1 # Number of times the population will be repeated per cpu - # number. Better do it several times than only run it once -settings_dict['resolutions'] = [ # List of resolution of sampling of the population. Useful for checking whether population size has an effect on the results +settings_dict[ + "amt_repeats" +] = 1 # Number of times the population will be repeated per cpu +# number. Better do it several times than only run it once +settings_dict[ + "resolutions" +] = [ # List of resolution of sampling of the population. Useful for checking whether population size has an effect on the results {"M_1": 10, "per": 10, "q": 2} -] -settings_dict['result_dir'] = "scaling_results" # Relative of absolute directory where results are writting to -settings_dict['plot_dir'] = "scaling_plots" # Directory where the plots will be stored -settings_dict['testcase'] = "linear vs MP batched" # 'name' of the calculation. will be used in the plot -settings_dict['stepsize_cpus'] = 1 # Stepsize for the cpu number generator. Try to keep this - # low, to get the most reliable results -settings_dict['amount_of_cpus'] = 4 # Amount of logical cpus the machine has (this is not the same as physical cpus!) +] +settings_dict[ + "result_dir" +] = "scaling_results" # Relative of absolute directory where results are writting to +settings_dict["plot_dir"] = "scaling_plots" # Directory where the plots will be stored +settings_dict[ + "testcase" +] = "linear vs MP batched" # 'name' of the calculation. 
will be used in the plot +settings_dict[ + "stepsize_cpus" +] = 1 # Stepsize for the cpu number generator. Try to keep this +# low, to get the most reliable results +settings_dict[ + "amount_of_cpus" +] = 4 # Amount of logical cpus the machine has (this is not the same as physical cpus!) # settings_dict['amount_of_cpus'] = psutil.cpu_count() -settings_dict['amount_of_cores'] = 2 # The amount of physical cores. This value - # is not vital bit will be used in the plot -# settings_dict['amount_of_cores'] = psutil.cpu_count(logical=False) # You can also use the psutil function to get - # the amt of physical cores, but this isnt fully - # reliable (in mar 2020 it didnt get this value - # right when there were multiple sockets) +settings_dict["amount_of_cores"] = 2 # The amount of physical cores. This value +# is not vital but will be used in the plot +# settings_dict['amount_of_cores'] = psutil.cpu_count(logical=False) # You can also use the psutil function to get +# the amount of physical cores, but this isn't fully +# reliable (in March 2020 it didn't get this value +# right when there were multiple sockets) run_systems_for_scaling_comparison(settings_dict) ################################# # Files -SCALING_RESULT_DIR = settings_dict['result_dir'] -RESULT_JSONS = [os.path.join(SCALING_RESULT_DIR, file) for file in os.listdir(SCALING_RESULT_DIR) if file.endswith('.json')] # Automatically grab all of the stuff, override it +SCALING_RESULT_DIR = settings_dict["result_dir"] +RESULT_JSONS = [ + os.path.join(SCALING_RESULT_DIR, file) + for file in os.listdir(SCALING_RESULT_DIR) + if file.endswith(".json") +] # Automatically grab all of the result JSON files; override manually if needed # FILENAMES = [ # "david-Lenovo-IdeaPad-S340-14IWL_100_systems.json", @@ -59,8 +79,4 @@ RESULT_JSONS = [os.path.join(SCALING_RESULT_DIR, file) for file in os.listdir(SC # for filename in FILENAMES: # RESULT_JSONS.append(os.path.join(os.path.abspath(SCALING_RESULT_DIR), filename)) -plot_speedup_and_efficiency( - RESULT_JSONS, - SCALING_RESULT_DIR, - "Example" -) \ No newline at end of file +plot_speedup_and_efficiency(RESULT_JSONS, SCALING_RESULT_DIR, "Example")
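Usage sketch (not part of the patch above): assuming binary_c and binarycpython are installed and the snippet is run from the repository's tests/ directory, the entry points touched by this diff can be driven as follows. The sys.path tweak is an assumption made so the helpers in tests/scaling/ are importable the same way scaling_script.py imports them.

import sys

# Mirrors tests/main.py: run the grid/population tests and the function tests
from population.grid_tests import test_all as test_all_grid_tests
from function_tests import test_all as test_all_function_tests

test_all_grid_tests()      # population setup, custom logging, parse_function, ...
test_all_function_tests()  # get_help, get_help_all, get_help_super

# Scaling comparison, using the same settings layout as tests/scaling/scaling_script.py
sys.path.insert(0, "scaling")  # assumption: make scaling_functions importable from tests/
from scaling_functions import run_systems_for_scaling_comparison

run_systems_for_scaling_comparison(
    {
        "amt_repeats": 1,                                 # repeats per CPU count
        "resolutions": [{"M_1": 10, "per": 10, "q": 2}],  # 10 * 10 * 2 = 200 systems
        "result_dir": "scaling_results",
        "plot_dir": "scaling_plots",
        "testcase": "linear vs MP batched",
        "stepsize_cpus": 1,
        "amount_of_cpus": 4,                              # logical CPUs
        "amount_of_cores": 2,                             # physical cores
    }
)

The result JSON files then land in scaling_results/ (named <hostname>_<total_systems>_systems.json, as written by run_systems_for_scaling_comparison) and can be passed to plot_speedup_and_efficiency, as done at the bottom of scaling_script.py.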