diff --git a/binarycpython/__init__.py b/binarycpython/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..d0b3153d7a6216136e5f039ba80ae6f964549f0a 100644
--- a/binarycpython/__init__.py
+++ b/binarycpython/__init__.py
@@ -0,0 +1,3 @@
+"""
+Init function for binarycpython module
+"""
diff --git a/binarycpython/core/__init__.py b/binarycpython/core/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..5e5747ae2d51851fd0146be6d81868d4728e9467 100644
--- a/binarycpython/core/__init__.py
+++ b/binarycpython/core/__init__.py
@@ -0,0 +1,3 @@
+"""
+Init function for the binarycpython.core module
+"""
\ No newline at end of file
diff --git a/binarycpython/tests/extra_tests.py b/binarycpython/tests/extra_tests.py
index 85029ded6b366d40672ca53399b66f83854bced8..e0a1f96dc693a5ae590df773589691fa66fdfc0e 100644
--- a/binarycpython/tests/extra_tests.py
+++ b/binarycpython/tests/extra_tests.py
@@ -1,3 +1,7 @@
+"""
+Extra unittests
+"""
+
 import subprocess
 import os
 
@@ -6,6 +10,10 @@ import os
 
 
 def test_binary_c_installed():
+    """
+    Unittest to check if binary_c actually exists
+    """
+
     binary_c_dir = os.getenv("BINARY_C", None)
 
     assert (
@@ -17,6 +25,10 @@ def test_binary_c_installed():
 
 
 def test_binary_c_version():
+    """
+    Unittest to check if binary_c has the correct version
+    """
+
     required_binary_c_versions = ["2.1.7"]
 
     binary_c_dir = os.getenv("BINARY_C", None)
diff --git a/binarycpython/tests/main.py b/binarycpython/tests/main.py
index 81fff0a97b1d3ccbc3edf634608f6479511517bf..2c8111696dfd428c2505c9ef3644ee40e177b92d 100644
--- a/binarycpython/tests/main.py
+++ b/binarycpython/tests/main.py
@@ -1,4 +1,7 @@
-# Main file for the tests. This file imports all the combined_test functions from all files.
+"""
+Main file for the tests. This file imports all the combined_test functions from all files.
+"""
+
 import unittest
 
 from binarycpython.tests.test_c_bindings import *
diff --git a/binarycpython/tests/python_API_test.py b/binarycpython/tests/python_API_test.py
index 510141312dfe9a0ad9c574001aa833bea1e9df4b..338be6fdef84ddd81c3733896e08507d0de2a88d 100755
--- a/binarycpython/tests/python_API_test.py
+++ b/binarycpython/tests/python_API_test.py
@@ -1,5 +1,10 @@
 #!/usr/bin/python3
 
+"""
+Some tests that can be run quickly
+TODO: turn these into real tests
+"""
+
 from binarycpython import _binary_c_bindings
 
 from binarycpython.utils.custom_logging_functions import (
@@ -20,6 +25,10 @@ import textwrap
 
 # Evolution functions
 def test_run_system():
+    """
+    Test if run_system runs without errors
+    """
+
     m1 = 15.0  # Msun
     m2 = 14.0  # Msun
     separation = 0  # 0 = ignored, use period
@@ -44,6 +53,10 @@ def test_run_system():
 
 
 def test_run_system_with_log():
+    """
+    Test if run_system runs with outputting a logfile
+    """
+
     m1 = 15.0  # Msun
     m2 = 14.0  # Msun
     separation = 0  # 0 = ignored, use period
@@ -75,6 +88,10 @@ def test_run_system_with_log():
 
 
 def test_run_system_with_custom_logging():
+    """
+    Test if run_system runs with custom logging
+    """
+
     # generate logging lines. Here you can choose whatever you want to have logged, and with what header
     # this generates working print statements
     logging_line = autogen_C_logging_code(
@@ -124,6 +141,10 @@ def test_run_system_with_custom_logging():
 
 # Testing other utility functions
 def test_return_help():
+    """
+    Test if return_help runs without errors
+    """
+
     output = _binary_c_bindings.return_help("M_1")
 
     print("function: test_return_help")
@@ -132,6 +153,10 @@ def test_return_help():
 
 
 def test_return_arglines():
+    """
+    Test if return_arglines runs without errors
+    """
+
     output = _binary_c_bindings.return_arglines()
 
     print("function: test_return_arglines")
@@ -140,6 +165,10 @@ def test_return_arglines():
 
 
 def test_return_help_all():
+    """
+    Test if return_help_all runs without errors
+    """
+
     output = _binary_c_bindings.return_help_all("M_1")
 
     print("function: test_return_help_all")
@@ -148,6 +177,10 @@ def test_return_help_all():
 
 
 def test_return_version_info():
+    """
+    Test if return_version_info runs without errors
+    """
+
     output = _binary_c_bindings.return_version_info()
 
     print("function: test_return_version_info")
@@ -157,6 +190,10 @@ def test_return_version_info():
 
 # Testing other functions
 def test_return_store():
+    """
+    Test if return_store runs without errors
+    """
+
     output = _binary_c_bindings.return_store_memaddr("")
 
     print("function: test_return_store")
diff --git a/binarycpython/tests/test_c_bindings.py b/binarycpython/tests/test_c_bindings.py
index 5d016dcdf030f7d2b95f93191d705f4ec5032b98..b2d97ee48e53d91fb9a816be4ac094aaa676ed83 100644
--- a/binarycpython/tests/test_c_bindings.py
+++ b/binarycpython/tests/test_c_bindings.py
@@ -1,3 +1,7 @@
+"""
+Unittests for the c-bindings
+"""
+
 import os
 import sys
 import time
@@ -15,7 +19,7 @@ from binarycpython.utils.functions import (
     merge_dicts,
     handle_ensemble_string_to_json,
     verbose_print,
-    extract_ensemble_json_from_string
+    extract_ensemble_json_from_string,
 )
 
 # https://docs.python.org/3/library/unittest.html
@@ -59,6 +63,7 @@ ensemble_filters_off {8} ensemble_filter_{9} 1 probability 0.1"
 
     return argstring
 
+
 #######################################################################################################################################################
 ### General run_system test
 #######################################################################################################################################################
@@ -70,6 +75,10 @@ class test_run_system(unittest.TestCase):
     """
 
     def test_output(self):
+        """
+        General test if run_system works
+        """
+
         m1 = 15.0  # Msun
         m2 = 14.0  # Msun
         separation = 0  # 0 = ignored, use period
@@ -107,12 +116,17 @@ class test_return_store_memaddr(unittest.TestCase):
     """
 
     def test_return_store_memaddr(self):
+        """
+        Test to see if the memory address is returned properly
+        """
+
         output = _binary_c_bindings.return_store_memaddr()
 
         # print("function: test_return_store")
         # print("store memory adress:")
         # print(textwrap.indent(str(output), "\t"))
 
+        self.assertTrue(isinstance(output, int))
         self.assertNotEqual(output, -1, "memory adress not created properly")
 
         # TODO: check if we can built in some signal for how successful this was.
@@ -130,6 +144,9 @@ class TestEnsemble(unittest.TestCase):
     """
 
     def __init__(self, *args, **kwargs):
+        """
+        init function
+        """
         super(TestEnsemble, self).__init__(*args, **kwargs)
 
     def test_return_persistent_data_memaddr(self):
@@ -146,7 +163,7 @@ class TestEnsemble(unittest.TestCase):
 
     def test_minimal_ensemble_output(self):
         """
-        test_case to check if the ensemble output is correctly output 
+        test_case to check if the ensemble output is correctly output
         """
 
         m1 = 2  # Msun
@@ -154,7 +171,10 @@ class TestEnsemble(unittest.TestCase):
 
         # Direct output commands
         argstring_1 = return_argstring(
-            m1=m1, m2=m2, ensemble_filter="STELLAR_TYPE_COUNTS", defer_ensemble=0 # no defer to memory location. just output it
+            m1=m1,
+            m2=m2,
+            ensemble_filter="STELLAR_TYPE_COUNTS",
+            defer_ensemble=0,  # no defer to memory location. just output it
         )
 
         output_1 = _binary_c_bindings.run_system(argstring=argstring_1)
@@ -164,7 +184,7 @@ class TestEnsemble(unittest.TestCase):
 
         test_json = extract_ensemble_json_from_string(output_1)
         self.assertIn("number_counts", test_json)
-        self.assertNotEqual(test_json["number_counts"], {})        
+        self.assertNotEqual(test_json["number_counts"], {})
 
     def test_minimal_ensemble_output_defer(self):
         """
@@ -178,17 +198,25 @@ class TestEnsemble(unittest.TestCase):
 
         # Direct output commands
         argstring_1 = return_argstring(
-            m1=m1, m2=m2, orbital_period=1000000000, ensemble_filter="STELLAR_TYPE_COUNTS", defer_ensemble=1 # no defer to memory location. just output it
+            m1=m1,
+            m2=m2,
+            orbital_period=1000000000,
+            ensemble_filter="STELLAR_TYPE_COUNTS",
+            defer_ensemble=1,  # no defer to memory location. just output it
         )
 
-        output_1 = _binary_c_bindings.run_system(argstring=argstring_1, persistent_data_memaddr=persistent_data_memaddr)
+        output_1 = _binary_c_bindings.run_system(
+            argstring=argstring_1, persistent_data_memaddr=persistent_data_memaddr
+        )
 
-        # 
-        self.assertNotIn('ENSEMBLE_JSON', output_1)
+        #
+        self.assertNotIn("ENSEMBLE_JSON", output_1)
 
         # free memory and output the stuff.
-        raw_json_output = _binary_c_bindings.free_persistent_data_memaddr_and_return_json_output(
-            persistent_data_memaddr
+        raw_json_output = (
+            _binary_c_bindings.free_persistent_data_memaddr_and_return_json_output(
+                persistent_data_memaddr
+            )
         )
         ensemble_json_output = extract_ensemble_json_from_string(raw_json_output)
 
@@ -205,59 +233,87 @@ class TestEnsemble(unittest.TestCase):
 
         # Direct output commands
         argstring_1 = return_argstring(
-            m1=m1, m2=m2, orbital_period=1000000000, ensemble_filter="STELLAR_TYPE_COUNTS" # no defer to memory location. just output it
+            m1=m1,
+            m2=m2,
+            orbital_period=1000000000,
+            ensemble_filter="STELLAR_TYPE_COUNTS",  # no defer to memory location. just output it
         )
         argstring_2 = return_argstring(
-            m1=10, m2=m2, orbital_period=1000000000, ensemble_filter="STELLAR_TYPE_COUNTS" # no defer to memory location. just output it
+            m1=10,
+            m2=m2,
+            orbital_period=1000000000,
+            ensemble_filter="STELLAR_TYPE_COUNTS",  # no defer to memory location. just output it
         )
 
-        # 
+        #
         output_1 = _binary_c_bindings.run_system(argstring=argstring_1)
         output_2 = _binary_c_bindings.run_system(argstring=argstring_2)
 
-        # 
+        #
         output_json_1 = extract_ensemble_json_from_string(output_1)
         output_json_2 = extract_ensemble_json_from_string(output_2)
 
-        # 
+        #
         merged_dict = merge_dicts(output_json_1, output_json_2)
 
-        self.assertIn('number_counts', merged_dict)
-        self.assertIn('stellar_type', merged_dict['number_counts'])
+        self.assertIn("number_counts", merged_dict)
+        self.assertIn("stellar_type", merged_dict["number_counts"])
 
-        for key in output_json_1['number_counts']['stellar_type']['0']:
-            self.assertIn(key, merged_dict['number_counts']['stellar_type']['0'])
+        for key in output_json_1["number_counts"]["stellar_type"]["0"]:
+            self.assertIn(key, merged_dict["number_counts"]["stellar_type"]["0"])
 
-        for key in output_json_2['number_counts']['stellar_type']['0']:
-            self.assertIn(key, merged_dict['number_counts']['stellar_type']['0'])
+        for key in output_json_2["number_counts"]["stellar_type"]["0"]:
+            self.assertIn(key, merged_dict["number_counts"]["stellar_type"]["0"])
 
         # compare stuff:
-        self.assertLess(np.abs(output_json_1['number_counts']['stellar_type']['0']['CHeB'] + output_json_2['number_counts']['stellar_type']['0']['CHeB'] -  merged_dict['number_counts']['stellar_type']['0']['CHeB']), 1e-10)
-        self.assertLess(np.abs(output_json_1['number_counts']['stellar_type']['0']['MS'] + output_json_2['number_counts']['stellar_type']['0']['MS'] -  merged_dict['number_counts']['stellar_type']['0']['MS']), 1e-10)
+        self.assertLess(
+            np.abs(
+                output_json_1["number_counts"]["stellar_type"]["0"]["CHeB"]
+                + output_json_2["number_counts"]["stellar_type"]["0"]["CHeB"]
+                - merged_dict["number_counts"]["stellar_type"]["0"]["CHeB"]
+            ),
+            1e-10,
+        )
+        self.assertLess(
+            np.abs(
+                output_json_1["number_counts"]["stellar_type"]["0"]["MS"]
+                + output_json_2["number_counts"]["stellar_type"]["0"]["MS"]
+                - merged_dict["number_counts"]["stellar_type"]["0"]["MS"]
+            ),
+            1e-10,
+        )
 
     def test_compare_added_systems_with_double_deferred_systems(self):
-        # test to run 2 systems without deferring, and merging them manually. Then run 2 systems with defer and then output them.
+        """
+        test to run 2 systems without deferring, and merging them manually. Then run 2 systems with defer and then output them.
+        """
 
         m1 = 2  # Msun
         m2 = 0.1  # Msun
 
         # Direct output commands
         argstring_1 = return_argstring(
-            m1=m1, m2=m2, orbital_period=1000000000, ensemble_filter="STELLAR_TYPE_COUNTS" # no defer to memory location. just output it
+            m1=m1,
+            m2=m2,
+            orbital_period=1000000000,
+            ensemble_filter="STELLAR_TYPE_COUNTS",  # no defer to memory location. just output it
         )
         argstring_2 = return_argstring(
-            m1=10, m2=m2, orbital_period=1000000000, ensemble_filter="STELLAR_TYPE_COUNTS" # no defer to memory location. just output it
+            m1=10,
+            m2=m2,
+            orbital_period=1000000000,
+            ensemble_filter="STELLAR_TYPE_COUNTS",  # no defer to memory location. just output it
         )
 
-        # 
+        #
         output_1 = _binary_c_bindings.run_system(argstring=argstring_1)
         output_2 = _binary_c_bindings.run_system(argstring=argstring_2)
 
-        # 
+        #
         output_json_1 = extract_ensemble_json_from_string(output_1)
         output_json_2 = extract_ensemble_json_from_string(output_2)
 
-        # 
+        #
         merged_dict = merge_dicts(output_json_1, output_json_2)
 
         ###############################
@@ -265,29 +321,53 @@ class TestEnsemble(unittest.TestCase):
         persistent_data_memaddr = _binary_c_bindings.return_persistent_data_memaddr()
 
         argstring_1_deferred = return_argstring(
-            m1=m1, m2=m2, orbital_period=1000000000, ensemble_filter="STELLAR_TYPE_COUNTS", defer_ensemble=1 # no defer to memory location. just output it
+            m1=m1,
+            m2=m2,
+            orbital_period=1000000000,
+            ensemble_filter="STELLAR_TYPE_COUNTS",
+            defer_ensemble=1,  # no defer to memory location. just output it
         )
         argstring_2_deferred = return_argstring(
-            m1=10, m2=m2, orbital_period=1000000000, ensemble_filter="STELLAR_TYPE_COUNTS", defer_ensemble=1 # no defer to memory location. just output it
+            m1=10,
+            m2=m2,
+            orbital_period=1000000000,
+            ensemble_filter="STELLAR_TYPE_COUNTS",
+            defer_ensemble=1,  # no defer to memory location. just output it
         )
 
         # run
-        _ = _binary_c_bindings.run_system(argstring=argstring_1_deferred, persistent_data_memaddr=persistent_data_memaddr)
-        _ = _binary_c_bindings.run_system(argstring=argstring_2_deferred, persistent_data_memaddr=persistent_data_memaddr)
+        _ = _binary_c_bindings.run_system(
+            argstring=argstring_1_deferred,
+            persistent_data_memaddr=persistent_data_memaddr,
+        )
+        _ = _binary_c_bindings.run_system(
+            argstring=argstring_2_deferred,
+            persistent_data_memaddr=persistent_data_memaddr,
+        )
 
         # output
-        raw_json_output = _binary_c_bindings.free_persistent_data_memaddr_and_return_json_output(
-            persistent_data_memaddr
+        raw_json_output = (
+            _binary_c_bindings.free_persistent_data_memaddr_and_return_json_output(
+                persistent_data_memaddr
+            )
         )
         ensemble_json_output = extract_ensemble_json_from_string(raw_json_output)
 
         # CHeck all keys are present
-        for key in merged_dict['number_counts']['stellar_type']['0']:
-            self.assertIn(key, ensemble_json_output['number_counts']['stellar_type']['0'])
+        for key in merged_dict["number_counts"]["stellar_type"]["0"]:
+            self.assertIn(
+                key, ensemble_json_output["number_counts"]["stellar_type"]["0"]
+            )
 
         # Check if they are of the same value
-        for key in merged_dict['number_counts']['stellar_type']['0']:
-            self.assertLess(np.abs(merged_dict['number_counts']['stellar_type']['0'][key]-ensemble_json_output['number_counts']['stellar_type']['0'][key]), 1e-10)
+        for key in merged_dict["number_counts"]["stellar_type"]["0"]:
+            self.assertLess(
+                np.abs(
+                    merged_dict["number_counts"]["stellar_type"]["0"][key]
+                    - ensemble_json_output["number_counts"]["stellar_type"]["0"][key]
+                ),
+                1e-10,
+            )
 
     def test_combine_with_empty_json(self):
         """
@@ -298,7 +378,10 @@ class TestEnsemble(unittest.TestCase):
         m2 = 0.1  # Msun
 
         argstring_1 = return_argstring(
-            m1=m1, m2=m2, orbital_period=1000000000, ensemble_filter="STELLAR_TYPE_COUNTS" # no defer to memory location. just output it
+            m1=m1,
+            m2=m2,
+            orbital_period=1000000000,
+            ensemble_filter="STELLAR_TYPE_COUNTS",  # no defer to memory location. just output it
         )
 
         output_1 = _binary_c_bindings.run_system(argstring=argstring_1)
@@ -326,7 +409,7 @@ class TestEnsemble(unittest.TestCase):
 
         output_1 = _binary_c_bindings.run_system(argstring=argstring_1)
 
-        # 
+        #
         output_json_1 = extract_ensemble_json_from_string(output_1)
 
         keys = json_1.keys()
@@ -339,6 +422,7 @@ class TestEnsemble(unittest.TestCase):
         self.assertIn("distributions", keys)
         self.assertIn("scalars", keys)
 
+
 #######################################################################################################################################################
 ### ensemble tests
 #######################################################################################################################################################
diff --git a/binarycpython/tests/test_custom_logging.py b/binarycpython/tests/test_custom_logging.py
index a6159732764d519dffadf966273e9c9e2117fbf0..4bab0a4c7ab179d4df2048cb1ca0e32398bf585c 100644
--- a/binarycpython/tests/test_custom_logging.py
+++ b/binarycpython/tests/test_custom_logging.py
@@ -1,3 +1,7 @@
+"""
+Unittests for the custom_logging module
+"""
+
 import unittest
 
 from binarycpython.utils.custom_logging_functions import *
@@ -11,6 +15,9 @@ class test_custom_logging(unittest.TestCase):
     """
 
     def test_autogen_C_logging_code(self):
+        """
+        Tests for the autogeneration of a print statement from a dictionary. and then checking if the output is correct
+        """
 
         input_dict_1 = None
         output_1 = autogen_C_logging_code(input_dict_1, verbose=1)
@@ -36,6 +43,10 @@ class test_custom_logging(unittest.TestCase):
         self.assertEqual(output_3, None, msg="Output should be None")
 
     def test_binary_c_log_code(self):
+        """
+        Test to see if passing a print statement to the function results in correct binary_c output
+        """
+
         input_1 = "None"
         output_1 = binary_c_log_code(input_1, verbose=1)
         self.assertEqual(output_1, None, msg="Output should be None")
@@ -50,6 +61,10 @@ class test_custom_logging(unittest.TestCase):
         )
 
     def test_binary_c_write_log_code(self):
+        """
+        Tests to see if writing the code to a file and reading that out again is the same
+        """
+
         input_1 = '#pragma push_macro("MAX")\n#pragma push_macro("MIN")\n#undef MAX\n#undef MIN\n#include "binary_c.h"\n#include "RLOF/RLOF_prototypes.h"\n\n// add visibility __attribute__ ((visibility ("default"))) to it \nvoid binary_c_API_function custom_output_function(struct stardata_t * stardata);\nvoid binary_c_API_function custom_output_function(struct stardata_t * stardata)\n{\n    // struct stardata_t * stardata = (struct stardata_t *)x;\n    Printf("MY_STELLAR_DATA %g %g %g %g\\n",((double)stardata->model.time),((double)stardata->star[0].mass),((double)stardata->model.probability),((double)stardata->model.dt));;\n}\n\n#undef MAX \n#undef MIN\n#pragma pop_macro("MIN")\n#pragma pop_macro("MAX")    '
         binary_c_write_log_code(
             input_1,
@@ -70,6 +85,10 @@ class test_custom_logging(unittest.TestCase):
         self.assertEqual(repr(input_1), content_file, msg="Contents are not similar")
 
     def test_from_binary_c_config(self):
+        """
+        Tests for interfacing with binary_c-config
+        """
+
         # not going to test everything here, just the version and any output at all
 
         BINARY_C_DIR = os.getenv("BINARY_C")
@@ -90,6 +109,10 @@ class test_custom_logging(unittest.TestCase):
         self.assertEqual(output_2, "2.1.7", msg="binary_c version doesnt match")
 
     def test_return_compilation_dict(self):
+        """
+        Tests to see if the compilation dictionary contains the correct keys
+        """
+
         # Just going to check whether the dictionary has the components it needs
         # TODO: check whether we need to make this better
 
@@ -102,6 +125,10 @@ class test_custom_logging(unittest.TestCase):
         self.assertTrue("inc" in output)
 
     def test_create_and_load_logging_function(self):
+        """
+        Tests checking the output of create_and_load_logging_function. Should return a valid memory int and a correct filename
+        """
+
         #
         input_1 = '#pragma push_macro("MAX")\n#pragma push_macro("MIN")\n#undef MAX\n#undef MIN\n#include "binary_c.h"\n#include "RLOF/RLOF_prototypes.h"\n\n// add visibility __attribute__ ((visibility ("default"))) to it \nvoid binary_c_API_function custom_output_function(struct stardata_t * stardata);\nvoid binary_c_API_function custom_output_function(struct stardata_t * stardata)\n{\n    // struct stardata_t * stardata = (struct stardata_t *)x;\n    Printf("MY_STELLAR_DATA %g %g %g %g\\n",((double)stardata->model.time),((double)stardata->star[0].mass),((double)stardata->model.probability),((double)stardata->model.dt));;\n}\n\n#undef MAX \n#undef MIN\n#pragma pop_macro("MIN")\n#pragma pop_macro("MAX")    '
         output_1 = create_and_load_logging_function(input_1, verbose=1)
diff --git a/binarycpython/tests/test_distributions.py b/binarycpython/tests/test_distributions.py
index ac6e6ac9b2622c224f00dc8a29d0cb14e1e86eb6..def8ba24139bf244c114eaa73eae8abb8873d969 100644
--- a/binarycpython/tests/test_distributions.py
+++ b/binarycpython/tests/test_distributions.py
@@ -1,6 +1,7 @@
 """
 Module containing the unittests for the distribution functions. 
 """
+
 import unittest
 
 from binarycpython.utils.distribution_functions import *
@@ -15,8 +16,10 @@ class TestDistributions(unittest.TestCase):
     """
 
     def __init__(self, *args, **kwargs):
+        """
+        init
+        """
         super(TestDistributions, self).__init__(*args, **kwargs)
-        # self.gen_stubs()
 
         self.mass_list = [0.1, 0.2, 1, 10, 15, 50]
         self.logper_list = [-2, -0.5, 1.6, 2.5, 5.3, 10]
diff --git a/binarycpython/tests/test_functions.py b/binarycpython/tests/test_functions.py
index 4a0fe570ed6f21cc66110699826314080e7fe3bc..a3ad3d9013f4cd289541e493a15f7698374bd4ab 100644
--- a/binarycpython/tests/test_functions.py
+++ b/binarycpython/tests/test_functions.py
@@ -1,3 +1,7 @@
+"""
+Unittests for the functions module
+"""
+
 import unittest
 import tempfile
 from binarycpython.utils.functions import *
@@ -19,10 +23,20 @@ binary_c_temp_dir = temp_dir()
 
 
 class dummy:
+    """
+    Dummy class to be used in the merge_dicts
+    """
+
     def __init__(self, name):
+        """
+        init
+        """
         self.name = name
 
     def __str__(self):
+        """
+        str returns self.name
+        """
         return self.name
 
 
@@ -32,9 +46,16 @@ class test_verbose_print(unittest.TestCase):
     """
 
     def test_print(self):
+        """
+        Tests whether something gets printed
+        """
         verbose_print("test1", 1, 0)
 
     def test_not_print(self):
+        """
+        Tests whether nothing gets printed.
+        """
+
         verbose_print("test1", 0, 1)
 
 
@@ -44,6 +65,10 @@ class test_remove_file(unittest.TestCase):
     """
 
     def test_remove_file(self):
+        """
+        Test to remove a file
+        """
+
         with open(
             os.path.join(binary_c_temp_dir, "test_remove_file_file.txt"), "w"
         ) as f:
@@ -52,6 +77,10 @@ class test_remove_file(unittest.TestCase):
         remove_file(os.path.join(binary_c_temp_dir, "test_remove_file_file.txt"))
 
     def test_remove_nonexisting_file(self):
+        """
+        Test to try to remove a nonexistent file
+        """
+
         file = os.path.join(binary_c_temp_dir, "test_remove_nonexistingfile_file.txt")
 
         remove_file(file)
@@ -63,6 +92,10 @@ class test_temp_dir(unittest.TestCase):
     """
 
     def test_create_temp_dir(self):
+        """
+        Test making a temp directory and comparing that to what it should be
+        """
+
         binary_c_temp_dir = temp_dir()
         general_temp_dir = tempfile.gettempdir()
 
@@ -80,6 +113,10 @@ class test_create_hdf5(unittest.TestCase):
     """
 
     def test_1(self):
+        """
+        Test that creates files, packs them in a hdf5 file and checks the contents
+        """
+
         testdir = os.path.join(binary_c_temp_dir, "test_create_hdf5")
         os.makedirs(testdir, exist_ok=True)
 
@@ -109,6 +146,10 @@ class test_return_binary_c_version_info(unittest.TestCase):
     """
 
     def test_not_parsed(self):
+        """
+        Test for the raw version_info output
+        """
+
         version_info = return_binary_c_version_info()
 
         self.assertTrue(isinstance(version_info, str))
@@ -117,6 +158,10 @@ class test_return_binary_c_version_info(unittest.TestCase):
         self.assertIn("SIGMA_THOMPSON", version_info)
 
     def test_parsed(self):
+        """
+        Test for the parsed version_info
+        """
+
         # also tests the parse_version_info indirectly
         version_info_parsed = return_binary_c_version_info(parsed=True)
 
@@ -137,6 +182,10 @@ class test_parse_binary_c_version_info(unittest.TestCase):
     """
 
     def test_1(self):
+        """
+        Test for the parsed version info, more detailed
+        """
+
         info = return_binary_c_version_info()
         parsed_info = parse_binary_c_version_info(info)
 
@@ -155,16 +204,21 @@ class test_parse_binary_c_version_info(unittest.TestCase):
         self.assertIsNotNone(parsed_info["dt_limits"])
         self.assertIsNotNone(parsed_info["miscellaneous"])
 
-        if parsed_info['macros']['NUCSYN'] == 'on':
+        if parsed_info["macros"]["NUCSYN"] == "on":
             self.assertIsNotNone(parsed_info["isotopes"])
             self.assertIsNotNone(parsed_info["nucleosynthesis_sources"])
 
+
 class test_output_lines(unittest.TestCase):
     """
     Unittests for function output_lines
     """
 
     def test_1(self):
+        """
+        Test to check if the shape and contents of output_lines is correct
+        """
+
         example_text = "hallo\ntest\n123"
         output_1 = output_lines(example_text)
 
@@ -180,6 +234,10 @@ class test_example_parse_output(unittest.TestCase):
     """
 
     def test_normal_output(self):
+        """
+        Test checking if parsed output with a custom logging line works correctly
+        """
+
         # generate logging lines. Here you can choose whatever you want to have logged, and with what header
         # You can also decide to `write` your own logging_line, which allows you to write a more complex logging statement with conditionals.
         logging_line = 'Printf("MY_STELLAR_DATA time=%g mass=%g\\n", stardata->model.time, stardata->star[0].mass)'
@@ -205,6 +263,10 @@ class test_example_parse_output(unittest.TestCase):
         self.assertTrue(len(parsed_output["time"]) > 0)
 
     def test_mismatch_output(self):
+        """
+        Test checking if parsed output with a mismatching headerline doesn't have any contents
+        """
+
         # generate logging lines. Here you can choose whatever you want to have logged, and with what header
         # You can also decide to `write` your own logging_line, which allows you to write a more complex logging statement with conditionals.
         logging_line = 'Printf("MY_STELLAR_DATA time=%g mass=%g\\n", stardata->model.time, stardata->star[0].mass)'
@@ -232,6 +294,10 @@ class test_get_defaults(unittest.TestCase):
     """
 
     def test_no_filter(self):
+        """
+        Test checking if the defaults without filtering contains non-filtered content
+        """
+
         output_1 = get_defaults()
 
         self.assertTrue(isinstance(output_1, dict))
@@ -241,6 +307,10 @@ class test_get_defaults(unittest.TestCase):
         self.assertIn("use_fixed_timestep_%d", output_1.keys())
 
     def test_filter(self):
+        """
+        Test checking filtering works correctly
+        """
+
         # Also tests the filter_arg_dict indirectly
         output_1 = get_defaults(filter_values=True)
 
@@ -253,10 +323,14 @@ class test_get_defaults(unittest.TestCase):
 
 class test_get_arg_keys(unittest.TestCase):
     """
-    Unittests for function
+    Unittests for function get_arg_keys
     """
 
     def test_1(self):
+        """
+        Test checking if some of the keys are indeed in the list
+        """
+
         output_1 = get_arg_keys()
 
         self.assertTrue(isinstance(output_1, list))
@@ -272,16 +346,28 @@ class test_create_arg_string(unittest.TestCase):
     """
 
     def test_default(self):
+        """
+        Test checking if the argstring is correct
+        """
+
         input_dict = {"separation": 40000, "M_1": 10}
         argstring = create_arg_string(input_dict)
         self.assertEqual(argstring, "separation 40000 M_1 10")
 
     def test_sort(self):
+        """
+        Test checking if the argstring with a different ordered dict is also in a different order
+        """
+
         input_dict = {"M_1": 10, "separation": 40000}
         argstring = create_arg_string(input_dict, sort=True)
         self.assertEqual(argstring, "M_1 10 separation 40000")
 
-    def test_sort(self):
+    def test_filtered(self):
+        """
+        Test if filtering works
+        """
+
         input_dict = {"M_1": 10, "separation": 40000, "list_args": "NULL"}
         argstring = create_arg_string(input_dict, filter_values=True)
         self.assertEqual(argstring, "M_1 10 separation 40000")
@@ -304,10 +390,18 @@ class test_get_help(unittest.TestCase):
         )
 
     def test_no_input(self):
+        """
+        Test if the result is None if called without input
+        """
+
         output = get_help()
         self.assertIsNone(output)
 
     def test_wrong_input(self):
+        """
+        Test if the result is None if called with an unknown input
+        """
+
         output = get_help("kaasblokjes")
         self.assertIsNone(output)
 
@@ -373,6 +467,10 @@ class test_make_build_text(unittest.TestCase):
     """
 
     def test_output(self):
+        """
+        Test checking the contents of the build_text
+        """
+
         build_text = make_build_text()
 
         # Remove the things
@@ -395,6 +493,10 @@ class test_write_binary_c_parameter_descriptions_to_rst_file(unittest.TestCase):
     """
 
     def test_bad_outputname(self):
+        """
+        Test checking if None is returned when a bad input name is provided
+        """
+
         output_name = os.path.join(
             binary_c_temp_dir,
             "test_write_binary_c_parameter_descriptions_to_rst_file_test_1.txt",
@@ -403,6 +505,10 @@ class test_write_binary_c_parameter_descriptions_to_rst_file(unittest.TestCase):
         self.assertIsNone(output_1)
 
     def test_checkfile(self):
+        """
+        Test checking if the file is created correctly
+        """
+
         output_name = os.path.join(
             binary_c_temp_dir,
             "test_write_binary_c_parameter_descriptions_to_rst_file_test_1.rst",
@@ -413,10 +519,14 @@ class test_write_binary_c_parameter_descriptions_to_rst_file(unittest.TestCase):
 
 class test_inspect_dict(unittest.TestCase):
     """
-    Unittests for function
+    Unittests for function inspect_dict
     """
 
     def test_compare_dict(self):
+        """
+        Test checking if inspect_dict returns the correct structure by comparing it to known value
+        """
+
         input_dict = {
             "int": 1,
             "float": 1.2,
@@ -434,7 +544,11 @@ class test_inspect_dict(unittest.TestCase):
         }
         self.assertTrue(compare_dict == output_dict)
 
-    def test_compare_dict(self):
+    def test_compare_dict_with_print(self):
+        """
+        Test checking output is printed
+        """
+
         input_dict = {
             "int": 1,
             "float": 1.2,
@@ -447,10 +561,14 @@ class test_inspect_dict(unittest.TestCase):
 
 class test_merge_dicts(unittest.TestCase):
     """
-    Unittests for function
+    Unittests for function merge_dicts
     """
 
     def test_empty(self):
+        """
+        Test merging an empty dict
+        """
+
         input_dict = {
             "int": 1,
             "float": 1.2,
@@ -463,12 +581,20 @@ class test_merge_dicts(unittest.TestCase):
         self.assertTrue(output_dict == input_dict)
 
     def test_unequal_types(self):
+        """
+        Test merging unequal types: should raise ValueError
+        """
+
         dict_1 = {"input": 10}
         dict_2 = {"input": "hello"}
 
         self.assertRaises(ValueError, merge_dicts, dict_1, dict_2)
 
     def test_bools(self):
+        """
+        Test merging dict with booleans
+        """
+
         dict_1 = {"bool": True}
         dict_2 = {"bool": False}
         output_dict = merge_dicts(dict_1, dict_2)
@@ -477,6 +603,10 @@ class test_merge_dicts(unittest.TestCase):
         self.assertTrue(output_dict["bool"])
 
     def test_ints(self):
+        """
+        Test merging dict with ints
+        """
+
         dict_1 = {"int": 2}
         dict_2 = {"int": 1}
         output_dict = merge_dicts(dict_1, dict_2)
@@ -485,6 +615,10 @@ class test_merge_dicts(unittest.TestCase):
         self.assertEqual(output_dict["int"], 3)
 
     def test_floats(self):
+        """
+        Test merging dict with floats
+        """
+
         dict_1 = {"float": 4.5}
         dict_2 = {"float": 4.6}
         output_dict = merge_dicts(dict_1, dict_2)
@@ -493,6 +627,10 @@ class test_merge_dicts(unittest.TestCase):
         self.assertEqual(output_dict["float"], 9.1)
 
     def test_lists(self):
+        """
+        Test merging dict with lists
+        """
+
         dict_1 = {"list": [1, 2]}
         dict_2 = {"list": [3, 4]}
         output_dict = merge_dicts(dict_1, dict_2)
@@ -501,6 +639,10 @@ class test_merge_dicts(unittest.TestCase):
         self.assertEqual(output_dict["list"], [1, 2, 3, 4])
 
     def test_dicts(self):
+        """
+        Test merging dict with dicts
+        """
+
         dict_1 = {"dict": {"same": 1, "other_1": 2.0}}
         dict_2 = {"dict": {"same": 2, "other_2": [4.0]}}
         output_dict = merge_dicts(dict_1, dict_2)
@@ -511,6 +653,10 @@ class test_merge_dicts(unittest.TestCase):
         )
 
     def test_unsupported(self):
+        """
+        Test merging dict with unsupported types; should raise ValueError
+        """
+
         dict_1 = {"new": dummy("david")}
         dict_2 = {"new": dummy("gio")}
 
@@ -524,11 +670,19 @@ class test_binaryc_json_serializer(unittest.TestCase):
     """
 
     def test_not_function(self):
+        """
+        Test passing an object that doesn't get turned into a string
+        """
+
         stringo = "hello"
         output = binaryc_json_serializer(stringo)
         self.assertTrue(stringo == output)
 
     def test_function(self):
+        """
+        Test passing an object that gets turned into a string: a function
+        """
+
         string_of_function = str(os.path.isfile)
         output = binaryc_json_serializer(os.path.isfile)
         self.assertTrue(string_of_function == output)
@@ -540,6 +694,10 @@ class test_handle_ensemble_string_to_json(unittest.TestCase):
     """
 
     def test_1(self):
+        """
+        Test passing string representation of a dictionary.
+        """
+
         string_of_function = str(os.path.isfile)
         input_string = '{"ding": 10, "list_example": [1,2,3]}'
         output_dict = handle_ensemble_string_to_json(input_string)
diff --git a/binarycpython/tests/test_grid.py b/binarycpython/tests/test_grid.py
index 7aec47622301b68957036d54a1725e4f70d14155..895bd08fdc9cd2ddf2f10641ea3c58b48ae12e0b 100644
--- a/binarycpython/tests/test_grid.py
+++ b/binarycpython/tests/test_grid.py
@@ -14,31 +14,42 @@ import datetime
 import numpy as np
 
 from binarycpython.utils.grid import Population
-from binarycpython.utils.functions import temp_dir, extract_ensemble_json_from_string, merge_dicts, remove_file
+from binarycpython.utils.functions import (
+    temp_dir,
+    extract_ensemble_json_from_string,
+    merge_dicts,
+    remove_file,
+)
 from binarycpython.utils.custom_logging_functions import binary_c_log_code
 
 binary_c_temp_dir = temp_dir()
 
+
 def parse_function_test_grid_evolve_2_threads_with_custom_logging(self, output):
     """
     Simple parse function that directly appends all the output to a file
     """
 
-    # Get some information from the 
-    data_dir = self.custom_options['data_dir']
+    # Get the data directory from the population's custom options
+    data_dir = self.custom_options["data_dir"]
 
     # make outputfilename
-    output_filename = os.path.join(data_dir, "test_grid_evolve_2_threads_with_custom_logging_outputfile_population_{}_thread_{}.dat".format(self.grid_options['_population_id'], self.process_ID))
+    output_filename = os.path.join(
+        data_dir,
+        "test_grid_evolve_2_threads_with_custom_logging_outputfile_population_{}_thread_{}.dat".format(
+            self.grid_options["_population_id"], self.process_ID
+        ),
+    )
 
     # Check directory, make if necessary
     os.makedirs(data_dir, exist_ok=True)
 
-    if not os.path.exists(output_filename):        
-        with open(output_filename, 'w') as first_f:
-            first_f.write(output+'\n')
+    if not os.path.exists(output_filename):
+        with open(output_filename, "w") as first_f:
+            first_f.write(output + "\n")
     else:
-        with open(output_filename, 'a') as first_f:
-            first_f.write(output+'\n')
+        with open(output_filename, "a") as first_f:
+            first_f.write(output + "\n")
 
 
 # class test_(unittest.TestCase):
@@ -61,6 +72,9 @@ class test_Population(unittest.TestCase):
     """
 
     def test_setup(self):
+        """
+        Unittests for function _setup
+        """
         test_pop = Population()
 
         self.assertTrue("orbital_period" in test_pop.defaults)
@@ -74,6 +88,10 @@ class test_Population(unittest.TestCase):
         self.assertTrue(isinstance(test_pop.grid_options["_main_pid"], int))
 
     def test_set(self):
+        """
+        Unittests for function set
+        """
+
         test_pop = Population()
         test_pop.set(amt_cores=2)
         test_pop.set(M_1=10)
@@ -91,6 +109,10 @@ class test_Population(unittest.TestCase):
         self.assertTrue(test_pop.grid_options["amt_cores"] == 2)
 
     def test_cmdline(self):
+        """
+        Unittests for function parse_cmdline
+        """
+
         # copy old sys.argv values
         prev_sysargv = sys.argv.copy()
 
@@ -233,7 +255,7 @@ class test_Population(unittest.TestCase):
         self.assertIsNotNone(binary_c_version_info["dt_limits"])
         self.assertIsNotNone(binary_c_version_info["miscellaneous"])
 
-        if binary_c_version_info['macros']['NUCSYN'] == 'on':
+        if binary_c_version_info["macros"]["NUCSYN"] == "on":
             self.assertIsNotNone(binary_c_version_info["isotopes"])
             self.assertIsNotNone(binary_c_version_info["nucleosynthesis_sources"])
 
@@ -395,31 +417,41 @@ class test_Population(unittest.TestCase):
         """
 
         test_pop = Population()
-        test_pop.set(M_1=10, M_2=5, orbital_period=100000, metallicty=0.02, max_evolution_time = 15000)
+        test_pop.set(
+            M_1=10,
+            M_2=5,
+            orbital_period=100000,
+            metallicty=0.02,
+            max_evolution_time=15000,
+        )
 
         test_pop.set(C_logging_code=CUSTOM_LOGGING_STRING_MASSES)
 
         output = test_pop.evolve_single()
 
-        # 
-        self.assertTrue(len(output.splitlines())>1)
-        self.assertIn('TEST_CUSTOM_LOGGING_1', output)
-
+        #
+        self.assertTrue(len(output.splitlines()) > 1)
+        self.assertIn("TEST_CUSTOM_LOGGING_1", output)
 
-        # 
-        custom_logging_dict = {
-            'TEST_CUSTOM_LOGGING_2': ['star[0].mass', 'model.time']
-        }
+        #
+        custom_logging_dict = {"TEST_CUSTOM_LOGGING_2": ["star[0].mass", "model.time"]}
         test_pop_2 = Population()
-        test_pop_2.set(M_1=10, M_2=5, orbital_period=100000, metallicty=0.02, max_evolution_time = 15000)
+        test_pop_2.set(
+            M_1=10,
+            M_2=5,
+            orbital_period=100000,
+            metallicty=0.02,
+            max_evolution_time=15000,
+        )
 
         test_pop_2.set(C_auto_logging=custom_logging_dict)
 
         output_2 = test_pop_2.evolve_single()
 
-        # 
-        self.assertTrue(len(output_2.splitlines())>1)
-        self.assertIn('TEST_CUSTOM_LOGGING_2', output_2)
+        #
+        self.assertTrue(len(output_2.splitlines()) > 1)
+        self.assertIn("TEST_CUSTOM_LOGGING_2", output_2)
+
 
 class test_grid_evolve(unittest.TestCase):
     """
@@ -427,10 +459,14 @@ class test_grid_evolve(unittest.TestCase):
     """
 
     def test_grid_evolve_1_thread(self):
-        # test to see if 1 thread does all the systems
+        """
+        Unittests to see if 1 thread does all the systems
+        """
 
         test_pop_evolve_1_thread = Population()
-        test_pop_evolve_1_thread.set(amt_cores=1, verbosity=1, M_2=1, orbital_period=100000)
+        test_pop_evolve_1_thread.set(
+            amt_cores=1, verbosity=1, M_2=1, orbital_period=100000
+        )
 
         resolution = {"M_1": 10}
 
@@ -450,11 +486,15 @@ class test_grid_evolve(unittest.TestCase):
         )
 
         analytics = test_pop_evolve_1_thread.evolve()
-        self.assertLess(np.abs(analytics['total_probability']-0.1503788456014623), 1e-10)
-        self.assertTrue(analytics['total_count']==10)
+        self.assertLess(
+            np.abs(analytics["total_probability"] - 0.1503788456014623), 1e-10
+        )
+        self.assertTrue(analytics["total_count"] == 10)
 
     def test_grid_evolve_2_threads(self):
-        # test to see if 1 thread does all the systems
+        """
+        Unittests to see if multiple threads handle all the systems correctly
+        """
 
         test_pop = Population()
         test_pop.set(amt_cores=2, verbosity=1, M_2=1, orbital_period=100000)
@@ -477,25 +517,31 @@ class test_grid_evolve(unittest.TestCase):
         )
 
         analytics = test_pop.evolve()
-        self.assertLess(np.abs(analytics['total_probability']-0.1503788456014623), 1e-10) # 
-        self.assertTrue(analytics['total_count']==10)
+        self.assertLess(
+            np.abs(analytics["total_probability"] - 0.1503788456014623), 1e-10
+        )  #
+        self.assertTrue(analytics["total_count"] == 10)
 
-    def test_grid_evolve_1_threads_with_custom_logging(self):
-        # test to see if 1 thread does all the systems
+    def test_grid_evolve_2_threads_with_custom_logging(self):
+        """
+        Unittests to see if multiple threads do the custom logging correctly
+        """
 
-        data_dir_value = os.path.join(binary_c_temp_dir, 'grid_tests')
+        data_dir_value = os.path.join(binary_c_temp_dir, "grid_tests")
         amt_cores_value = 2
         custom_logging_string = 'Printf("MY_STELLAR_DATA_TEST_EXAMPLE %g %g %g %g\\n",((double)stardata->model.time),((double)stardata->star[0].mass),((double)stardata->model.probability),((double)stardata->model.dt));'
 
         test_pop = Population()
 
-        test_pop.set(amt_cores=amt_cores_value, 
-            verbosity=1, 
-            M_2=1, 
-            orbital_period=100000, 
+        test_pop.set(
+            amt_cores=amt_cores_value,
+            verbosity=1,
+            M_2=1,
+            orbital_period=100000,
             data_dir=data_dir_value,
-            C_logging_code=custom_logging_string, # input it like this. 
-            parse_function=parse_function_test_grid_evolve_2_threads_with_custom_logging)
+            C_logging_code=custom_logging_string,  # input it like this.
+            parse_function=parse_function_test_grid_evolve_2_threads_with_custom_logging,
+        )
         test_pop.set(ensemble=0)
         resolution = {"M_1": 2}
 
@@ -515,12 +561,20 @@ class test_grid_evolve(unittest.TestCase):
         )
 
         analytics = test_pop.evolve()
-        output_names = [os.path.join(data_dir_value, "test_grid_evolve_2_threads_with_custom_logging_outputfile_population_{}_thread_{}.dat".format(analytics['population_name'], thread_id)) for thread_id in range(amt_cores_value)]
+        output_names = [
+            os.path.join(
+                data_dir_value,
+                "test_grid_evolve_2_threads_with_custom_logging_outputfile_population_{}_thread_{}.dat".format(
+                    analytics["population_name"], thread_id
+                ),
+            )
+            for thread_id in range(amt_cores_value)
+        ]
 
         for output_name in output_names:
             self.assertTrue(os.path.isfile(output_name))
 
-            with open(output_name, 'r') as f:
+            with open(output_name, "r") as f:
                 output_string = f.read()
 
             self.assertIn("MY_STELLAR_DATA_TEST_EXAMPLE", output_string)
@@ -528,7 +582,9 @@ class test_grid_evolve(unittest.TestCase):
             remove_file(output_name)
 
     def test_grid_evolve_with_condition_error(self):
-        # Test to see if we can catch the errors correctly. 
+        """
+        Unittests to see if the threads catch the errors correctly.
+        """
 
         test_pop = Population()
         test_pop.set(amt_cores=2, verbosity=1, M_2=1, orbital_period=100000)
@@ -570,13 +626,15 @@ class test_grid_evolve(unittest.TestCase):
         )
 
         analytics = test_pop.evolve()
-        self.assertLess(np.abs(analytics['total_probability']-0.1503788456014623), 1e-10) # 
-        self.assertLess(np.abs(analytics['failed_prob']-0.1503788456014623), 1e-10) # 
-        self.assertEqual(analytics['failed_systems_error_codes'], [0])
-        self.assertTrue(analytics['total_count']==10)
-        self.assertTrue(analytics['failed_count']==10)
-        self.assertTrue(analytics['errors_found']==True)
-        self.assertTrue(analytics['errors_exceeded']==True)
+        self.assertLess(
+            np.abs(analytics["total_probability"] - 0.1503788456014623), 1e-10
+        )  #
+        self.assertLess(np.abs(analytics["failed_prob"] - 0.1503788456014623), 1e-10)  #
+        self.assertEqual(analytics["failed_systems_error_codes"], [0])
+        self.assertTrue(analytics["total_count"] == 10)
+        self.assertTrue(analytics["failed_count"] == 10)
+        self.assertTrue(analytics["errors_found"] == True)
+        self.assertTrue(analytics["errors_exceeded"] == True)
 
         # test to see if 1 thread does all the systems
 
@@ -611,13 +669,15 @@ class test_grid_evolve(unittest.TestCase):
             precode="M_2 = q * M_1",
             parameter_name="M_2",
             # condition="M_1 in dir()",  # Impose a condition on this grid variable. Mostly for a check for yourself
-            condition="'random_var' in dir()", # This will raise an error because random_var is not defined. 
+            condition="'random_var' in dir()",  # This will raise an error because random_var is not defined.
         )
 
         self.assertRaises(ValueError, test_pop.evolve)
 
     def test_grid_evolve_no_grid_variables(self):
-        # test to see if 1 thread does all the systems
+        """
+        Unittests to see if errors are raised if there are no grid variables
+        """
 
         test_pop = Population()
         test_pop.set(amt_cores=1, verbosity=1, M_2=1, orbital_period=100000)
@@ -626,14 +686,29 @@ class test_grid_evolve(unittest.TestCase):
         self.assertRaises(ValueError, test_pop.evolve)
 
     def test_grid_evolve_2_threads_with_ensemble_direct_output(self):
-        # test to see if 1 thread does all the systems
+        """
+        Unittests to see if multiple threads output the ensemble information to files correctly
+        """
 
         data_dir_value = binary_c_temp_dir
         amt_cores_value = 2
 
         test_pop = Population()
-        test_pop.set(amt_cores=amt_cores_value, verbosity=1, M_2=1, orbital_period=100000, ensemble=1, ensemble_defer=1, ensemble_filters_off=1, ensemble_filter_STELLAR_TYPE_COUNTS=1)
-        test_pop.set(data_dir=binary_c_temp_dir, ensemble_output_name="ensemble_output.json", combine_ensemble_with_thread_joining=False)
+        test_pop.set(
+            amt_cores=amt_cores_value,
+            verbosity=1,
+            M_2=1,
+            orbital_period=100000,
+            ensemble=1,
+            ensemble_defer=1,
+            ensemble_filters_off=1,
+            ensemble_filter_STELLAR_TYPE_COUNTS=1,
+        )
+        test_pop.set(
+            data_dir=binary_c_temp_dir,
+            ensemble_output_name="ensemble_output.json",
+            combine_ensemble_with_thread_joining=False,
+        )
 
         resolution = {"M_1": 10}
 
@@ -653,12 +728,20 @@ class test_grid_evolve(unittest.TestCase):
         )
 
         analytics = test_pop.evolve()
-        output_names = [os.path.join(data_dir_value, "ensemble_output_{}_{}.json".format(analytics['population_name'], thread_id)) for thread_id in range(amt_cores_value)]
+        output_names = [
+            os.path.join(
+                data_dir_value,
+                "ensemble_output_{}_{}.json".format(
+                    analytics["population_name"], thread_id
+                ),
+            )
+            for thread_id in range(amt_cores_value)
+        ]
 
         for output_name in output_names:
             self.assertTrue(os.path.isfile(output_name))
 
-            with open(output_name, 'r') as f:
+            with open(output_name, "r") as f:
                 file_content = f.read()
 
                 self.assertTrue(file_content.startswith("ENSEMBLE_JSON"))
@@ -672,14 +755,29 @@ class test_grid_evolve(unittest.TestCase):
                 self.assertNotEqual(ensemble_json["number_counts"], {})
 
     def test_grid_evolve_2_threads_with_ensemble_combining(self):
-        # test to see if 1 thread does all the systems
+        """
+        Unittests to see if multiple threads correctly combine the ensemble data and store them in the grid
+        """
 
         data_dir_value = binary_c_temp_dir
         amt_cores_value = 2
 
         test_pop = Population()
-        test_pop.set(amt_cores=amt_cores_value, verbosity=1, M_2=1, orbital_period=100000, ensemble=1, ensemble_defer=1, ensemble_filters_off=1, ensemble_filter_STELLAR_TYPE_COUNTS=1)
-        test_pop.set(data_dir=binary_c_temp_dir, combine_ensemble_with_thread_joining=True, ensemble_output_name="ensemble_output.json")
+        test_pop.set(
+            amt_cores=amt_cores_value,
+            verbosity=1,
+            M_2=1,
+            orbital_period=100000,
+            ensemble=1,
+            ensemble_defer=1,
+            ensemble_filters_off=1,
+            ensemble_filter_STELLAR_TYPE_COUNTS=1,
+        )
+        test_pop.set(
+            data_dir=binary_c_temp_dir,
+            combine_ensemble_with_thread_joining=True,
+            ensemble_output_name="ensemble_output.json",
+        )
 
         resolution = {"M_1": 10}
 
@@ -700,22 +798,39 @@ class test_grid_evolve(unittest.TestCase):
 
         analytics = test_pop.evolve()
 
-        self.assertTrue(isinstance(test_pop.grid_options['ensemble_results'], dict))
-        self.assertNotEqual(test_pop.grid_options['ensemble_results'], {})
-        
-        self.assertIn("number_counts", test_pop.grid_options['ensemble_results'])
-        self.assertNotEqual(test_pop.grid_options['ensemble_results']["number_counts"], {})
+        self.assertTrue(isinstance(test_pop.grid_options["ensemble_results"], dict))
+        self.assertNotEqual(test_pop.grid_options["ensemble_results"], {})
+
+        self.assertIn("number_counts", test_pop.grid_options["ensemble_results"])
+        self.assertNotEqual(
+            test_pop.grid_options["ensemble_results"]["number_counts"], {}
+        )
 
     def test_grid_evolve_2_threads_with_ensemble_comparing_two_methods(self):
-        # test to see if 1 thread does all the systems
+        """
+        Unittests to compare the method of storing the combined ensemble data in the object with writing them to files and combining them later; both have to give the same result
+        """
 
         data_dir_value = binary_c_temp_dir
         amt_cores_value = 2
 
         # First
         test_pop_1 = Population()
-        test_pop_1.set(amt_cores=amt_cores_value, verbosity=1, M_2=1, orbital_period=100000, ensemble=1, ensemble_defer=1, ensemble_filters_off=1, ensemble_filter_STELLAR_TYPE_COUNTS=1)
-        test_pop_1.set(data_dir=binary_c_temp_dir, combine_ensemble_with_thread_joining=True, ensemble_output_name="ensemble_output.json")
+        test_pop_1.set(
+            amt_cores=amt_cores_value,
+            verbosity=1,
+            M_2=1,
+            orbital_period=100000,
+            ensemble=1,
+            ensemble_defer=1,
+            ensemble_filters_off=1,
+            ensemble_filter_STELLAR_TYPE_COUNTS=1,
+        )
+        test_pop_1.set(
+            data_dir=binary_c_temp_dir,
+            combine_ensemble_with_thread_joining=True,
+            ensemble_output_name="ensemble_output.json",
+        )
 
         resolution = {"M_1": 10}
 
@@ -735,12 +850,25 @@ class test_grid_evolve(unittest.TestCase):
         )
 
         analytics_1 = test_pop_1.evolve()
-        ensemble_output_1 = test_pop_1.grid_options['ensemble_results']
+        ensemble_output_1 = test_pop_1.grid_options["ensemble_results"]
 
         # second
         test_pop_2 = Population()
-        test_pop_2.set(amt_cores=amt_cores_value, verbosity=1, M_2=1, orbital_period=100000, ensemble=1, ensemble_defer=1, ensemble_filters_off=1, ensemble_filter_STELLAR_TYPE_COUNTS=1)
-        test_pop_2.set(data_dir=binary_c_temp_dir, ensemble_output_name="ensemble_output.json", combine_ensemble_with_thread_joining=False)
+        test_pop_2.set(
+            amt_cores=amt_cores_value,
+            verbosity=1,
+            M_2=1,
+            orbital_period=100000,
+            ensemble=1,
+            ensemble_defer=1,
+            ensemble_filters_off=1,
+            ensemble_filter_STELLAR_TYPE_COUNTS=1,
+        )
+        test_pop_2.set(
+            data_dir=binary_c_temp_dir,
+            ensemble_output_name="ensemble_output.json",
+            combine_ensemble_with_thread_joining=False,
+        )
 
         resolution = {"M_1": 10}
 
@@ -760,13 +888,21 @@ class test_grid_evolve(unittest.TestCase):
         )
 
         analytics_2 = test_pop_2.evolve()
-        output_names_2 = [os.path.join(data_dir_value, "ensemble_output_{}_{}.json".format(analytics_2['population_name'], thread_id)) for thread_id in range(amt_cores_value)]
+        output_names_2 = [
+            os.path.join(
+                data_dir_value,
+                "ensemble_output_{}_{}.json".format(
+                    analytics_2["population_name"], thread_id
+                ),
+            )
+            for thread_id in range(amt_cores_value)
+        ]
         ensemble_output_2 = {}
 
         for output_name in output_names_2:
             self.assertTrue(os.path.isfile(output_name))
 
-            with open(output_name, 'r') as f:
+            with open(output_name, "r") as f:
                 file_content = f.read()
 
                 self.assertTrue(file_content.startswith("ENSEMBLE_JSON"))
@@ -775,11 +911,18 @@ class test_grid_evolve(unittest.TestCase):
 
                 ensemble_output_2 = merge_dicts(ensemble_output_2, ensemble_json)
 
-        for key in ensemble_output_1['number_counts']['stellar_type']['0']:
-            self.assertIn(key, ensemble_output_2['number_counts']['stellar_type']['0'])
+        for key in ensemble_output_1["number_counts"]["stellar_type"]["0"]:
+            self.assertIn(key, ensemble_output_2["number_counts"]["stellar_type"]["0"])
 
             # compare values
-            self.assertLess(np.abs(ensemble_output_1['number_counts']['stellar_type']['0'][key]-ensemble_output_2['number_counts']['stellar_type']['0'][key]), 1e-8)
+            self.assertLess(
+                np.abs(
+                    ensemble_output_1["number_counts"]["stellar_type"]["0"][key]
+                    - ensemble_output_2["number_counts"]["stellar_type"]["0"][key]
+                ),
+                1e-8,
+            )
+
 
 if __name__ == "__main__":
     unittest.main()
diff --git a/binarycpython/tests/test_grid_options_defaults.py b/binarycpython/tests/test_grid_options_defaults.py
index 23e879a6cbb68f335f84524edd3d825eb1cc7705..d344e32163e296fa9dee4149903971b152f2f2cf 100644
--- a/binarycpython/tests/test_grid_options_defaults.py
+++ b/binarycpython/tests/test_grid_options_defaults.py
@@ -1,3 +1,7 @@
+"""
+Unittests for grid_options_defaults module
+"""
+
 import unittest
 
 from binarycpython.utils.grid_options_defaults import *
@@ -67,5 +71,6 @@ class test_grid_options_defaults(unittest.TestCase):
 
         self.assertTrue(os.path.isfile(input_2))
 
+
 if __name__ == "__main__":
     unittest.main()
diff --git a/binarycpython/tests/test_hpc_functions.py b/binarycpython/tests/test_hpc_functions.py
index f0c4a49152e23df2cbd35ed1d62916125ee4c2f3..ec173924927700601d45d3c9c88949a5679e9149 100644
--- a/binarycpython/tests/test_hpc_functions.py
+++ b/binarycpython/tests/test_hpc_functions.py
@@ -1 +1,5 @@
+"""
+Unittests for hpc_functions module
+"""
+
 from binarycpython.utils.hpc_functions import *
diff --git a/binarycpython/tests/test_plot_functions.py b/binarycpython/tests/test_plot_functions.py
index 4b01c1b448f1a6819c5fd25a3850fbed94088017..30b813a62332e118a84624ed94ef793b2d404b96 100644
--- a/binarycpython/tests/test_plot_functions.py
+++ b/binarycpython/tests/test_plot_functions.py
@@ -1,3 +1,7 @@
+"""
+Unittests for plot_functions
+"""
+
 import unittest
 import numpy as np
 from binarycpython.utils.plot_functions import *
@@ -18,6 +22,10 @@ class test_color_by_index(unittest.TestCase):
     """
 
     def test_1(self):
+        """
+        First test
+        """
+
         colors = ["red", "white", "blue"]
 
         color = color_by_index([1, 2, 3], 1, colors)
@@ -30,6 +38,10 @@ class test_plot_system(unittest.TestCase):
     """
 
     def test_mass_evolution_plot(self):
+        """
+        Test for setting plot_type = "mass_evolution"
+        """
+
         plot_type = "mass_evolution"
         show_plot = False
         output_fig_1 = plot_system(
@@ -59,6 +71,10 @@ class test_plot_system(unittest.TestCase):
         # output_fig_2 = plot_system(plot_type, show_plot=show_plot, M_1=1, metallicity=0.002, M_2=0.1, separation=0, orbital_period=100000000000)
 
     def test_orbit_evolution_plot(self):
+        """
+        Test for setting plot_type = "orbit_evolution"
+        """
+
         plot_type = "orbit_evolution"
         show_plot = False
         output_fig_1 = plot_system(
@@ -88,6 +104,10 @@ class test_plot_system(unittest.TestCase):
         # output_fig_2 = plot_system(plot_type, show_plot=show_plot, M_1=1, metallicity=0.002, M_2=0.1, separation=0, orbital_period=100000000000)
 
     def test_hr_diagram_plot(self):
+        """
+        Test for setting plot_type = "hr_diagram"
+        """
+
         plot_type = "hr_diagram"
         show_plot = False
         output_fig_1 = plot_system(
@@ -117,6 +137,10 @@ class test_plot_system(unittest.TestCase):
         # output_fig_2 = plot_system(plot_type, show_plot=show_plot, M_1=1, metallicity=0.002, M_2=0.1, separation=0, orbital_period=100000000000)
 
     def test_unknown_plottype(self):
+        """
+        Test for a non-existent plot_type setting (plot_type = "random")
+        """
+
         plot_type = "random"
         self.assertRaises(ValueError, plot_system, plot_type)
 
diff --git a/binarycpython/tests/test_run_system_wrapper.py b/binarycpython/tests/test_run_system_wrapper.py
index 098968360c11ca75ef682883bd6307621fc9693e..97558a9607b2bfa180da5445a96505b7ed92515a 100644
--- a/binarycpython/tests/test_run_system_wrapper.py
+++ b/binarycpython/tests/test_run_system_wrapper.py
@@ -1 +1,5 @@
+"""
+Unittests for run_system_wrapper
+"""
+
 from binarycpython.utils.run_system_wrapper import *
diff --git a/binarycpython/tests/test_spacing_functions.py b/binarycpython/tests/test_spacing_functions.py
index 4cf06ce27983e2e5c272615e995b98b733550d92..72b6f3a63acfa5563c4057893e3ff5b82ad91adb 100644
--- a/binarycpython/tests/test_spacing_functions.py
+++ b/binarycpython/tests/test_spacing_functions.py
@@ -1,3 +1,8 @@
+"""
+Unittests for spacing_functions module
+"""
+
+
 import unittest
 import numpy as np
 from binarycpython.utils.spacing_functions import *
@@ -9,7 +14,12 @@ class test_spacing_functions(unittest.TestCase):
     """
 
     def test_const(self):
+        """
+        Unittest for function const
+        """
+
         const_return = const(1, 10, 10)
-        self.assertTrue((const_return==np.linspace(1, 10, 10)).all(),
+        self.assertTrue(
+            (const_return == np.linspace(1, 10, 10)).all(),
             msg="Output didn't contain SINGLE_STAR_LIFETIME",
         )
diff --git a/binarycpython/tests/test_stellar_types.py b/binarycpython/tests/test_stellar_types.py
index 0b86a5a70e10e463e443f278bd5e938328ec8aca..7091211b5fa19af97716a067d6f3bab6a9c8a09d 100644
--- a/binarycpython/tests/test_stellar_types.py
+++ b/binarycpython/tests/test_stellar_types.py
@@ -1,3 +1,7 @@
+"""
+Unittests for stellar_types module
+"""
+
 import unittest
 
 from binarycpython.utils.stellar_types import *
diff --git a/binarycpython/tests/test_useful_funcs.py b/binarycpython/tests/test_useful_funcs.py
index b861f595ce8c05122996b186f9453dc0efbfc9df..d7f77d1d9d7a5bcea63ff31d3f8d0f657e7db51e 100644
--- a/binarycpython/tests/test_useful_funcs.py
+++ b/binarycpython/tests/test_useful_funcs.py
@@ -1,3 +1,7 @@
+"""
+Unittests for useful_funcs module
+"""
+
 import unittest
 import numpy as np
 from binarycpython.utils.useful_funcs import *
@@ -19,6 +23,10 @@ class test_calc_period_from_sep(unittest.TestCase):
     """
 
     def test_1(self):
+        """
+        First test
+        """
+
         output_1 = calc_period_from_sep(1, 1, 1)
         self.assertEqual(output_1, 0.08188845248066838)
 
@@ -31,6 +39,9 @@ class test_calc_sep_from_period(unittest.TestCase):
     """
 
     def test_1(self):
+        """
+        First test
+        """
 
         output_1 = calc_sep_from_period(1, 1, 1)
         self.assertEqual(output_1, 5.302958446503317)
@@ -42,6 +53,10 @@ class test_roche_lobe(unittest.TestCase):
     """
 
     def test_1(self):
+        """
+        First test
+        """
+
         mass_donor = 2
         mass_accretor = 1
 
@@ -57,6 +72,10 @@ class test_ragb(unittest.TestCase):
     """
 
     def test_1(self):
+        """
+        First test
+        """
+
         m = 20
         output = ragb(m, 0.02)
 
@@ -69,6 +88,10 @@ class test_rzams(unittest.TestCase):
     """
 
     def test_1(self):
+        """
+        First test
+        """
+
         mass = 0.5
         metallicity = 0.02
         output_1 = rzams(mass, metallicity)
@@ -94,6 +117,10 @@ class test_zams_collission(unittest.TestCase):
     """
 
     def test_1(self):
+        """
+        First test
+        """
+
         mass1 = 1
         mass2 = 10
         sep = 10
diff --git a/binarycpython/utils/__init__.py b/binarycpython/utils/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..42a126ad073fc6dc96ca721d316abcd1aaa5a99c 100644
--- a/binarycpython/utils/__init__.py
+++ b/binarycpython/utils/__init__.py
@@ -0,0 +1,3 @@
+"""
+Init module for binarycpython.utils
+"""
diff --git a/binarycpython/utils/functions.py b/binarycpython/utils/functions.py
index 16b79ba6f62e89dce251217b16ee355ac2f8a107..c1ad1f771a801d9f10c36446e27755d0c134ebd1 100644
--- a/binarycpython/utils/functions.py
+++ b/binarycpython/utils/functions.py
@@ -71,7 +71,11 @@ def remove_file(file: str, verbosity: int = 0) -> None:
         except FileNotFoundError as inst:
             print("Error while deleting file {}: {}".format(file, inst))
     else:
-        verbose_print("File/directory {} doesn't exist. Can't remove it.".format(file), verbosity, 1)
+        verbose_print(
+            "File/directory {} doesn't exist. Can't remove it.".format(file),
+            verbosity,
+            1,
+        )
 
 
 def temp_dir(*args: str) -> str:
@@ -100,6 +104,7 @@ def temp_dir(*args: str) -> str:
 
     return path
 
+
 def create_hdf5(data_dir: str, name: str) -> None:
     """
     Function to create an hdf5 file from the contents of a directory:
@@ -191,7 +196,7 @@ def return_binary_c_version_info(parsed: bool = False) -> Union[str, dict]:
         found_prev = True
         prev_value = os.environ["BINARY_C_MACRO_HEADER"]
 
-    # 
+    #
     os.environ["BINARY_C_MACRO_HEADER"] = "macroxyz"
 
     # Get version_info
@@ -210,6 +215,7 @@ def return_binary_c_version_info(parsed: bool = False) -> Union[str, dict]:
 
     return version_info
 
+
 def parse_binary_c_version_info(version_info_string: str) -> dict:
     """
     Function that parses the binary_c version info. Long function with a lot of branches
@@ -403,16 +409,12 @@ def parse_binary_c_version_info(version_info_string: str) -> dict:
 
     # filter out git url
     git_url = [el for el in cleaned if el.startswith("git URL")]
-    misc_dict["git_url"] = (
-        git_url[0].split("git URL ")[-1].replace('"', "")
-    )
+    misc_dict["git_url"] = git_url[0].split("git URL ")[-1].replace('"', "")
     cleaned = cleaned - set(git_url)
 
     # filter out version
     version = [el for el in cleaned if el.startswith("Version")]
-    misc_dict["version"] = (
-        str(version[0].split("Version ")[-1])
-    )
+    misc_dict["version"] = str(version[0].split("Version ")[-1])
     cleaned = cleaned - set(version)
 
     git_branch = [el for el in cleaned if el.startswith("git branch")]
@@ -1168,14 +1170,21 @@ def merge_dicts(dict_1: dict, dict_2: dict) -> dict:
 
         # See whether the types are actually the same
         if not type(dict_1[key]) is type(dict_2[key]):
-            # Exceptions: 
-            if (type(dict_1[key]) in [int, float]) and (type(dict_2[key]) in [int, float]):
+            # Exceptions:
+            if (type(dict_1[key]) in [int, float]) and (
+                type(dict_2[key]) in [int, float]
+            ):
                 new_dict[key] = dict_1[key] + dict_2[key]
 
             else:
                 print(
                     "Error key: {} value: {} type: {} and key: {} value: {} type: {} are not of the same type and cannot be merged".format(
-                        key, dict_1[key], type(dict_1[key]), key, dict_2[key], type(dict_2[key])
+                        key,
+                        dict_1[key],
+                        type(dict_1[key]),
+                        key,
+                        dict_2[key],
+                        type(dict_2[key]),
                     )
                 )
                 raise ValueError
@@ -1215,7 +1224,8 @@ def merge_dicts(dict_1: dict, dict_2: dict) -> dict:
     #
     return new_dict
 
-def extract_ensemble_json_from_string(binary_c_output: str) -> dict: 
+
+def extract_ensemble_json_from_string(binary_c_output: str) -> dict:
     """
     Function to extract the ensemble_json information from a raw binary_c output string
 
@@ -1230,26 +1240,41 @@ def extract_ensemble_json_from_string(binary_c_output: str) -> dict:
 
     try:
         ensemble_jsons_strings = [
-            line for line in binary_c_output.splitlines() if line.startswith("ENSEMBLE_JSON")
+            line
+            for line in binary_c_output.splitlines()
+            if line.startswith("ENSEMBLE_JSON")
         ]
 
         json = handle_ensemble_string_to_json(
             ensemble_jsons_strings[0][len("ENSEMBLE_JSON ") :]
         )
 
-        if len(ensemble_jsons_strings)>1:
-            verbose_print("Warning: There is more than one line starting with ENSEMBLE_JSON. Taking the first, but you should check this out.",1, 0)
+        if len(ensemble_jsons_strings) > 1:
+            verbose_print(
+                "Warning: There is more than one line starting with ENSEMBLE_JSON. Taking the first, but you should check this out.",
+                1,
+                0,
+            )
     except IndexError:
-        verbose_print("Error: Couldn't extract the ensemble information from the output string", 0, 0)
+        verbose_print(
+            "Error: Couldn't extract the ensemble information from the output string",
+            0,
+            0,
+        )
 
     return json
 
+
 class binarycDecoder(json.JSONDecoder):
     """
     Custom decoder to transform the numbers that are strings to actual floats
     """
 
     def decode(self, s):
+        """
+        Entry point function for decoding
+        """
+
         result = super().decode(
             s
         )  # result = super(Decoder, self).decode(s) for Python 2.x
@@ -1279,7 +1304,14 @@ class binarycDecoder(json.JSONDecoder):
 
 
 class BinaryCEncoder(json.JSONEncoder):
+    """
+    Encoding class function to attempt to convert things to strings.
+    """
+
     def default(self, o):
+        """
+        Converting function. Well, could be more precise. look at the json module
+        """
         try:
             str_repr = str(o)
         except TypeError:
diff --git a/binarycpython/utils/grid.py b/binarycpython/utils/grid.py
index c90551566b60ade4aaf29eb1c321cc5c7375e8f1..e2c298231a817749e83fee38bedbf0ee1b9ef44f 100644
--- a/binarycpython/utils/grid.py
+++ b/binarycpython/utils/grid.py
@@ -57,8 +57,9 @@ from binarycpython.utils.functions import (
     binarycDecoder,
     merge_dicts,
     BinaryCEncoder,
-    extract_ensemble_json_from_string
+    extract_ensemble_json_from_string,
 )
+
 # from binarycpython.utils.hpc_functions import (
 #     get_condor_version,
 #     get_slurm_version,
@@ -70,6 +71,7 @@ from binarycpython.utils.functions import (
 from binarycpython import _binary_c_bindings
 import copy
 
+
 class Population:
     """
     Population Object. Contains all the necessary functions to set up, run and process a
@@ -87,7 +89,7 @@ class Population:
         # Setting stuff will check against the defaults to see if the input is correct.
         self.defaults = get_defaults()
         self.cleaned_up_defaults = self._cleanup_defaults()
-        
+
         # make the input dictionary
         self.bse_options = {}  # bse_options is just empty.
 
@@ -604,8 +606,8 @@ class Population:
     # Ensemble functions
     ###################################################
 
-    # Now they are stored in the _process_run_population thing. 
-    # Needed less code since they all 
+    # Now they are stored in the _process_run_population thing.
+    # Needed less code since they all
 
     ###################################################
     # Evolution functions
@@ -617,7 +619,7 @@ class Population:
         """
 
         # empty results
-        self.grid_options['results'] = {}
+        self.grid_options["results"] = {}
 
     def evolve(self) -> None:
         """
@@ -629,7 +631,7 @@ class Population:
             grid_options['slurm']: integer boolean whether to use a slurm_grid evolution
             grid_options['condor']: integer boolean whether to use a condor_grid evolution
 
-        If neither of the above is set, we continue without using HPC routines 
+        If neither of the above is set, we continue without using HPC routines
         (that doesn't mean this cannot be run on a server with many cores)
         """
 
@@ -650,8 +652,8 @@ class Population:
 
         # Put all interesting stuff in a variable and output that afterwards, as analytics of the run.
         analytics_dict = {
-            'population_name': self.grid_options['_population_id'],
-            'evolution_type': self.grid_options['evolution_type'],
+            "population_name": self.grid_options["_population_id"],
+            "evolution_type": self.grid_options["evolution_type"],
             "failed_count": self.grid_options["_failed_count"],
             "failed_prob": self.grid_options["_failed_prob"],
             "failed_systems_error_codes": self.grid_options[
@@ -711,7 +713,7 @@ class Population:
                 )
             )
 
-        # 
+        #
         self.grid_options["_end_time_evolution"] = time.time()
 
         # Log and print some information
@@ -744,7 +746,12 @@ class Population:
                     "ALL"
                     if not self.grid_options["_errors_exceeded"]
                     else "SOME (only the first ones, as there were too many to log all of them)",
-                    os.path.join(self.grid_options["tmp_dir"], "failed_systems_{}_X.txt".format(self.grid_options['_population_id'])),
+                    os.path.join(
+                        self.grid_options["tmp_dir"],
+                        "failed_systems_{}_X.txt".format(
+                            self.grid_options["_population_id"]
+                        ),
+                    ),
                 ),
                 self.grid_options["verbosity"],
                 0,
@@ -761,11 +768,11 @@ class Population:
         Function to evolve the population with multiprocessing approach.
         Using pathos to be able to include class-owned functions.
 
-        This function will create a pool with <self.grid_options["amt_cores"]> processes, and 
+        This function will create a pool with <self.grid_options["amt_cores"]> processes, and
         perform an imap_unordered to run the different `threads`.
-        Before this was done by giving a generator as the iterable, and have the processes get a 
+        Before this was done by giving a generator as the iterable, and have the processes get a
         certain chunksize each round.
-        Later on this seemed to be a bad decision, because it is difficult to pass information 
+        Later on this seemed to be a bad decision, because it is difficult to pass information
         back to the main controller, and because with each new batch of systems a new object instance was created.
 
         What I do now is I spawn these X amount of processes, and pass a range(self.grid_options["amt_cores"]) as iterable.
@@ -773,7 +780,7 @@ class Population:
         With this ID number each thread/process loops over the whole generator,
         but only runs the one <ID>'th system (if (localcounter+ID) % self.grid_options["amt_cores"]==0)'
 
-        When they are finished, these jobs are instructed to return a set of information 
+        When they are finished, these jobs are instructed to return a set of information
         (the result dict, TODO: describe what more)
 
         These resultation dictionaries are then merged and stored as object properties again.
@@ -806,7 +813,7 @@ class Population:
         pool.close()
         pool.join()
 
-        # Handle the results by merging all the dictionaries. How that merging happens exactly is 
+        # Handle the results by merging all the dictionaries. How that merging happens exactly is
         # described in the merge_dicts description.
         combined_output_dict = {}
         for output_dict in result:
@@ -814,7 +821,9 @@ class Population:
 
         # Put the values back as object properties
         self.grid_options["results"] = combined_output_dict["results"]
-        self.grid_options["ensemble_results"] = combined_output_dict["ensemble_results"] # Ensemble results are also passed as output from that dictionary
+        self.grid_options["ensemble_results"] = combined_output_dict[
+            "ensemble_results"
+        ]  # Ensemble results are also passed as output from that dictionary
         self.grid_options["_failed_count"] = combined_output_dict["_failed_count"]
         self.grid_options["_failed_prob"] = combined_output_dict["_failed_prob"]
         self.grid_options["_failed_systems_error_codes"] = list(
@@ -846,8 +855,8 @@ class Population:
                 "custom_logging_func_memaddr"
             ],
             store_memaddr=self.grid_options["_store_memaddr"],
-            population=1, # since this system is part of a population, we set this flag to prevent the store from being freed
-            persistent_data_memaddr=persistent_data_memaddr
+            population=1,  # since this system is part of a population, we set this flag to prevent the store from being freed
+            persistent_data_memaddr=persistent_data_memaddr,
         )
 
         # Check for errors
@@ -871,12 +880,14 @@ class Population:
             ID  # Store the ID as a object property again, lets see if that works.
         )
 
-        if self.bse_options.get('ensemble', 0) == 1:
+        if self.bse_options.get("ensemble", 0) == 1:
             # set persistent data memaddr if necessary.
             persistent_data_memaddr = (
                 _binary_c_bindings.return_persistent_data_memaddr()
             )
-            self.persistent_data_memory_dict = {self.process_ID: persistent_data_memaddr}
+            self.persistent_data_memory_dict = {
+                self.process_ID: persistent_data_memaddr
+            }
 
         # apparently we have to re-load this for every process, otherwise NameErrors arise (seems like a bug but I'm not sure)
         self._load_grid_function()
@@ -934,20 +945,33 @@ class Population:
             localcounter += 1
 
         # Handle ensemble output: is ensemble==1, then either directly write that data to a file, or combine everything into 1 file.
-        ensemble_json = {} # Make sure it exists already
-        if self.bse_options.get('ensemble', 0) == 1:
-            ensemble_raw_output = _binary_c_bindings.free_persistent_data_memaddr_and_return_json_output(
-                self.persistent_data_memory_dict[self.process_ID]
+        ensemble_json = {}  # Make sure it exists already
+        if self.bse_options.get("ensemble", 0) == 1:
+            ensemble_raw_output = (
+                _binary_c_bindings.free_persistent_data_memaddr_and_return_json_output(
+                    self.persistent_data_memory_dict[self.process_ID]
+                )
             )
 
-            # 
-            if self.grid_options['combine_ensemble_with_thread_joining'] == True:
-                ensemble_json = extract_ensemble_json_from_string(ensemble_raw_output) # Load this into a dict so that we can combine it later
+            #
+            if self.grid_options["combine_ensemble_with_thread_joining"] == True:
+                ensemble_json = extract_ensemble_json_from_string(
+                    ensemble_raw_output
+                )  # Load this into a dict so that we can combine it later
 
             else:
                 # If we do not allow this, automatically we will export this to the data_dir, in some formatted way
-                output_file = os.path.join(self.custom_options['data_dir'], "ensemble_output_{}_{}.json".format(self.grid_options['_population_id'], self.process_ID))
-                print("Thread {}: Chosen to output the ensemble results directly to file: {}".format(self.process_ID, output_file))
+                output_file = os.path.join(
+                    self.custom_options["data_dir"],
+                    "ensemble_output_{}_{}.json".format(
+                        self.grid_options["_population_id"], self.process_ID
+                    ),
+                )
+                print(
+                    "Thread {}: Chosen to output the ensemble results directly to file: {}".format(
+                        self.process_ID, output_file
+                    )
+                )
 
                 # Write to file
                 with open(output_file, "w") as f:
@@ -964,7 +988,7 @@ class Population:
             ],
             "_errors_exceeded": self.grid_options["_errors_exceeded"],
             "_errors_found": self.grid_options["_errors_found"],
-            "_probtot": probability_of_systems_run, 
+            "_probtot": probability_of_systems_run,
             "_count": number_of_systems_run,
         }
 
@@ -1051,7 +1075,7 @@ class Population:
         self.grid_options["_store_memaddr"] = _binary_c_bindings.return_store_memaddr()
 
         ### ensemble: make some checks for this
-        ## check the settings and set all the warnings. 
+        ## check the settings and set all the warnings.
         if self.bse_options.get("ensemble", None):
             if not self.bse_options["ensemble_defer"] == 1:
                 verbose_print(
@@ -1063,22 +1087,36 @@ class Population:
 
             if not self.custom_options["ensemble_output_name"]:
                 verbose_print(
-                    "Error: if you want to run an ensemble in a population, please set set 'ensemble_output_name'. It will be combined with 'data_dir' to write the output of the ensembles to", self.grid_options['verbosity'], 0)
+                    "Error: if you want to run an ensemble in a population, please set 'ensemble_output_name'. It will be combined with 'data_dir' to write the output of the ensembles to",
+                    self.grid_options["verbosity"],
+                    0,
+                )
                 raise ValueError
 
-            if (not any([key.startswith("ensemble_filter_") for key in self.bse_options])):
+            if not any(
+                [key.startswith("ensemble_filter_") for key in self.bse_options]
+            ):
                 verbose_print(
-                    "Warning: Running the ensemble without any filter requires alot of available RAM", self.grid_options['verbosity'], 0)
+                    "Warning: Running the ensemble without any filter requires a lot of available RAM",
+                    self.grid_options["verbosity"],
+                    0,
+                )
 
             if self.bse_options.get("ensemble_filters_off", None):
                 if self.bse_options["ensemble_filters_off"] == 0:
                     verbose_print(
-                        "Warning: Running the ensemble without any filter requires alot of available RAM", self.grid_options['verbosity'], 0)
+                        "Warning: Running the ensemble without any filter requires a lot of available RAM",
+                        self.grid_options["verbosity"],
+                        0,
+                    )
 
-            if self.grid_options['combine_ensemble_with_thread_joining'] == False:
-                if not self.custom_options.get('data_dir', None):
+            if self.grid_options["combine_ensemble_with_thread_joining"] == False:
+                if not self.custom_options.get("data_dir", None):
                     verbose_print(
-                        "Error: chosen to write the ensemble output directly to files but data_dir isnt set", self.grid_options['verbosity'], 0)
+                        "Error: chosen to write the ensemble output directly to files but data_dir isn't set",
+                        self.grid_options["verbosity"],
+                        0,
+                    )
 
         # Check which type of population generation
         if self.grid_options["evolution_type"] == "grid":
@@ -1609,10 +1647,10 @@ class Population:
 
         self.grid_options["code_string"] = code_string
 
-
         # Write to file
         gridcode_filename = os.path.join(
-            self.grid_options["tmp_dir"], "binary_c_grid_{}.py".format(self.grid_options["_population_id"])
+            self.grid_options["tmp_dir"],
+            "binary_c_grid_{}.py".format(self.grid_options["_population_id"]),
         )
         self.grid_options["gridcode_filename"] = gridcode_filename
 
@@ -2014,249 +2052,249 @@ class Population:
     # subroutines to run CONDOR grids
     ###################################################
 
-#     def _condor_grid(self):
-#         """
-#         Main function that manages the CONDOR setup.
-
-#         Has three stages:
-
-#         - setup
-#         - evolve
-#         - join
-
-#         Which stage is used is determined by the value of grid_options['condor_command']:
-
-#         <empty>: the function will know its the user that executed the script and
-#         it will set up the necessary condor stuff
-
-#         'evolve': evolve_population is called to evolve the population of stars
-
-#         'join': We will attempt to join the output
-#         """
-
-#         # TODO: Put in function
-#         condor_version = get_condor_version()
-#         if not condor_version:
-#             verbose_print(
-#                 "CONDOR: Error: No installation of condor found",
-#                 self.grid_options["verbosity"],
-#                 0,
-#             )
-#         else:
-#             major_version = int(condor_version.split(".")[0])
-#             minor_version = int(condor_version.split(".")[1])
-
-#             if (major_version == 8) and (minor_version > 4):
-#                 verbose_print(
-#                     "CONDOR: Found version {} which is new enough".format(
-#                         condor_version
-#                     ),
-#                     self.grid_options["verbosity"],
-#                     0,
-#                 )
-#             elif major_version > 9:
-#                 verbose_print(
-#                     "CONDOR: Found version {} which is new enough".format(
-#                         condor_version
-#                     ),
-#                     self.grid_options["verbosity"],
-#                     0,
-#                 )
-#             else:
-#                 verbose_print(
-#                     "CONDOR: Found version {} which is too old (we require 8.3/8.4+)".format(
-#                         condor_version
-#                     ),
-#                     self.grid_options["verbosity"],
-#                     0,
-#                 )
-
-#         verbose_print(
-#             "Running Condor grid. command={}".format(
-#                 self.grid_options["condor_command"]
-#             ),
-#             self.grid_options["verbosity"],
-#             1,
-#         )
-#         if not self.grid_options["condor_command"]:
-#             # Setting up
-#             verbose_print(
-#                 "CONDOR: Main controller script. Setting up",
-#                 self.grid_options["verbosity"],
-#                 1,
-#             )
-
-#             # Set up working directories:
-#             verbose_print(
-#                 "CONDOR: creating working directories",
-#                 self.grid_options["verbosity"],
-#                 1,
-#             )
-#             create_directories_hpc(self.grid_options["condor_dir"])
-
-#             # Create command
-#             current_workingdir = os.getcwd()
-#             python_details = get_python_details()
-#             scriptname = path_of_calling_script()
-#             # command = "".join([
-#             #     "{}".python_details['executable'],
-#             #     "{}".scriptname,
-#             #     "offset=$jobarrayindex",
-#             #     "modulo={}".format(self.grid_options['condor_njobs']),
-#             #     "vb={}".format(self.grid_options['verbosity'])
-
-#             #      "results_hash_dumpfile=$self->{_grid_options}{slurm_dir}/results/$jobid.$jobarrayindex",
-#             #      'slurm_jobid='.$jobid,
-#             #      'slurm_jobarrayindex='.$jobarrayindex,
-#             #      'slurm_jobname=binary_grid_'.$jobid.'.'.$jobarrayindex,
-#             #      "slurm_njobs=$njobs",
-#             #      "slurm_dir=$self->{_grid_options}{slurm_dir}",
-#             # );
-
-#             # Create directory with info for the condor script. By creating this directory we also check whether all the values are set correctly
-#             # TODO: create the condor script.
-#             condor_script_options = {}
-#             # condor_script_options['n'] =
-#             condor_script_options["njobs"] = self.grid_options["condor_njobs"]
-#             condor_script_options["dir"] = self.grid_options["condor_dir"]
-#             condor_script_options["memory"] = self.grid_options["condor_memory"]
-#             condor_script_options["working_dir"] = self.grid_options[
-#                 "condor_working_dir"
-#             ]
-#             condor_script_options["command"] = self.grid_options["command"]
-#             condor_script_options["streams"] = self.grid_options["streams"]
-
-#             # TODO: condor works with running an executable.
-
-#             # Create script contents
-#             condor_script_contents = ""
-#             condor_script_contents += """
-# #################################################
-# #                       
-# # Condor script to run a binary_grid via python
-# #
-# #################################################
-# """
-#             condor_script_contents += "Executable\t= {}".format(executable)
-#             condor_script_contents += "arguments\t= {}".format(arguments)
-#             condor_script_contents += "environment\t= {}".format(environment)
-#             condor_script_contents += "universe\t= {}".format(
-#                 self.grid_options["condor_universe"]
-#             )
-#             condor_script_contents += "\n"
-#             condor_script_contents += "output\t= {}/stdout/$id\n".format(
-#                 self.grid_options["condor_dir"]
-#             )
-#             condor_script_contents += "error\t={}/sterr/$id".format(
-#                 self.grid_options["condor_dir"]
-#             )
-#             condor_script_contents += "log\t={}\n".format(
-#                 self.grid_options["condor_dir"]
-#             )
-#             condor_script_contents += "initialdir\t={}\n".format(current_workingdir)
-#             condor_script_contents += "remote_initialdir\t={}\n".format(
-#                 current_workingdir
-#             )
-#             condor_script_contents += "\n"
-#             condor_script_contents += "steam_output\t={}".format(stream)
-#             condor_script_contents += "steam_error\t={}".format(stream)
-#             condor_script_contents += "+WantCheckpoint = False"
-#             condor_script_contents += "\n"
-#             condor_script_contents += "request_memory\t={}".format(
-#                 self.grid_options["condor_memory"]
-#             )
-#             condor_script_contents += "ImageSize\t={}".format(
-#                 self.grid_options["condor_memory"]
-#             )
-#             condor_script_contents += "\n"
-
-#             if self.grid_options["condor_extra_settings"]:
-#                 slurm_script_contents += "# Extra settings by user:"
-#                 slurm_script_contents += "\n".join(
-#                     [
-#                         "{}\t={}".format(
-#                             key, self.grid_options["condor_extra_settings"][key]
-#                         )
-#                         for key in self.grid_options["condor_extra_settings"]
-#                     ]
-#                 )
-
-#             condor_script_contents += "\n"
-
-#             #   request_memory = $_[0]{memory}
-#             #   ImageSize = $_[0]{memory}
-
-#             #   Requirements = (1) \&\& (".
-#             #   $self->{_grid_options}{condor_requirements}.")\n";
-
-#             #
-#             # file name:  my_program.condor
-#             # Condor submit description file for my_program
-#             # Executable      = my_program
-#             # Universe        = vanilla
-#             # Error           = logs/err.$(cluster)
-#             # Output          = logs/out.$(cluster)
-#             # Log             = logs/log.$(cluster)
-
-#             # should_transfer_files = YES
-#             # when_to_transfer_output = ON_EXIT
-#             # transfer_input_files = files/in1,files/in2
-
-#             # Arguments       = files/in1 files/in2 files/out1
-#             # Queue
-
-#             # Write script contents to file
-#             if self.grid_options["condor_postpone_join"]:
-#                 condor_script_contents += "{} rungrid=0 results_hash_dumpfile={}/results/$jobid.all condor_command=join\n".format(
-#                     command, self.grid_options["condor_dir"]
-#                 )
-
-#             condor_script_filename = os.path.join(
-#                 self.grid_options["condor_dir"], "condor_script"
-#             )
-#             with open(condor_script_filename, "w") as condor_script_file:
-#                 condor_script_file.write(condor_script_contents)
-
-#             if self.grid_options["condor_postpone_sbatch"]:
-#                 # Execute or postpone the real call to sbatch
-#                 submit_command = "condor_submit {}".format(condor_script_filename)
-#                 verbose_print(
-#                     "running condor script {}".format(condor_script_filename),
-#                     self.grid_options["verbosity"],
-#                     0,
-#                 )
-#                 # subprocess.Popen(sbatch_command, close_fds=True)
-#                 # subprocess.Popen(sbatch_command, creationflags=subprocess.DETACHED_PROCESS)
-#                 verbose_print("Submitted scripts.", self.grid_options["verbosity"], 0)
-#             else:
-#                 verbose_print(
-#                     "Condor script is in {} but hasnt been executed".format(
-#                         condor_script_filename
-#                     ),
-#                     self.grid_options["verbosity"],
-#                     0,
-#                 )
-
-#             verbose_print("all done!", self.grid_options["verbosity"], 0)
-#             exit()
-
-#         elif self.grid_options["condor_command"] == "evolve":
-#             # TODO: write this function
-#             # Part to evolve the population.
-#             # TODO: decide how many CPUs
-#             verbose_print(
-#                 "CONDOR: Evolving population", self.grid_options["verbosity"], 1
-#             )
-
-#             #
-#             self.evolve_population()
-
-#         elif self.grid_options["condor_command"] == "join":
-#             # TODO: write this function
-#             # Joining the output.
-#             verbose_print("CONDOR: Joining results", self.grid_options["verbosity"], 1)
-
-#             pass
+    #     def _condor_grid(self):
+    #         """
+    #         Main function that manages the CONDOR setup.
+
+    #         Has three stages:
+
+    #         - setup
+    #         - evolve
+    #         - join
+
+    #         Which stage is used is determined by the value of grid_options['condor_command']:
+
+    #         <empty>: the function will know its the user that executed the script and
+    #         it will set up the necessary condor stuff
+
+    #         'evolve': evolve_population is called to evolve the population of stars
+
+    #         'join': We will attempt to join the output
+    #         """
+
+    #         # TODO: Put in function
+    #         condor_version = get_condor_version()
+    #         if not condor_version:
+    #             verbose_print(
+    #                 "CONDOR: Error: No installation of condor found",
+    #                 self.grid_options["verbosity"],
+    #                 0,
+    #             )
+    #         else:
+    #             major_version = int(condor_version.split(".")[0])
+    #             minor_version = int(condor_version.split(".")[1])
+
+    #             if (major_version == 8) and (minor_version > 4):
+    #                 verbose_print(
+    #                     "CONDOR: Found version {} which is new enough".format(
+    #                         condor_version
+    #                     ),
+    #                     self.grid_options["verbosity"],
+    #                     0,
+    #                 )
+    #             elif major_version > 9:
+    #                 verbose_print(
+    #                     "CONDOR: Found version {} which is new enough".format(
+    #                         condor_version
+    #                     ),
+    #                     self.grid_options["verbosity"],
+    #                     0,
+    #                 )
+    #             else:
+    #                 verbose_print(
+    #                     "CONDOR: Found version {} which is too old (we require 8.3/8.4+)".format(
+    #                         condor_version
+    #                     ),
+    #                     self.grid_options["verbosity"],
+    #                     0,
+    #                 )
+
+    #         verbose_print(
+    #             "Running Condor grid. command={}".format(
+    #                 self.grid_options["condor_command"]
+    #             ),
+    #             self.grid_options["verbosity"],
+    #             1,
+    #         )
+    #         if not self.grid_options["condor_command"]:
+    #             # Setting up
+    #             verbose_print(
+    #                 "CONDOR: Main controller script. Setting up",
+    #                 self.grid_options["verbosity"],
+    #                 1,
+    #             )
+
+    #             # Set up working directories:
+    #             verbose_print(
+    #                 "CONDOR: creating working directories",
+    #                 self.grid_options["verbosity"],
+    #                 1,
+    #             )
+    #             create_directories_hpc(self.grid_options["condor_dir"])
+
+    #             # Create command
+    #             current_workingdir = os.getcwd()
+    #             python_details = get_python_details()
+    #             scriptname = path_of_calling_script()
+    #             # command = "".join([
+    #             #     "{}".python_details['executable'],
+    #             #     "{}".scriptname,
+    #             #     "offset=$jobarrayindex",
+    #             #     "modulo={}".format(self.grid_options['condor_njobs']),
+    #             #     "vb={}".format(self.grid_options['verbosity'])
+
+    #             #      "results_hash_dumpfile=$self->{_grid_options}{slurm_dir}/results/$jobid.$jobarrayindex",
+    #             #      'slurm_jobid='.$jobid,
+    #             #      'slurm_jobarrayindex='.$jobarrayindex,
+    #             #      'slurm_jobname=binary_grid_'.$jobid.'.'.$jobarrayindex,
+    #             #      "slurm_njobs=$njobs",
+    #             #      "slurm_dir=$self->{_grid_options}{slurm_dir}",
+    #             # );
+
+    #             # Create directory with info for the condor script. By creating this directory we also check whether all the values are set correctly
+    #             # TODO: create the condor script.
+    #             condor_script_options = {}
+    #             # condor_script_options['n'] =
+    #             condor_script_options["njobs"] = self.grid_options["condor_njobs"]
+    #             condor_script_options["dir"] = self.grid_options["condor_dir"]
+    #             condor_script_options["memory"] = self.grid_options["condor_memory"]
+    #             condor_script_options["working_dir"] = self.grid_options[
+    #                 "condor_working_dir"
+    #             ]
+    #             condor_script_options["command"] = self.grid_options["command"]
+    #             condor_script_options["streams"] = self.grid_options["streams"]
+
+    #             # TODO: condor works with running an executable.
+
+    #             # Create script contents
+    #             condor_script_contents = ""
+    #             condor_script_contents += """
+    # #################################################
+    # #
+    # # Condor script to run a binary_grid via python
+    # #
+    # #################################################
+    # """
+    #             condor_script_contents += "Executable\t= {}".format(executable)
+    #             condor_script_contents += "arguments\t= {}".format(arguments)
+    #             condor_script_contents += "environment\t= {}".format(environment)
+    #             condor_script_contents += "universe\t= {}".format(
+    #                 self.grid_options["condor_universe"]
+    #             )
+    #             condor_script_contents += "\n"
+    #             condor_script_contents += "output\t= {}/stdout/$id\n".format(
+    #                 self.grid_options["condor_dir"]
+    #             )
+    #             condor_script_contents += "error\t={}/sterr/$id".format(
+    #                 self.grid_options["condor_dir"]
+    #             )
+    #             condor_script_contents += "log\t={}\n".format(
+    #                 self.grid_options["condor_dir"]
+    #             )
+    #             condor_script_contents += "initialdir\t={}\n".format(current_workingdir)
+    #             condor_script_contents += "remote_initialdir\t={}\n".format(
+    #                 current_workingdir
+    #             )
+    #             condor_script_contents += "\n"
+    #             condor_script_contents += "steam_output\t={}".format(stream)
+    #             condor_script_contents += "steam_error\t={}".format(stream)
+    #             condor_script_contents += "+WantCheckpoint = False"
+    #             condor_script_contents += "\n"
+    #             condor_script_contents += "request_memory\t={}".format(
+    #                 self.grid_options["condor_memory"]
+    #             )
+    #             condor_script_contents += "ImageSize\t={}".format(
+    #                 self.grid_options["condor_memory"]
+    #             )
+    #             condor_script_contents += "\n"
+
+    #             if self.grid_options["condor_extra_settings"]:
+    #                 slurm_script_contents += "# Extra settings by user:"
+    #                 slurm_script_contents += "\n".join(
+    #                     [
+    #                         "{}\t={}".format(
+    #                             key, self.grid_options["condor_extra_settings"][key]
+    #                         )
+    #                         for key in self.grid_options["condor_extra_settings"]
+    #                     ]
+    #                 )
+
+    #             condor_script_contents += "\n"
+
+    #             #   request_memory = $_[0]{memory}
+    #             #   ImageSize = $_[0]{memory}
+
+    #             #   Requirements = (1) \&\& (".
+    #             #   $self->{_grid_options}{condor_requirements}.")\n";
+
+    #             #
+    #             # file name:  my_program.condor
+    #             # Condor submit description file for my_program
+    #             # Executable      = my_program
+    #             # Universe        = vanilla
+    #             # Error           = logs/err.$(cluster)
+    #             # Output          = logs/out.$(cluster)
+    #             # Log             = logs/log.$(cluster)
+
+    #             # should_transfer_files = YES
+    #             # when_to_transfer_output = ON_EXIT
+    #             # transfer_input_files = files/in1,files/in2
+
+    #             # Arguments       = files/in1 files/in2 files/out1
+    #             # Queue
+
+    #             # Write script contents to file
+    #             if self.grid_options["condor_postpone_join"]:
+    #                 condor_script_contents += "{} rungrid=0 results_hash_dumpfile={}/results/$jobid.all condor_command=join\n".format(
+    #                     command, self.grid_options["condor_dir"]
+    #                 )
+
+    #             condor_script_filename = os.path.join(
+    #                 self.grid_options["condor_dir"], "condor_script"
+    #             )
+    #             with open(condor_script_filename, "w") as condor_script_file:
+    #                 condor_script_file.write(condor_script_contents)
+
+    #             if self.grid_options["condor_postpone_sbatch"]:
+    #                 # Execute or postpone the real call to sbatch
+    #                 submit_command = "condor_submit {}".format(condor_script_filename)
+    #                 verbose_print(
+    #                     "running condor script {}".format(condor_script_filename),
+    #                     self.grid_options["verbosity"],
+    #                     0,
+    #                 )
+    #                 # subprocess.Popen(sbatch_command, close_fds=True)
+    #                 # subprocess.Popen(sbatch_command, creationflags=subprocess.DETACHED_PROCESS)
+    #                 verbose_print("Submitted scripts.", self.grid_options["verbosity"], 0)
+    #             else:
+    #                 verbose_print(
+    #                     "Condor script is in {} but hasnt been executed".format(
+    #                         condor_script_filename
+    #                     ),
+    #                     self.grid_options["verbosity"],
+    #                     0,
+    #                 )
+
+    #             verbose_print("all done!", self.grid_options["verbosity"], 0)
+    #             exit()
+
+    #         elif self.grid_options["condor_command"] == "evolve":
+    #             # TODO: write this function
+    #             # Part to evolve the population.
+    #             # TODO: decide how many CPUs
+    #             verbose_print(
+    #                 "CONDOR: Evolving population", self.grid_options["verbosity"], 1
+    #             )
+
+    #             #
+    #             self.evolve_population()
+
+    #         elif self.grid_options["condor_command"] == "join":
+    #             # TODO: write this function
+    #             # Joining the output.
+    #             verbose_print("CONDOR: Joining results", self.grid_options["verbosity"], 1)
+
+    #             pass
 
     ###################################################
     # Unordered functions
diff --git a/binarycpython/utils/grid_options_defaults.py b/binarycpython/utils/grid_options_defaults.py
index f462c5cebc3f3be5f5abde68886afda37b8f3149..37cba2cb54843093a1d5346790b2b49e0b9b94fc 100644
--- a/binarycpython/utils/grid_options_defaults.py
+++ b/binarycpython/utils/grid_options_defaults.py
@@ -27,7 +27,7 @@ grid_options_defaults_dict = {
     "parse_function": None,  # FUnction to parse the output with.
     "tmp_dir": temp_dir(),  # Setting the temp dir of the program
     "_main_pid": -1,  # Placeholder for the main process id of the run.
-    "combine_ensemble_with_thread_joining": True, # Flag on whether to combine everything and return it to the user or if false: write it to data_dir/ensemble_output_{popuation_id}_{thread_id}.json
+    "combine_ensemble_with_thread_joining": True,  # Flag on whether to combine everything and return it to the user or if false: write it to data_dir/ensemble_output_{popuation_id}_{thread_id}.json
     # "output_dir":
     "_commandline_input": "",
     ##########################
@@ -86,7 +86,7 @@ grid_options_defaults_dict = {
     "weight": 1.0,  # weighting for the probability
     "repeat": 1,  # number of times to repeat each system (probability is adjusted to be 1/repeat)
     "results": {},  # dict to store the results. Every process fills this on its own and then it will be joined later
-    "ensemble_results": {}, # Dict to store the ensemble results
+    "ensemble_results": {},  # Dict to store the ensemble results
     "_start_time_evolution": 0,  # Start time of the grid
     "_end_time_evolution": 0,  # end time of the grid
     "_errors_found": False,  # Flag whether there are any errors from binary_c
@@ -168,8 +168,6 @@ grid_options_defaults_dict = {
     # "condor_memory": 1024,  # in MB, the memory use (ImageSize) of the job
     # "condor_universe": "vanilla",  # usually vanilla universe
     # "condor_extra_settings": {},  # Place to put extra configuration for the CONDOR submit file. The key and value of the dict will become the key and value of the line in te slurm batch file. Will be put in after all the other settings (and before the command). Take care not to overwrite something without really meaning to do so.
-
-
     # snapshots and checkpoints
     # condor_snapshot_on_kill=>0, # if 1 snapshot on SIGKILL before exit
     # condor_load_from_snapshot=>0, # if 1 check for snapshot .sv file and load it if found
@@ -467,7 +465,6 @@ grid_options_descriptions = {
     "evolution_type": "Variable containing the type of evolution used of the grid. Multiprocessing or linear processing",
     "combine_ensemble_with_thread_joining": "BOolean flag on whether to combine everything and return it to the user or if false: write it to data_dir/ensemble_output_{popuation_id}_{thread_id}.json",
     "ensemble_results": "Dictinary that stores the ensemble results if combine_ensemble_with_thread_joining==True",
-
 }
 
 #################################
diff --git a/binarycpython/utils/hpc_functions.py b/binarycpython/utils/hpc_functions.py
index e9795c86695c2a2934c141742c60e41fe727bf04..248f4194226901fb2db3f87dd26de094fcd1a73c 100644
--- a/binarycpython/utils/hpc_functions.py
+++ b/binarycpython/utils/hpc_functions.py
@@ -1,10 +1,10 @@
-# """
-# File containing functions for HPC computing, distributed tasks on clusters etc.
+"""
+File containing functions for HPC computing, distributed tasks on clusters etc.
 
-# Functions that the slurm and condor subroutines of the population object use.
+Functions that the slurm and condor subroutines of the population object use.
 
-# Mainly divided in 2 sections: Slurm and Condor
-# """
+Mainly divided in 2 sections: Slurm and Condor
+"""
 
 # import os
 # import sys
diff --git a/binarycpython/utils/plot_functions.py b/binarycpython/utils/plot_functions.py
index d6e558575a392e3e0e4b6e747ef65152cce2d041..16a1003333e234866eba4b76ef7c96a71ff18253 100644
--- a/binarycpython/utils/plot_functions.py
+++ b/binarycpython/utils/plot_functions.py
@@ -87,6 +87,10 @@ Printf("HR_PLOTTING %30.12e %d %d %g %g %g %g %g %g\\n",
 
 
 def color_by_index(row, column, colors):
+    """
+    Function that returns a color based on row and column information. Used to color the stellar types
+    """
+
     return colors[int(row[column])]
 
 
diff --git a/docstring_coverage.svg b/docstring_coverage.svg
new file mode 100644
index 0000000000000000000000000000000000000000..b71949aac0d3a90efe74902035e4c799556a3837
--- /dev/null
+++ b/docstring_coverage.svg
@@ -0,0 +1,20 @@
+<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="142" height="20">
+    <linearGradient id="s" x2="0" y2="100%">
+        <stop offset="0" stop-color="#bbb" stop-opacity=".1"/>
+        <stop offset="1" stop-opacity=".1"/>
+    </linearGradient>
+    <clipPath id="r">
+        <rect width="142" height="20" rx="3" fill="#fff"/>
+    </clipPath>
+    <g clip-path="url(#r)">
+        <rect width="99" height="20" fill="#555"/>
+        <rect x="99" width="43" height="20" fill="#a4a61d"/>
+        <rect width="142" height="20" fill="url(#s)"/>
+    </g>
+    <g fill="#fff" text-anchor="middle" font-family="Verdana,Geneva,DejaVu Sans,sans-serif" font-size="110">
+        <text x="505" y="150" fill="#010101" fill-opacity=".3" transform="scale(.1)" textLength="890">docstr-coverage</text>
+        <text x="505" y="140" transform="scale(.1)" textLength="890">docstr-coverage</text>
+        <text x="1195" y="150" fill="#010101" fill-opacity=".3" transform="scale(.1)">82%</text>
+        <text x="1195" y="140" transform="scale(.1)">82%</text>
+    </g>
+</svg>
\ No newline at end of file
diff --git a/interrogate_badge.svg b/interrogate_badge.svg
new file mode 100644
index 0000000000000000000000000000000000000000..3e810a4d98f64acb5219ebc0ca4ffeed4d211cd1
--- /dev/null
+++ b/interrogate_badge.svg
@@ -0,0 +1,54 @@
+<svg width="140" height="20" viewBox="0 0 140 20" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" xml:space="preserve" xmlns:serif="http://www.serif.com/" style="fill-rule:evenodd;clip-rule:evenodd;stroke-linejoin:round;stroke-miterlimit:2;">
+    <g transform="matrix(1,0,0,1,22,0)">
+        <g id="backgrounds" transform="matrix(1.32789,0,0,1,-22.3892,0)">
+            <rect x="0" y="0" width="71" height="20" style="fill:rgb(85,85,85);"/>
+        </g>
+        <rect x="71" y="0" width="47" height="20" data-interrogate="color" style="fill:#a4a61d"/>
+        <g transform="matrix(1.19746,0,0,1,-22.3744,-4.85723e-16)">
+            <rect x="0" y="0" width="118" height="20" style="fill:url(#_Linear1);"/>
+        </g>
+    </g>
+    <g fill="#fff" text-anchor="middle" font-family="DejaVu Sans,Verdana,Geneva,sans-serif" font-size="110">
+        <text x="590" y="150" fill="#010101" fill-opacity=".3" transform="scale(.1)" textLength="610">interrogate</text>
+        <text x="590" y="140" transform="scale(.1)" textLength="610">interrogate</text>
+        <text x="1160" y="150" fill="#010101" fill-opacity=".3" transform="scale(.1)" textLength="370" data-interrogate="result">80.9%</text>
+        <text x="1160" y="140" transform="scale(.1)" textLength="370" data-interrogate="result">80.9%</text>
+    </g>
+    <g id="logo-shadow" serif:id="logo shadow" transform="matrix(0.854876,0,0,0.854876,-6.73514,1.732)">
+        <g transform="matrix(0.299012,0,0,0.299012,9.70229,-6.68582)">
+            <path d="M50,64.25C52.76,64.25 55,61.13 55,59.75C55,58.37 52.76,57.25 50,57.25C47.24,57.25 45,58.37 45,59.75C45,61.13 47.24,64.25 50,64.25Z" style="fill:rgb(1,1,1);fill-opacity:0.3;fill-rule:nonzero;"/>
+        </g>
+        <g transform="matrix(0.299012,0,0,0.299012,9.70229,-6.68582)">
+            <path d="M88,49.05C86.506,43.475 83.018,38.638 78.2,35.46C72.969,32.002 66.539,30.844 60.43,32.26C56.576,33.145 52.995,34.958 50,37.54C46.998,34.958 43.411,33.149 39.55,32.27C33.441,30.853 27.011,32.011 21.78,35.47C16.97,38.652 13.489,43.489 12,49.06L12,49.13C11.82,49.79 11.66,50.46 11.53,51.13C11.146,53.207 11.021,55.323 11.16,57.43C11.16,58.03 11.26,58.63 11.34,59.23C11.34,59.51 11.43,59.79 11.48,60.07C11.53,60.35 11.58,60.68 11.64,60.98C11.7,61.28 11.8,61.69 11.89,62.05C11.98,62.41 11.99,62.47 12.05,62.68C12.16,63.07 12.28,63.46 12.41,63.84L12.58,64.34C12.72,64.74 12.88,65.14 13.04,65.53L13.23,65.98C13.403,66.373 13.583,66.767 13.77,67.16L13.99,67.59C14.19,67.97 14.39,68.35 14.61,68.73L14.87,69.15C15.1,69.52 15.33,69.89 15.58,70.26L15.58,70.32L15.99,70.93C16.14,71.14 16.29,71.36 16.45,71.57C20.206,75.83 25.086,78.95 30.53,80.57C36.839,82.48 43.41,83.385 50,83.25C56.599,83.374 63.177,82.456 69.49,80.53C74.644,78.978 79.303,76.102 83,72.19C83.34,71.78 83.65,71.35 84,70.92L84.18,70.66L84.33,70.44L84.41,70.32C84.55,70.12 84.67,69.9 84.81,69.7C85.07,69.3 85.32,68.89 85.55,68.48C85.78,68.07 86.02,67.65 86.23,67.22C86.31,67.05 86.39,66.88 86.47,66.7C86.67,66.28 86.85,65.87 87.03,65.44L87.23,64.92C87.397,64.487 87.55,64.05 87.69,63.61L87.85,63.09C87.98,62.64 88.1,62.19 88.21,61.74C88.21,61.57 88.3,61.39 88.33,61.22C88.43,60.75 88.52,60.22 88.6,59.79C88.6,59.64 88.66,59.49 88.68,59.33C88.77,58.71 88.84,58.08 88.88,57.45L88.88,54.17C88.817,53.164 88.693,52.162 88.51,51.17C88.38,50.5 88.23,49.84 88.05,49.17L88,49.05ZM85.89,56.44L85.89,57.23C85.89,57.78 85.79,58.32 85.72,58.86C85.72,59.01 85.72,59.15 85.65,59.3C85.59,59.7 85.51,60.11 85.43,60.51L85.32,60.99C85.23,61.38 85.12,61.77 85.01,62.16C85.01,62.31 84.93,62.46 84.88,62.6C84.74,63.04 84.59,63.47 84.42,63.9L84.27,64.28C84.1,64.71 83.91,65.14 83.71,65.56C83.51,65.98 83.43,66.12 83.28,66.4L83.01,66.91C82.83,67.223 82.643,67.537 82.45,67.85L82.35,68.01C79.121,68.047 75.918,67.434 72.93,66.21C64.27,62.74 
59,55.52 61.18,50.11C62.18,47.6 64.7,45.82 68.26,45.11C72.489,44.395 76.835,44.908 80.78,46.59C82.141,47.144 83.453,47.813 84.7,48.59C84.76,48.76 84.82,48.93 84.88,49.1C84.94,49.27 85.05,49.63 85.12,49.9C85.28,50.5 85.44,51.1 85.55,51.73C85.691,52.507 85.792,53.292 85.85,54.08L85.85,55.89C85.85,56.12 85.91,56.25 85.91,56.45L85.89,56.44ZM17.66,68C16.668,66.435 15.869,64.756 15.28,63L15.17,62.68C15.06,62.35 14.96,62.01 14.87,61.68C14.823,61.493 14.777,61.31 14.73,61.13C14.66,60.84 14.59,60.55 14.53,60.27C14.47,59.99 14.43,59.72 14.38,59.44C14.33,59.16 14.3,59 14.27,58.78C14.2,58.27 14.15,57.78 14.11,57.23L14.11,57.03C14.008,55.236 14.122,53.437 14.45,51.67C14.56,51.06 14.71,50.46 14.88,49.87C14.96,49.59 15.04,49.32 15.13,49.05C15.22,48.78 15.24,48.72 15.3,48.55C16.548,47.774 17.859,47.105 19.22,46.55C27.86,43.09 36.65,44.67 38.82,50.08C40.99,55.49 35.73,62.74 27.09,66.2C24.101,67.431 20.893,68.043 17.66,68ZM68.57,77.68C62.554,79.508 56.287,80.376 50,80.25C43.737,80.37 37.495,79.506 31.5,77.69C27.185,76.38 23.243,74.062 20,70.93C22.815,70.706 25.58,70.055 28.2,69C38.37,64.92 44.39,56 41.6,49C38.81,42 28.27,39.72 18.1,43.8L17.43,44.09C18.973,41.648 21.019,39.561 23.43,37.97C26.671,35.824 30.473,34.68 34.36,34.68C35.884,34.681 37.404,34.852 38.89,35.19C42.694,36.049 46.191,37.935 49,40.64L50,41.64L51,40.64C53.797,37.937 57.279,36.049 61.07,35.18C66.402,33.947 72.014,34.968 76.57,38C78.98,39.588 81.026,41.671 82.57,44.11L81.9,43.82C77.409,41.921 72.464,41.355 67.66,42.19C63.08,43.12 59.79,45.54 58.39,49.02C55.6,55.97 61.62,64.94 71.79,69.02C74.414,70.07 77.182,70.714 80,70.93C76.776,74.05 72.859,76.363 68.57,77.68Z" style="fill:rgb(1,1,1);fill-opacity:0.3;fill-rule:nonzero;"/>
+        </g>
+        <g transform="matrix(0.299012,0,0,0.299012,9.70229,-6.68582)">
+            <circle cx="71.33" cy="56" r="5.16" style="fill:rgb(1,1,1);fill-opacity:0.3;"/>
+        </g>
+        <g transform="matrix(0.299012,0,0,0.299012,9.70229,-6.68582)">
+            <circle cx="28.67" cy="56" r="5.16" style="fill:rgb(1,1,1);fill-opacity:0.3;"/>
+        </g>
+        <g transform="matrix(0.299012,0,0,0.299012,9.70229,-6.68582)">
+            <path d="M58,66C55.912,68.161 53.003,69.339 50,69.24C46.997,69.339 44.088,68.161 42,66C41.714,65.677 41.302,65.491 40.87,65.491C40.042,65.491 39.361,66.172 39.361,67C39.361,67.368 39.496,67.724 39.74,68C42.403,70.804 46.134,72.35 50,72.25C53.862,72.347 57.59,70.802 60.25,68C60.495,67.725 60.63,67.369 60.63,67C60.63,66.174 59.951,65.495 59.125,65.495C58.695,65.495 58.285,65.679 58,66Z" style="fill:rgb(1,1,1);fill-opacity:0.3;fill-rule:nonzero;"/>
+        </g>
+    </g>
+    <g id="logo-pink" serif:id="logo pink" transform="matrix(0.854876,0,0,0.854876,-6.73514,0.877124)">
+        <g transform="matrix(0.299012,0,0,0.299012,9.70229,-6.68582)">
+            <path d="M50,64.25C52.76,64.25 55,61.13 55,59.75C55,58.37 52.76,57.25 50,57.25C47.24,57.25 45,58.37 45,59.75C45,61.13 47.24,64.25 50,64.25Z" style="fill:rgb(222,120,160);fill-rule:nonzero;"/>
+        </g>
+        <g transform="matrix(0.299012,0,0,0.299012,9.70229,-6.68582)">
+            <path d="M88,49.05C86.506,43.475 83.018,38.638 78.2,35.46C72.969,32.002 66.539,30.844 60.43,32.26C56.576,33.145 52.995,34.958 50,37.54C46.998,34.958 43.411,33.149 39.55,32.27C33.441,30.853 27.011,32.011 21.78,35.47C16.97,38.652 13.489,43.489 12,49.06L12,49.13C11.82,49.79 11.66,50.46 11.53,51.13C11.146,53.207 11.021,55.323 11.16,57.43C11.16,58.03 11.26,58.63 11.34,59.23C11.34,59.51 11.43,59.79 11.48,60.07C11.53,60.35 11.58,60.68 11.64,60.98C11.7,61.28 11.8,61.69 11.89,62.05C11.98,62.41 11.99,62.47 12.05,62.68C12.16,63.07 12.28,63.46 12.41,63.84L12.58,64.34C12.72,64.74 12.88,65.14 13.04,65.53L13.23,65.98C13.403,66.373 13.583,66.767 13.77,67.16L13.99,67.59C14.19,67.97 14.39,68.35 14.61,68.73L14.87,69.15C15.1,69.52 15.33,69.89 15.58,70.26L15.58,70.32L15.99,70.93C16.14,71.14 16.29,71.36 16.45,71.57C20.206,75.83 25.086,78.95 30.53,80.57C36.839,82.48 43.41,83.385 50,83.25C56.599,83.374 63.177,82.456 69.49,80.53C74.644,78.978 79.303,76.102 83,72.19C83.34,71.78 83.65,71.35 84,70.92L84.18,70.66L84.33,70.44L84.41,70.32C84.55,70.12 84.67,69.9 84.81,69.7C85.07,69.3 85.32,68.89 85.55,68.48C85.78,68.07 86.02,67.65 86.23,67.22C86.31,67.05 86.39,66.88 86.47,66.7C86.67,66.28 86.85,65.87 87.03,65.44L87.23,64.92C87.397,64.487 87.55,64.05 87.69,63.61L87.85,63.09C87.98,62.64 88.1,62.19 88.21,61.74C88.21,61.57 88.3,61.39 88.33,61.22C88.43,60.75 88.52,60.22 88.6,59.79C88.6,59.64 88.66,59.49 88.68,59.33C88.77,58.71 88.84,58.08 88.88,57.45L88.88,54.17C88.817,53.164 88.693,52.162 88.51,51.17C88.38,50.5 88.23,49.84 88.05,49.17L88,49.05ZM85.89,56.44L85.89,57.23C85.89,57.78 85.79,58.32 85.72,58.86C85.72,59.01 85.72,59.15 85.65,59.3C85.59,59.7 85.51,60.11 85.43,60.51L85.32,60.99C85.23,61.38 85.12,61.77 85.01,62.16C85.01,62.31 84.93,62.46 84.88,62.6C84.74,63.04 84.59,63.47 84.42,63.9L84.27,64.28C84.1,64.71 83.91,65.14 83.71,65.56C83.51,65.98 83.43,66.12 83.28,66.4L83.01,66.91C82.83,67.223 82.643,67.537 82.45,67.85L82.35,68.01C79.121,68.047 75.918,67.434 72.93,66.21C64.27,62.74 
59,55.52 61.18,50.11C62.18,47.6 64.7,45.82 68.26,45.11C72.489,44.395 76.835,44.908 80.78,46.59C82.141,47.144 83.453,47.813 84.7,48.59C84.76,48.76 84.82,48.93 84.88,49.1C84.94,49.27 85.05,49.63 85.12,49.9C85.28,50.5 85.44,51.1 85.55,51.73C85.691,52.507 85.792,53.292 85.85,54.08L85.85,55.89C85.85,56.12 85.91,56.25 85.91,56.45L85.89,56.44ZM17.66,68C16.668,66.435 15.869,64.756 15.28,63L15.17,62.68C15.06,62.35 14.96,62.01 14.87,61.68C14.823,61.493 14.777,61.31 14.73,61.13C14.66,60.84 14.59,60.55 14.53,60.27C14.47,59.99 14.43,59.72 14.38,59.44C14.33,59.16 14.3,59 14.27,58.78C14.2,58.27 14.15,57.78 14.11,57.23L14.11,57.03C14.008,55.236 14.122,53.437 14.45,51.67C14.56,51.06 14.71,50.46 14.88,49.87C14.96,49.59 15.04,49.32 15.13,49.05C15.22,48.78 15.24,48.72 15.3,48.55C16.548,47.774 17.859,47.105 19.22,46.55C27.86,43.09 36.65,44.67 38.82,50.08C40.99,55.49 35.73,62.74 27.09,66.2C24.101,67.431 20.893,68.043 17.66,68ZM68.57,77.68C62.554,79.508 56.287,80.376 50,80.25C43.737,80.37 37.495,79.506 31.5,77.69C27.185,76.38 23.243,74.062 20,70.93C22.815,70.706 25.58,70.055 28.2,69C38.37,64.92 44.39,56 41.6,49C38.81,42 28.27,39.72 18.1,43.8L17.43,44.09C18.973,41.648 21.019,39.561 23.43,37.97C26.671,35.824 30.473,34.68 34.36,34.68C35.884,34.681 37.404,34.852 38.89,35.19C42.694,36.049 46.191,37.935 49,40.64L50,41.64L51,40.64C53.797,37.937 57.279,36.049 61.07,35.18C66.402,33.947 72.014,34.968 76.57,38C78.98,39.588 81.026,41.671 82.57,44.11L81.9,43.82C77.409,41.921 72.464,41.355 67.66,42.19C63.08,43.12 59.79,45.54 58.39,49.02C55.6,55.97 61.62,64.94 71.79,69.02C74.414,70.07 77.182,70.714 80,70.93C76.776,74.05 72.859,76.363 68.57,77.68Z" style="fill:rgb(222,120,160);fill-rule:nonzero;"/>
+        </g>
+        <g transform="matrix(0.299012,0,0,0.299012,9.70229,-6.68582)">
+            <circle cx="71.33" cy="56" r="5.16" style="fill:rgb(222,120,160);"/>
+        </g>
+        <g transform="matrix(0.299012,0,0,0.299012,9.70229,-6.68582)">
+            <circle cx="28.67" cy="56" r="5.16" style="fill:rgb(222,120,160);"/>
+        </g>
+        <g transform="matrix(0.299012,0,0,0.299012,9.70229,-6.68582)">
+            <path d="M58,66C55.912,68.161 53.003,69.339 50,69.24C46.997,69.339 44.088,68.161 42,66C41.714,65.677 41.302,65.491 40.87,65.491C40.042,65.491 39.361,66.172 39.361,67C39.361,67.368 39.496,67.724 39.74,68C42.403,70.804 46.134,72.35 50,72.25C53.862,72.347 57.59,70.802 60.25,68C60.495,67.725 60.63,67.369 60.63,67C60.63,66.174 59.951,65.495 59.125,65.495C58.695,65.495 58.285,65.679 58,66Z" style="fill:rgb(222,120,160);fill-rule:nonzero;"/>
+        </g>
+    </g>
+    <defs>
+        <linearGradient id="_Linear1" x1="0" y1="0" x2="1" y2="0" gradientUnits="userSpaceOnUse" gradientTransform="matrix(1.22465e-15,20,-20,1.22465e-15,0,0)"><stop offset="0" style="stop-color:rgb(187,187,187);stop-opacity:0.1"/><stop offset="1" style="stop-color:black;stop-opacity:0.1"/></linearGradient>
+    </defs>
+</svg>