diff --git a/.gitignore b/.gitignore
index dc10c7c5367c3dd92e78694a9d7f9ec55c445a0f..180e51dcc3fd7c05b9039988dbcd04b74d7117fc 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,6 +1,8 @@
 notes/
 snippets/
 make_output.txt
+tests/population/scaling/scaling_plots/*
+
 *.org~
 *.so
 *.pyc
diff --git a/binarycpython.yml b/binarycpython.yml
new file mode 100644
index 0000000000000000000000000000000000000000..0c8f4644b7942895a43ca8f2fe82ce81314a9a51
--- /dev/null
+++ b/binarycpython.yml
@@ -0,0 +1,13 @@
+# /user/HS128/dh00601/.config/tmuxinator/binaryc.yml
+
+name: binarycpython
+root: <%= ENV["BINARYC_PYTHON"] %>
+
+
+windows:
+  - binarycpython:
+      layout: tiled
+      panes:
+        - ls
+        - subl .
+        - cd $BINARYC_DATA_ROOT/
\ No newline at end of file
diff --git a/binarycpython/utils/IMF.py b/binarycpython/utils/IMF.py
deleted file mode 100644
index d9b2c7de3fe45efd6afa7138846bd0a2ca28a818..0000000000000000000000000000000000000000
--- a/binarycpython/utils/IMF.py
+++ /dev/null
@@ -1,4 +0,0 @@
-### 
-# File containing functions to calculate probability of existence of star. Copied and inspired by the perl module IMF.pm or Rob Izzard
-
-# TODO: add 
\ No newline at end of file
diff --git a/binarycpython/utils/distribution_functions.py b/binarycpython/utils/distribution_functions.py
index 62f0c15cee6642e220d5b562b61d6c316c21df93..278bbef122b138c9927c955fb2fef5bbc4c71ec1 100644
--- a/binarycpython/utils/distribution_functions.py
+++ b/binarycpython/utils/distribution_functions.py
@@ -6,17 +6,19 @@ import binarycpython.utils.useful_funcs
 # Mostly copied from the perl modules
 
 # TODO: make some things globally present? rob does this in his module. i guess it saves calculations but not sure if im gonna do that now
-# TODO: Add the stuff from the IMF file 
+# TODO: Add the stuff from the IMF file
 # TODO: call all of these functions to check whether they work
 # TODO: make global constants stuff
 # TODO: make description of module submodule
 
-log_ln_converter = (1.0/math.log(10.0))
+log_ln_converter = 1.0 / math.log(10.0)
+
 
 def flat(parameter):
     """
     Dummy distribution function that returns 1
     """
+
     return 1
 
 
@@ -69,21 +71,36 @@ def powerlaw(min_val, max_val, k, x):
         # )
         return y
 
+
 def calculate_constants_three_part_powerlaw(m0, m1, m2, m_max, p1, p2, p3):
-    print("Initialising constants for the three-part powerlaw: m0={} m1={} m2={} m_max={} p1={} p2={} p3={}\n".format(m0, m1, m2, m_max, p1, p2, p3))
-    # local $SIG{__DIE__} = sub { Carp::confess @_ }; ?
+    # print("Initialising constants for the three-part powerlaw: m0={} m1={} m2={} m_max={} p1={} p2={} p3={}\n".format(m0, m1, m2, m_max, p1, p2, p3))
 
     array_constants_three_part_powerlaw = [0, 0, 0]
 
-    array_constants_three_part_powerlaw[1] =  ((m1**p2) * (m1**(-p1))) * (1.0/(1.0 + p1)) * (m1**(1.0 + p1) - m0**(1.0 + p1))
-    array_constants_three_part_powerlaw[1] +=  ((m2 ** (1.0 + p2) - m1 ** (1.0+p2))) * (1.0/(1.0+p2))
-    array_constants_three_part_powerlaw[1] += ((m2**p2) * (m2**(-p3))) * (1.0/(1.0 + p3)) * (m_max**(1.0+p3) - m2**(1.0+p3))
-    array_constants_three_part_powerlaw[1] = 1.0/(array_constants_three_part_powerlaw[1] + 1e-50)
-
-    array_constants_three_part_powerlaw[0] = array_constants_three_part_powerlaw[1] * ((m1**p2)*(m1**(-p1)))
-    array_constants_three_part_powerlaw[2] = array_constants_three_part_powerlaw[1] * ((m2**p2)*(m2**(-p3)))
+    array_constants_three_part_powerlaw[1] = (
+        ((m1 ** p2) * (m1 ** (-p1)))
+        * (1.0 / (1.0 + p1))
+        * (m1 ** (1.0 + p1) - m0 ** (1.0 + p1))
+    )
+    array_constants_three_part_powerlaw[1] += (
+        (m2 ** (1.0 + p2) - m1 ** (1.0 + p2))
+    ) * (1.0 / (1.0 + p2))
+    array_constants_three_part_powerlaw[1] += (
+        ((m2 ** p2) * (m2 ** (-p3)))
+        * (1.0 / (1.0 + p3))
+        * (m_max ** (1.0 + p3) - m2 ** (1.0 + p3))
+    )
+    array_constants_three_part_powerlaw[1] = 1.0 / (
+        array_constants_three_part_powerlaw[1] + 1e-50
+    )
+
+    array_constants_three_part_powerlaw[0] = array_constants_three_part_powerlaw[1] * (
+        (m1 ** p2) * (m1 ** (-p1))
+    )
+    array_constants_three_part_powerlaw[2] = array_constants_three_part_powerlaw[1] * (
+        (m2 ** p2) * (m2 ** (-p3))
+    )
 
-    # print(array_constants_three_part_powerlaw)
     return array_constants_three_part_powerlaw
     # $$array[1]=(($m1**$p2)*($m1**(-$p1)))*
     # (1.0/(1.0+$p1))*
@@ -99,6 +116,7 @@ def calculate_constants_three_part_powerlaw(m0, m1, m2, m_max, p1, p2, p3):
     # #print "ARRAY SET @_ => @$array\n";
     # $threepart_powerlaw_consts{"@_"}=[@$array];
 
+
 def three_part_powerlaw(M, M0, M1, M2, M_MAX, P1, P2, P3):
     """
     Generalized three-part power law, usually used for mass distributions
@@ -106,22 +124,25 @@ def three_part_powerlaw(M, M0, M1, M2, M_MAX, P1, P2, P3):
 
     # TODO: add check on whether the values exist
 
-    three_part_powerlaw_constants = calculate_constants_three_part_powerlaw(M0, M1, M2, M_MAX, P1, P2, P3)
-
-    # 
-    if (M < M0):
-        p = 0                                            # Below 0
-    elif (M0 < M <= M1):
-        p = three_part_powerlaw_constants[0] * (M ** P1) # Between M0 and M1
-    elif (M1 < M <= M2):
-        p = three_part_powerlaw_constants[1] * (M ** P2) # Between M1 and M2
-    elif (M2 < M <= M_MAX):
-        p = three_part_powerlaw_constants[2] * (M ** P3) # Between M2 and M_MAX
+    three_part_powerlaw_constants = calculate_constants_three_part_powerlaw(
+        M0, M1, M2, M_MAX, P1, P2, P3
+    )
+
+    #
+    if M < M0:
+        p = 0  # Below 0
+    elif M0 < M <= M1:
+        p = three_part_powerlaw_constants[0] * (M ** P1)  # Between M0 and M1
+    elif M1 < M <= M2:
+        p = three_part_powerlaw_constants[1] * (M ** P2)  # Between M1 and M2
+    elif M2 < M <= M_MAX:
+        p = three_part_powerlaw_constants[2] * (M ** P3)  # Between M2 and M_MAX
     else:
-        p = 0                                            # Above M_MAX
+        p = 0  # Above M_MAX
 
     return p
 
+
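+# Example call (a minimal sketch with illustrative, Kroupa-like parameters):
+# print(three_part_powerlaw(0.8, 0.1, 0.5, 1.0, 100, -1.3, -2.3, -2.3))
+
+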
 def const(min_bound, max_bound, val=None):
     """
     a constant distribution function between min=$_[0] and max=$_[1]
@@ -132,22 +153,25 @@ def const(min_bound, max_bound, val=None):
             print("out of bounds")
             p = 0
             return p
-    p = (1.0/(min_bound - max_bound))
+    p = 1.0 / (max_bound - min_bound)
     return p
 
+
 def set_opts(opts, newopts):
     """
     Function to take a default dict and override it with newer values.
     """
 
     # DONE: put in check to make sure that the newopts keys are contained in opts
+    # TODO: change this to just a dict.update
 
     if newopts:
         for opt in newopts.keys():
             if opt in opts.keys():
                 opts[opt] = newopts[opt]
 
-    return newopts
+    return opts
+
 
 def gaussian(x, mean, sigma, gmin, gmax):
     """
@@ -158,7 +182,7 @@ def gaussian(x, mean, sigma, gmin, gmax):
     # # location (X value), mean and sigma, min and max range
     # my ($x,$mean,$sigma,$gmin,$gmax) = @_;
 
-    if ((x < gmin) or (x > gmax)):
+    if (x < gmin) or (x > gmax):
         p = 0
     else:
         # normalize over given range
@@ -168,6 +192,7 @@ def gaussian(x, mean, sigma, gmin, gmax):
 
     return p
 
+
 def gaussian_normalizing_const(mean, sigma, gmin, gmax):
     """
     Function to calculate the normalisation constant for the gaussian
@@ -176,43 +201,66 @@ def gaussian_normalizing_const(mean, sigma, gmin, gmax):
     # First time; calculate multipllier for given mean and sigma
     ptot = 0
     resolution = 1000
-    d = (gmax-gmin)/resolution
+    d = (gmax - gmin) / resolution
 
     for i in range(resolution):
-        y = gmin + i*d
+        y = gmin + i * d
         ptot += d * gaussian_func(y, mean, sigma)
 
-
     # TODO: Set value in global
     return ptot
 
+
 def gaussian_func(x, mean, sigma):
     """
     Function to evaluate a gaussian at a given point
     """
-    GAUSSIAN_PREFACTOR = (1.0/math.sqrt(2.0*math.pi));
+    GAUSSIAN_PREFACTOR = 1.0 / math.sqrt(2.0 * math.pi)
 
-    r = 1.0/(sigma)
+    r = 1.0 / (sigma)
     y = (x - mean) * r
-    return GAUSSIAN_PREFACTOR * r * math.exp(-0.5 * y**2)
+    return GAUSSIAN_PREFACTOR * r * math.exp(-0.5 * y ** 2)
+
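+# Example call (illustrative values): a unit gaussian evaluated at its mean, normalised over [-5, 5]
+# print(gaussian(0.0, 0.0, 1.0, -5, 5))
+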
 
 #####
 # Mass distributions
-##### 
+#####
 
-def Kroupa2001(m, newopts):
-    """
-    probability distribution function for kroupa 2001 IMF 
 
-    Input: Mass, and dict of new options
+def Kroupa2001(m, newopts=None):
     """
+    Probability distribution function for the Kroupa 2001 IMF.
 
-    # Default params and override them 
-    opts = set_opts({'m0':0.1, 'm1':0.5, 'm2':1, 'mmax':100, 'p1':-1.3, 'p2':-2.3, 'p3':-2.3}, newopts)
-
-    return three_part_powerlaw(m, opts['m0'], opts['m0'], opts['m2'], opts['m0'], opts['m0'], opts['m0'], opts['m0'])
+    Input: mass, and an optional dict of options overriding the defaults:
+        {'m0':0.1, 'm1':0.5, 'm2':1, 'mmax':100, 'p1':-1.3, 'p2':-2.3, 'p3':-2.3}
+    """
 
-def ktg93():
+    # Default params and override them
+    default = {
+        "m0": 0.1,
+        "m1": 0.5,
+        "m2": 1,
+        "mmax": 100,
+        "p1": -1.3,
+        "p2": -2.3,
+        "p3": -2.3,
+    }
+    value_dict = default.copy()
+    if newopts:
+        value_dict.update(newopts)
+    return three_part_powerlaw(
+        m,
+        value_dict["m0"],
+        value_dict["m1"],
+        value_dict["m2"],
+        value_dict["mmax"],
+        value_dict["p1"],
+        value_dict["p2"],
+        value_dict["p3"],
+    )
+
+
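+# Example calls (a sketch using the defaults above; values are illustrative):
+# print(Kroupa2001(0.8))                  # density at m = 0.8 Msun with default options
+# print(Kroupa2001(0.8, {"mmax": 120}))   # same, but overriding mmax
+
+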
+def ktg93(m, newopts=None):
     """
     Wrapper for mass distribution of KTG93
     """
@@ -243,9 +291,29 @@ def ktg93():
     # }
 
     # set options
-    opts = set_opts({'m0':0.1, 'm1':0.5, 'm2':1.0, 'mmax':80, 'p1':-1.3, 'p2':-2.2, 'p3':-2.7}, newopts)
-
-    return three_part_powerlaw(m, opts['m0'], opts['m0'], opts['m2'], opts['m0'], opts['m0'], opts['m0'], opts['m0'])
+    # opts = set_opts({'m0':0.1, 'm1':0.5, 'm2':1.0, 'mmax':80, 'p1':-1.3, 'p2':-2.2, 'p3':-2.7}, newopts)
+    defaults = {
+        "m0": 0.1,
+        "m1": 0.5,
+        "m2": 1.0,
+        "mmax": 80,
+        "p1": -1.3,
+        "p2": -2.2,
+        "p3": -2.7,
+    }
+    value_dict = defaults.copy()
+    if newopts:
+        value_dict.update(newopts)
+    return three_part_powerlaw(
+        m,
+        value_dict["m0"],
+        value_dict["m1"],
+        value_dict["m2"],
+        value_dict["mmax"],
+        value_dict["p1"],
+        value_dict["p2"],
+        value_dict["p3"],
+    )
 
 
 # sub ktg93_lnspace
@@ -255,9 +323,54 @@ def ktg93():
 #     return ktg93(@_) * $m;
 # }
 
-## 
+
+def imf_tinsley1980(m):
+    """
+    From Tinsley 1980 (defined up until 80Msol)
+    """
+
+    return three_part_powerlaw(m, 0.1, 2.0, 10.0, 80.0, -2.0, -2.3, -3.3)
+
+
+def imf_scalo1986(m):
+    """
+    From Scalo 1986 (defined up until 80Msol)
+    """
+    return three_part_powerlaw(m, 0.1, 1.0, 2.0, 80.0, -2.35, -2.35, -2.70)
+
+
+def imf_scalo1998(m):
+    """
+    From scalo 1998
+    """
+    return three_part_powerlaw(m, 0.1, 1.0, 10.0, 80.0, -1.2, -2.7, -2.3)
+
+
+def imf_chabrier2003(m):
+    """
+    IMF of Chabrier 2003 PASP 115:763-795
+    """
+    Chabrier_logmc = math.log10(0.079)
+    Chabrier_sigma2 = 0.69 * 0.69
+    Chabrier_A1 = 0.158
+    Chabrier_A2 = 4.43e-2
+    Chabrier_x = -1.3
+    if m <= 0:
+        print("below bounds")
+        raise ValueError
+    if 0 < m < 1.0:
+        dm = math.log10(m) - Chabrier_logmc
+        p = Chabrier_A1 * math.exp(-(dm ** 2) / (2.0 * Chabrier_sigma2))
+    else:
+        p = Chabrier_A2 * (m ** Chabrier_x)
+    p = p / (0.1202462 * m * math.log(10))
+    return p
+
+
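+# Example calls (illustrative masses):
+# print(imf_chabrier2003(0.5))   # lognormal part (m < 1 Msun)
+# print(imf_chabrier2003(5.0))   # power-law part (m > 1 Msun)
+
+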
+########################################################################
 # Binary fractions
-## 
+########################################################################
 def Arenou2010_binary_fraction(m):
     # Arenou 2010 function for the binary fraction as f(M1)
     #
@@ -265,8 +378,11 @@ def Arenou2010_binary_fraction(m):
     # www.rssd.esa.int/doc_fetch.php?id=2969346
 
     return 0.8388 * math.tanh(0.688 * m + 0.079)
+
+
 # print(Arenou2010_binary_fraction(0.4))
 
+
 def raghavan2010_binary_fraction(m):
     """
     Fit to the Raghavan 2010 binary fraction as a function of   
@@ -280,19 +396,33 @@ def raghavan2010_binary_fraction(m):
     Rob then fitted the result
     """
 
-    return min(1.0, 
+    return min(
+        1.0,
         max(
-            (m**0.1) * (5.12310e-01) + (-1.02070e-01),
-            (1.10450e+00) * (m**(4.93670e-01)) + (-6.95630e-01)
-            )
-        )
+            (m ** 0.1) * (5.12310e-01) + (-1.02070e-01),
+            (1.10450e00) * (m ** (4.93670e-01)) + (-6.95630e-01),
+        ),
+    )
+
+
 # print(raghavan2010_binary_fraction(2))
 
-## 
+########################################################################
 # Period distributions
-## 
+########################################################################
+
+
+def duquennoy1991(x):
+    """
+    Period distribution from Duquennoy + Mayor 1991
 
-def sana12(M1, M2, a, P, amin, amax, x0, x1, p): # TODO: ? wtf. vague input
+    Input:
+        x: logperiod
+    """
+    return gaussian(x, 4.8, 2.3, -2, 12)
+
+
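+# Example call (illustrative log-period value):
+# print(duquennoy1991(4.0))
+
+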
+def sana12(M1, M2, a, P, amin, amax, x0, x1, p):  # TODO: clarify these arguments; several are vaguely named
     """
     distribution of initial orbital periods as found by Sana et al. (2012)
     which is a flat distribution in ln(a) and ln(P) respectively for stars 
@@ -302,38 +432,49 @@ def sana12(M1, M2, a, P, amin, amax, x0, x1, p): # TODO: ? wtf. vague input
     and is be given by dp/dlogP ~ (logP)^p for all other binary configurations (default p=-0.55)
 
     arguments are M1, M2, a, Period P, amin, amax, x0=log P0, x1=log P1, p
+
+    example args: 10, 5, ?, P, ?, ?, -2, 12, -0.55
     """
 
     res = 0
 
-    if (M1 < 15) or (M2/M1 < 0.1):
-        res = (1.0/(math.log(amax)-math.log(amin)))
+    if (M1 < 15) or (M2 / M1 < 0.1):
+        res = 1.0 / (math.log(amax) - math.log(amin))
     else:
         p1 = 1.0 + p
 
         # For more details see the LyX document of binary_c for this distribution where the variables and normalizations are given
-        # we use the notation x=log(P), xmin=log(Pmin), x0=log(P0), ... to determine the 
+        # we use the notation x=log(P), xmin=log(Pmin), x0=log(P0), ... to determine the
         x = log_ln_converter * math.log(P)
         xmin = log_ln_converter * math.log(calc_period_from_sep(M1, M2, amin))
         xmax = log_ln_converter * math.log(calc_period_from_sep(M1, M2, amax))
 
-        #my $x0 = 0.15;
-        #my $x1 = 3.5;
+        # my $x0 = 0.15;
+        # my $x1 = 3.5;
 
-        A1 = 1.0/(x0**p * (x0-xmin) + (x1**p1 -x0**p1)/p1 + x1**p * (xmax-x1))
-        A0 = A1 * x0**p
-        A2 = A1 * x1**p
+        A1 = 1.0 / (
+            x0 ** p * (x0 - xmin) + (x1 ** p1 - x0 ** p1) / p1 + x1 ** p * (xmax - x1)
+        )
+        A0 = A1 * x0 ** p
+        A2 = A1 * x1 ** p
 
-        if (x < x0):
-            res = 3.0/2.0 * log_ln_converter * A0
-        elif (x > x1):
-            res = 3.0/2.0 * log_ln_converter * A2
+        if x < x0:
+            res = 3.0 / 2.0 * log_ln_converter * A0
+        elif x > x1:
+            res = 3.0 / 2.0 * log_ln_converter * A2
         else:
-            res = 3.0/2.0 * log_ln_converter * A1 * x**p
+            res = 3.0 / 2.0 * log_ln_converter * A1 * x ** p
 
     return res
+
+
 # print(sana12(10, 2, 10, 100, 1, 1000, math.log(10), math.log(1000), 6))
 
+########################################################################
+# Mass ratio distributions
+########################################################################
+
+
 def flatsections(x, opts):
     """
     
@@ -346,14 +487,16 @@ def flatsections(x, opts):
     y = 0
 
     for opt in opts:
-        dc = (opt['max'] - opt['min'])*opt['height']
+        dc = (opt["max"] - opt["min"]) * opt["height"]
         # print("added flatsection ({}-{})*{} = {}\n".format(opt['max'], opt['min'], opt['height'], dc))
         c += dc
-        if (opt['min'] <= x <= opt['max']):
-            y = opt['height']
+        if opt["min"] <= x <= opt["max"]:
+            y = opt["height"]
             # print("Use this\n")
-    c = 1.0/c
+    c = 1.0 / c
     y = y * c
     # print("flatsections gives C={}: y={}\n",c,y)
     return y
+
+
 # print(flatsections(1, [{'min': 0, 'max': 2, 'height': 3}]))
diff --git a/binarycpython/utils/functions.py b/binarycpython/utils/functions.py
index 779f0f7a2c9a1cff884af07fb7635814790f6f1b..71590f1202f52168fcc0df66d7b56c07d9891828 100644
--- a/binarycpython/utils/functions.py
+++ b/binarycpython/utils/functions.py
@@ -10,6 +10,7 @@ from binarycpython.utils.custom_logging_functions import (
     create_and_load_logging_function,
 )
 
+
 def parse_binary_c_version_info(version_info_string):
     version_info_dict = {}
 
diff --git a/binarycpython/utils/grid.py b/binarycpython/utils/grid.py
index 384a9c92448b71425bafafb5ab1fbbff31532716..e11b31a751ca294b4918a7de885ff351793b8d29 100644
--- a/binarycpython/utils/grid.py
+++ b/binarycpython/utils/grid.py
@@ -38,7 +38,7 @@ from binarycpython.utils.functions import get_defaults, parse_binary_c_version_i
 # TODO: add functionality to return the nuclear_mass_list
 # TODO: add functionality to return the source_list
 # TODO: add functionality to return the ensemble_list
-# TODO: add verbosity options
+# DONE: add verbosity options
 
 # DONE: add functionality to return the evcode_version_string
 # Make this function also an API call. Doest seem to get written to a buffer that is stored into a python object. rather its just written to stdout
@@ -193,7 +193,7 @@ class Population(object):
         parameter_name,
         condition=None,
     ):
-        """
+        """
         Function to add grid variables to the grid_options.
 
         TODO: Fix this complex function.
@@ -390,13 +390,14 @@ class Population(object):
             )
 
             # Load memory adress
-            self.grid_options[
-                "custom_logging_func_memaddr"
-            ], self.grid_options["custom_logging_shared_library_file"] = create_and_load_logging_function(
+            (
+                self.grid_options["custom_logging_func_memaddr"],
+                self.grid_options["custom_logging_shared_library_file"],
+            ) = create_and_load_logging_function(
                 custom_logging_code, verbose=self.grid_options["verbose"]
             )
 
-        elif self.grid_options["C_auto_logging"]: 
+        elif self.grid_options["C_auto_logging"]:
             # Generate real logging code
             logging_line = autogen_C_logging_code(
                 self.grid_options["C_auto_logging"],
@@ -409,9 +410,10 @@ class Population(object):
             )
 
             # Load memory adress
-            self.grid_options[
-                "custom_logging_func_memaddr"
-            ], self.grid_options["custom_logging_shared_library_file"] = create_and_load_logging_function(
+            (
+                self.grid_options["custom_logging_func_memaddr"],
+                self.grid_options["custom_logging_shared_library_file"],
+            ) = create_and_load_logging_function(
                 custom_logging_code, verbose=self.grid_options["verbose"]
             )
 
@@ -442,7 +444,7 @@ class Population(object):
         # TODO: add call to function that cleans up the temp customlogging dir, and unloads the loaded libraries.
         # TODO: make a switch to turn this off
         if clean_up_custom_logging_files:
-            self.clean_up_custom_logging(evol_type='single')
+            self.clean_up_custom_logging(evol_type="single")
 
         # Parse
         if parse_function:
@@ -450,46 +452,6 @@ class Population(object):
         else:
             return out
 
-    def remove_file(self, file, verbose):
-        """
-        Function to remove files but with verbosity
-        """
-
-        if os.path.exists(file):
-            try:
-                if verbose > 0: print("Removed {}".format())
-                os.remove(file)
-            except:
-                if verbose > 0: print("Error while deleting file {}".format(file))
-                raise FileNotFoundError
-
-    def clean_up_custom_logging(self, evol_type):
-        """
-        Function to clean up the custom logging. 
-        Has two types: 
-            'single': 
-                - removes the compiled shared library (which name is stored in grid_options['custom_logging_shared_library_file'])
-                - TODO: unloads/frees the memory allocated to that shared library (which is stored in grid_options['custom_logging_func_memaddr'])
-                - sets both to None
-            'multiple': 
-                - TODO: make this and design this
-        """
-
-        if evol_type=='single':
-            if self.grid_options['verbose'] > 0: print("Cleaning up the custom logging stuff. type: single")
-            # TODO: Unset custom logging code
-
-            # TODO: Unset function memory adress
-            print(self.grid_options["custom_logging_func_memaddr"]) 
-
-            # remove shared library files
-            if self.grid_options["custom_logging_shared_library_file"]:
-                self.remove_file(self.grid_options["custom_logging_shared_library_file"], self.grid_options['verbose'])
-
-        if evol_type=='population':
-            if self.grid_options['verbose'] > 0: print("Cleaning up the custom logging stuffs. type: population")
-            # TODO: make sure that these also work. not fully sure if necessary tho. whether its a single file, or a dict of files/memaddresses
-
     def evolve_population(self, parse_function, custom_arg_file=None):
         """
         The function that will evolve the population. This function contains many steps
@@ -546,140 +508,151 @@ class Population(object):
 
         # TODO: add call to function that cleans up the temp customlogging dir, and unloads the loaded libraries.
 
-    def evolve_population_comparison(
-        self, parse_function, amt, nodes, custom_arg_file=None
-    ):
+    ###################################################
+    # Testing functions
+    ###################################################
+
+    def test_evolve_population_lin(self):
         """
-        The function that will evolve the population. This function contains many steps
+        Test function to evolve a population in a linear way.
+
+        Returns the total time spent on the actual interfacing with binary_c.
         """
 
+        import time
+
+        #######################
         ### Custom logging code:
         self.set_custom_logging()
 
         ### Load store
         self.grid_options["store_memaddr"] = binary_c_python_api.return_store("")
 
-        # Execute.
+        #######################
+        # Dry run and getting starcount
+        self.grid_options['probtot'] = 0
 
-        ### Part to test running this with and without multiprocessing.
-        import time
-        import multiprocessing as mp
-        from pathos.multiprocessing import ProcessingPool as Pool
-        import random
-
-        start_no_mp = time.time()
-        self.set(base_filename="no_mp_{}.dat".format(amt))
-        # amt = 1000
-        masses = range(1, amt + 1)
-        for i in masses:
-            mass = random.randint(1, 500)
-            # print(mass)
-            self.set_bse_option("M_1", mass)
-            out = binary_c_python_api.run_population(
-                self.return_argline(),
-                self.grid_options["custom_logging_func_memaddr"],
-                self.grid_options["store_memaddr"],
-            )
-            # parse_function(self, out)
-        stop_no_mp = time.time()
+        self.generate_grid_code(dry_run=True)
 
-        print("without mp: {} systems took {}s".format(amt, stop_no_mp - start_no_mp))
+        self.load_grid_function()
 
-        #########################################################
+        self.dry_run()
 
-        start_mp = time.time()
+        total_starcount_run = self.grid_options['total_starcount']
+        print("Total starcount for this run will be: {}".format(total_starcount_run))
+
+        #######################
+        # Linear run
+        start_lin = time.time()
+
+        self.grid_options["probtot"] = 0  # To make sure that the values are reset. TODO: fix this in a cleaner way
+
+        self.generate_grid_code(dry_run=False)
 
-        self.set(base_filename="mp_{}.dat".format(amt))
+        self.load_grid_function()
 
-        def evolve_mp(mass):
-            # print(mass)
-            self.set_bse_option("M_1", mass)
-            # self.set(M_1=mass)
+        for i, system in enumerate(self.grid_options["system_generator"](self)):
+            full_system_dict = self.bse_options.copy()
+            full_system_dict.update(system)
+
+            binary_cmdline_string = self.return_argline(full_system_dict)
             out = binary_c_python_api.run_population(
-                self.return_argline(),
+                binary_cmdline_string,
                 self.grid_options["custom_logging_func_memaddr"],
                 self.grid_options["store_memaddr"],
             )
-            # parse_function(self, out)
-
-        p = Pool(nodes=nodes)
+            print("{}/{}".format(i+1, total_starcount_run), binary_cmdline_string)
 
-        def g(amt):
-            # amt = 1000
-            masses = range(1, amt + 1)
-            for i in masses:
-                mass = random.randint(1, 500)
-                yield mass
-            print("generator done")
+        stop_lin = time.time()
 
-        r = list(p.imap(evolve_mp, g(amt)))
-
-        stop_mp = time.time()
-        print("with mp: {} systems took {}s".format(amt, stop_mp - start_mp))
-
-        #########################################################
-        duration_no_mp = stop_no_mp - start_no_mp
-        duration_mp = stop_mp - start_mp 
-        ratio = duration_no_mp/duration_mp
         print(
-            "Running mp versus no mp is {} times faster!".format(
-                ratio
-            )
-        )
+            "Without mp: {} systems took {}s".format(
+                total_starcount_run, stop_lin - start_lin
+            )
+        )
+
+        return stop_lin - start_lin
 
-        return (nodes, amt, duration_no_mp, duration_mp, ratio)
 
-    def evolve_population_mp(self, parse_function, mass_distribution):
+    def test_evolve_population_mp(self):
         """
-        The function that will evolve the population. This function contains many steps
+        Test function to evolve a population in a parallel way.
+
+        Returns the total time spent on the actual interfacing with binary_c.
         """
 
+        import time
+        import multiprocessing as mp
+        from pathos.multiprocessing import ProcessingPool as Pool
+
+        #######################
         ### Custom logging code:
         self.set_custom_logging()
 
         ### Load store
         self.grid_options["store_memaddr"] = binary_c_python_api.return_store("")
 
-        # evolve with mp
+        #######################
+        # Dry run and getting starcount
+        self.grid_options['probtot'] = 0
+
+        self.generate_grid_code(dry_run=True)
+
+        self.load_grid_function()
+
+        self.dry_run()
+
+        total_starcount_run = self.grid_options['total_starcount']
+        print("Total starcount for this run will be: {}".format(total_starcount_run))
+
+        #######################
+        # MP run
+        self.grid_options['probtot'] = 0 # To make sure that the values are reset. TODO: fix this in a cleaner way
+
         start_mp = time.time()
 
-        def evolve_mp(mass):
-            self.set_bse_option("M_1", mass)
+        self.generate_grid_code(dry_run=False)
+
+        self.load_grid_function()
+
+        def evolve_system(binary_cmdline_string):
+            # print(binary_cmdline_string)
+            # pass
+            # print('next')
+            # self.set_bse_option("M_1", mass)
             out = binary_c_python_api.run_population(
-                self.return_argline(),
+                binary_cmdline_string,
                 self.grid_options["custom_logging_func_memaddr"],
                 self.grid_options["store_memaddr"],
             )
-            parse_function(self, out)
+            # # parse_function(self, out)
 
-        p = Pool(nodes=self.grid_options["amt_cores"])
+        def yield_system():
+            for i, system in enumerate(self.grid_options["system_generator"](self)):
+                full_system_dict = self.bse_options.copy()
+                full_system_dict.update(system)
 
-        def g(mass_distribution):
-            masses = mass_distribution
-            for mass in masses:
-                yield mass
+                binary_cmdline_string = self.return_argline(full_system_dict)
+                print("{}/{}".format(i+1, total_starcount_run), binary_cmdline_string)
+
+                yield binary_cmdline_string
             print("generator done")
 
-        r = list(p.imap(evolve_mp, g(mass_distribution)))
+        # Create pool
+        p = Pool(nodes=self.grid_options["amt_cores"])
+
+        # Execute
+        r = list(p.imap(evolve_system, yield_system()))
+
         stop_mp = time.time()
 
+        # Give feedback
         print(
             "with mp: {} systems took {}s using {} cores".format(
-                len(mass_distribution),
+                self.grid_options['total_starcount'],
                 stop_mp - start_mp,
                 self.grid_options["amt_cores"],
             )
         )
 
-        # TODO: add functionality to unload all the stores etc
-
-        #########################################################
-        # print("Running mp versus no mp is {} times faster!".format((start_no_mp-stop_no_mp)/(start_mp-stop_mp)))
-        # return (nodes, amt, stop_no_mp-start_no_mp, stop_mp-start_mp)
-
-    ###################################################
-    # Testing functions
-    ###################################################
+        return stop_mp - start_mp
 
     def test_evolve_single(self):
         """
@@ -711,26 +684,71 @@ class Population(object):
         print("\n\nBinary_c output:")
         print(output)
 
+    ###################################################
+    # Unordered functions
+    ###################################################
 
+    def remove_file(self, file, verbose):
+        """
+        Function to remove files but with verbosity
+        """
 
+        if os.path.exists(file):
+            try:
+                if verbose > 0:
+                    print("Removed {}".format(file))
+                os.remove(file)
+            except:
+                print("Error while deleting file {}".format(file))
+                raise FileNotFoundError
+
+    def clean_up_custom_logging(self, evol_type):
+        """
+        Function to clean up the custom logging. 
+        Has two types: 
+            'single': 
+                - removes the compiled shared library (which name is stored in grid_options['custom_logging_shared_library_file'])
+                - TODO: unloads/frees the memory allocated to that shared library (which is stored in grid_options['custom_logging_func_memaddr'])
+                - sets both to None
+            'multiple': 
+                - TODO: make this and design this
+        """
+
+        if evol_type == "single":
+            if self.grid_options["verbose"] > 0:
+                print("Cleaning up the custom logging stuff. type: single")
+            # TODO: Unset custom logging code
+
+            # TODO: Unset function memory adress
+            # print(self.grid_options["custom_logging_func_memaddr"])
+
+
+            # remove shared library files
+            if self.grid_options["custom_logging_shared_library_file"]:
+                self.remove_file(
+                    self.grid_options["custom_logging_shared_library_file"],
+                    self.grid_options["verbose"],
+                )
+
+        if evol_type == "population":
+            if self.grid_options["verbose"] > 0:
+                print("Cleaning up the custom logging stuffs. type: population")
+            # TODO: make sure that these also work. not fully sure if necessary tho. whether its a single file, or a dict of files/memaddresses
 
 
-    ###################################################
-    # Unordered functions
-    ###################################################
 
     def increment_probtot(self, prob):
         """
         Function to add to the total probability
         """
 
-        self.grid_options['probtot'] += prob
+        self.grid_options["probtot"] += prob
 
     def increment_count(self):
         """
         Function to add to the total amount of stars
         """
-        self.grid_options['count'] += 1
+        self.grid_options["count"] += 1
 
     ###################################################
     # Gridcode functions
@@ -739,6 +757,12 @@ class Population(object):
     def generate_grid_code(self, dry_run=False):
         """
         Function that generates the code from which the population will be made.
+    
+        dry_run: when True, the generated grid code returns the starcount at the end, so that we know in advance how many systems will be run.
+
+
+        The phasevol values are handled by generating a second array of sampled values; each variable's phase volume is taken as the spacing between consecutive samples.
+
 
         # DONE: make a generator for this.  
         # TODO: Add correct logging everywhere
@@ -748,6 +772,10 @@ class Population(object):
         # TODO: add part to correctly set the values with the spacingsfunctions. 
         # TODO: add phasevol correctly
         # TODO: add centering center left right for the spacing
+        # TODO: add sensible description to this function.
+
+        Results in a generated file that contains a system_generator function.
+
         """
 
         if self.grid_options["verbose"] > 0:
@@ -802,7 +830,7 @@ class Population(object):
             )
         )
         code_string += indent * depth + "parameter_dict = {}\n"
-        code_string += indent * depth + "phasevol = 0\n"        
+        code_string += indent * depth + "phasevol = 1\n"
         code_string += indent * depth + "\n"
 
         code_string += indent * depth + "# setting probability lists\n"
@@ -823,10 +851,12 @@ class Population(object):
         code_string += indent * depth + "\n"
         # Generate code
         print("Generating grid code")
-        for loopnr, el in enumerate(sorted(
-            self.grid_options["grid_variables"].items(),
-            key=lambda x: x[1]["grid_variable_number"],
-        )):
+        for loopnr, el in enumerate(
+            sorted(
+                self.grid_options["grid_variables"].items(),
+                key=lambda x: x[1]["grid_variable_number"],
+            )
+        ):
             print("Constructing/adding: {}".format(el[0]))
             grid_variable = el[1]
 
@@ -849,7 +879,7 @@ class Population(object):
                     + "\n"
                 )
 
-                # Add condition failed action: #TODO: add correct exception error
+                # Add condition failed action:
                 code_string += (
                     indent * (depth + 1)
                     + 'print("Condition for {} not met!")'.format(
@@ -871,11 +901,31 @@ class Population(object):
                 + "\n"
             )
 
+            code_string += (
+                indent * depth
+                + "sampled_values_{} = {}".format(grid_variable["name"], grid_variable["spacingfunc"])
+                + "\n"
+            )
+
+            # TODO: Make clear that the phasevol is only correct if this parameter is sampled linearly.
+            code_string += (
+                indent * depth
+                + "phasevol_{} = sampled_values_{}[1]-sampled_values_{}[0]".format(grid_variable["name"], grid_variable["name"], grid_variable["name"])
+                + "\n"
+            )
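+            # For a grid variable named e.g. "M_1" (illustrative), the two blocks above emit:
+            #   sampled_values_M_1 = <the spacingfunc string>
+            #   phasevol_M_1 = sampled_values_M_1[1]-sampled_values_M_1[0]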
+
+            # # Some print statement
+            # code_string += (
+            #     indent * depth
+            #     + "print('phasevol_{}:', phasevol_{})".format(grid_variable["name"], grid_variable["name"])
+            #     + "\n"
+            # )
+
             # Adding for loop structure
             code_string += (
                 indent * depth
-                + "for {} in {}:".format(
-                    grid_variable["name"], grid_variable["spacingfunc"]
+                + "for {} in sampled_values_{}:".format(
+                    grid_variable["name"], grid_variable["name"]
                 )
                 + "\n"
             )
@@ -891,9 +941,10 @@ class Population(object):
                 + "\n"
             )
 
-
             # Set phasevol
-            code_string += (indent * (depth + 1) + "if {} > 0: phasevol *= {}\n".format(grid_variable["name"], grid_variable["name"]))
+            code_string += indent * (depth + 1) + "phasevol *= phasevol_{}\n".format(
+                grid_variable["name"], 
+            )
 
             #######################
             # Probabilities
@@ -957,15 +1008,15 @@ class Population(object):
             # Add some space
             code_string += "\n"
 
-            # The final parts of the code, where things are returned, are within the deepest loop, 
+            # The final parts of the code, where things are returned, are within the deepest loop,
             # but in some cases code from a higher loop needs to go under it again
-            # SO I think its better to put an ifstatement here that checks whether this is the last loop. 
-            if loopnr == len(self.grid_options["grid_variables"]) -1:
+            # SO I think its better to put an ifstatement here that checks whether this is the last loop.
+            if loopnr == len(self.grid_options["grid_variables"]) - 1:
 
                 #################################################################################
                 # Here are the calls to the queuing or other solution. this part is for every system
                 # Add comment
-                code_string += (indent * (depth + 1) +  "#" * 40 + "\n")
+                code_string += indent * (depth + 1) + "#" * 40 + "\n"
                 code_string += (
                     indent * (depth + 1)
                     + "# Code below will get evaluated for every generated system\n"
@@ -989,28 +1040,27 @@ class Population(object):
                 # set probability and phasevol values
                 code_string += (
                     indent * (depth + 1)
-                    + 'parameter_dict["{}"] = {}'.format(
-                        'probability', 'probability'
-                    )
+                    + 'parameter_dict["{}"] = {}'.format("probability", "probability")
                     + "\n"
                 )
                 code_string += (
                     indent * (depth + 1)
-                    + 'parameter_dict["{}"] = {}'.format(
-                        'phasevol', 'phasevol'
-                    )
+                    + 'parameter_dict["{}"] = {}'.format("phasevol", "phasevol")
                     + "\n"
                 )
 
-
                 # Some prints. will be removed
                 # code_string += indent * (depth + 1) + "print(probabilities)\n"
                 # code_string += (
                 #     indent * (depth + 1) + 'print("total_starcount: ", total_starcount)\n'
                 # )
 
+                # code_string += indent * (depth + 1) + "print(probability)\n"
+
                 # Increment total probability
-                code_string += indent * (depth + 1) + 'self.increment_probtot(probability)\n'
+                code_string += (
+                    indent * (depth + 1) + "self.increment_probtot(probability)\n"
+                )
 
                 if not dry_run:
                     # Handling of what is returned, or what is not.
@@ -1025,21 +1075,24 @@ class Population(object):
                 else:
                     code_string += indent * (depth + 1) + "pass\n"
 
-                code_string += (indent * (depth + 1) +  "#" * 40 + "\n")
+                code_string += indent * (depth + 1) + "#" * 40 + "\n"
 
             # increment depth
             depth += 1
 
-
         depth -= 1
         code_string += "\n"
         # Write parts to write below the part that yield the results. this has to go in a reverse order:
-        # Here comes the stuff that is put after the deepest nested part that calls returns stuff. 
-        for loopnr, el in enumerate(sorted(
-            self.grid_options["grid_variables"].items(),
-            key=lambda x: x[1]["grid_variable_number"], reverse=True
-        )):
-            code_string += (indent * (depth + 1) +  "#" * 40 + "\n")
+        # Here comes the stuff that is put after the deepest nested part that calls returns stuff.
+        for loopnr, el in enumerate(
+            sorted(
+                self.grid_options["grid_variables"].items(),
+                key=lambda x: x[1]["grid_variable_number"],
+                reverse=True,
+            )
+        ):
+            grid_variable = el[1]
+            code_string += indent * (depth + 1) + "#" * 40 + "\n"
             code_string += (
                 indent * (depth + 1)
                 + "# Code below is for finalising the handling of this iteration of the parameter\n"
@@ -1047,19 +1100,31 @@ class Population(object):
 
             # Set phasevol
             # TODO: fix. this isnt supposed to be the value that we give it here. discuss
-            code_string += (indent * (depth + 1) + "if {} > 0: phasevol /= {}\n".format(grid_variable["name"], grid_variable["name"]))
+            code_string += indent * (depth + 1) + "phasevol /= phasevol_{}\n".format(
+                grid_variable["name"]
+            )
+            code_string += indent * (depth + 1) + "\n"
 
-            depth -= 1
 
+            depth -= 1
 
         ################
         # Finalising print statements
         #
-        code_string += (indent * (depth + 1) +  "\n")
-        code_string += (indent * (depth + 1) +  "#" * 40 + "\n")
-        code_string += (indent * (depth + 1) + "print('Grid has handled {} stars'.format(total_starcount))\n")
-        code_string += (indent * (depth + 1) + "print('with a total probability of {}'.format(2))\n")
+        # code_string += indent * (depth + 1) + "\n"
+        code_string += indent * (depth + 1) + "#" * 40 + "\n"
+        code_string += (
+            indent * (depth + 1)
+            + "print('Grid has handled {} stars'.format(total_starcount))\n"
+        )
+        code_string += (
+            indent * (depth + 1) + "print('with a total probability of {}'.format(self.grid_options['probtot']))\n"
+        )
 
+        if dry_run:
+            code_string += (
+                indent * (depth + 1) + "return total_starcount\n"
+            )
 
 
         #################################################################################
@@ -1090,11 +1155,12 @@ class Population(object):
         - reset values to 0
         - remove grid file
         - unload grid function/module
+        - remove dry grid file
+        - unload dry grid function/module
         """
 
         pass
 
-
     def load_grid_function(self):
         """
         Test function to run grid stuff. mostly to test the import
@@ -1112,23 +1178,27 @@ class Population(object):
 
         spec = importlib.util.spec_from_file_location(
             "binary_c_python_grid",
-            os.path.join(self.grid_options["tmp_dir"], "example_grid.py"),
+            self.grid_options["gridcode_filename"],
         )
         grid_file = importlib.util.module_from_spec(spec)
         spec.loader.exec_module(grid_file)
-        generator = grid_file.grid_code(self)
+        generator = grid_file.grid_code
 
-        self.grid_options['system_generator'] = generator
+        self.grid_options["system_generator"] = generator
 
         if self.grid_options["verbose"] > 0:
             print("Grid code loaded")
 
-        # for el in generator:
-        #     print(el)
+    def dry_run(self):
+        """
+        Function to dry run the grid and know how many stars it will run
+    
+        Requires the grid to be built as a dry run grid
+        """
 
-        # print(next(generator))
-        # print(next(generator))
-        # print(next(generator))
+        system_generator = self.grid_options["system_generator"]
+        total_starcount = system_generator(self)
+        self.grid_options['total_starcount'] = total_starcount
 
 
     def write_binary_c_calls_to_file(self, output_dir=None, output_filename=None):
@@ -1138,39 +1208,39 @@ class Population(object):
         On default this will write to the datadir, if it exists
         """
 
-        if self.grid_options['system_generator']:
-            if self.custom_options.get('data_dir', None):
-                binary_c_calls_output_dir = self.custom_options['data_dir']
-                print('yo')
+        if self.grid_options["system_generator"]:
+            if self.custom_options.get("data_dir", None):
+                binary_c_calls_output_dir = self.custom_options["data_dir"]
+                print("yo")
             else:
                 if not output_dir:
                     # if self.grid_options['verbose'] > 0:
-                    print("Error. No data_dir configured and you gave no output_dir. Aborting")
+                    print(
+                        "Error. No data_dir configured and you gave no output_dir. Aborting"
+                    )
                     raise ValueError
                 else:
                     binary_c_calls_output_dir = output_dir
 
             if output_filename:
-                binary_c_calls_filename = output_filename                 
+                binary_c_calls_filename = output_filename
             else:
-                binary_c_calls_filename = 'binary_c_calls.txt'
+                binary_c_calls_filename = "binary_c_calls.txt"
 
             print(binary_c_calls_output_dir, binary_c_calls_filename)
 
-            with open(os.path.join(binary_c_calls_output_dir, binary_c_calls_filename), 'w') as f:
-                for system in self.grid_options['system_generator']:
+            with open(
+                os.path.join(binary_c_calls_output_dir, binary_c_calls_filename), "w"
+            ) as f:
+                for system in self.grid_options["system_generator"](self):
                     full_system_dict = self.bse_options.copy()
                     full_system_dict.update(system)
 
                     binary_cmdline_string = self.return_argline(full_system_dict)
-                    f.write(binary_cmdline_string + '\n')
+                    f.write(binary_cmdline_string + "\n")
         else:
-            if self.grid_options['verbose'] > 0: 
-                print("Error. No grid function found!")
-                raise KeyError
-
-
-
+            print("Error. No grid function found!")
+            raise KeyError
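+
+    # Example usage (a sketch, assuming a configured Population instance `pop`
+    # with grid variables already added):
+    #   pop.generate_grid_code(dry_run=False)
+    #   pop.load_grid_function()
+    #   pop.write_binary_c_calls_to_file(output_dir="/tmp", output_filename="calls.txt")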
 
 
 ################################################################################################
diff --git a/binarycpython/utils/grid_options_defaults.py b/binarycpython/utils/grid_options_defaults.py
index 5a747233d2e0674ecf77b15fc44435cf533f0d5b..ea4119291d2d264e8b7ca5b8fedd77ce703936ce 100644
--- a/binarycpython/utils/grid_options_defaults.py
+++ b/binarycpython/utils/grid_options_defaults.py
@@ -7,6 +7,17 @@ grid_options_defaults_dict = {
     # general
     "amt_cores": 1,  # total amount of cores used to evolve the population
     "verbose": 0,  # Level of verbosity of the simulation
+    # binary_c files
+    "binary_c_executable": os.path.join(
+        os.environ["BINARY_C"], "binary_c"
+    ),  # TODO: make this more robust
+    "binary_c_shared_library": os.path.join(
+        os.environ["BINARY_C"], "src", "libbinary_c.so"
+    ),  # TODO: make this more robust
+    "binary_c_config_executable": os.path.join(
+        os.environ["BINARY_C"], "binary_c-config"
+    ),  # TODO: make this more robust
+    "binary_c_dir": os.environ["BINARYC_DIR"],
     # Output dir
     # "output_dir":
     # Custom logging
@@ -20,16 +31,15 @@ grid_options_defaults_dict = {
     "log_args": 0,  #
     "log_args_dir": "/tmp/",
     # Grid variables: instructions to generate the values of the parameters
-    "grid_variables": {}, # grid variables
+    "grid_variables": {},  # grid variables
     "grid_code": None,  # literal grid code
-    "gridcode_filename": None, # filename of gridcode
+    "gridcode_filename": None,  # filename of gridcode
     "count": 0,  # total count of systems
-    "probtot": 0, # total probabilit
-    "system_generator": None, # value that holds the function that generates the system (result of building the grid script)
+    "probtot": 0,  # total probability
+    "system_generator": None,  # value that holds the function that generates the system (result of building the grid script)
     # binary
     "binary": 0,
     # Locations:
-    "binary_c_dir": os.environ["BINARYC_DIR"],
     "tmp_dir": temp_dir(),
     # Probability:
     "weight": 1.0,  # weighting for the probability
diff --git a/binarycpython/utils/spacing_functions.py b/binarycpython/utils/spacing_functions.py
index b8f70c3ba456c1c06d7f019ea2eb0f24658a215e..fbd3d35c7ceb3459fd05e7b57c0c0a7754552ade 100644
--- a/binarycpython/utils/spacing_functions.py
+++ b/binarycpython/utils/spacing_functions.py
@@ -1,2 +1,4 @@
-def const():
-    return
+import numpy as np
+
+
+def const(min, max, steps):
+    """
+    Constant spacing function: returns `steps` linearly spaced values between min and max.
+    """
+    return np.linspace(min, max, steps)
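+
+
+# Example call (illustrative): 10 linearly spaced mass values between 1 and 100
+# print(const(1, 100, 10))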
diff --git a/binarycpython/utils/useful_funcs.py b/binarycpython/utils/useful_funcs.py
index ac83dd89530d6c812b5609e16933e813b025dd09..cf1b5b444224bc2adad9d1e53da7e78f96d248ac 100644
--- a/binarycpython/utils/useful_funcs.py
+++ b/binarycpython/utils/useful_funcs.py
@@ -1,5 +1,5 @@
 ############################################################
-# Collection of useful functions. 
+# Collection of useful functions.
 
 # Part of this is copied/inspired by Robs
 # Rob's binary_stars module
@@ -8,13 +8,14 @@
 # calc_period_from_sep($m1,$m2,$sep) calculate the period given the separation.
 # calc_sep_from_period($m1,$m2,per) does the inverse.
 # M1,M2,separation are in solar units, period in days.
-# rzams($m,$z) gives you the ZAMS radius of a star 
+# rzams($m,$z) gives you the ZAMS radius of a star
 # ZAMS_collision($m1,$m2,$e,$sep,$z) returns 1 if stars collide on the ZAMS
 ###
 # TODO: check whether these are correct
 
-AURSUN = 2.150445198804013386961742071435e+02
-YEARDY = 3.651995478818308811241877265275e+02
+AURSUN = 2.150445198804013386961742071435e02
+YEARDY = 3.651995478818308811241877265275e02
+
 
 def calc_period_from_sep(M1, M2, sep):
     """
@@ -22,7 +23,8 @@ def calc_period_from_sep(M1, M2, sep):
     args : M1, M2, separation (Rsun)
     """
 
-    return YEARDY*(sep/AURSUN)*sqrt(sep/(AURSUN*(M1+M2)));
+    return YEARDY * (sep / AURSUN) * math.sqrt(sep / (AURSUN * (M1 + M2)))
+
 
 def calc_sep_from_period(M1, M2, period):
     """
@@ -30,22 +32,26 @@ def calc_sep_from_period(M1, M2, period):
     args : M1, M2, period (days)
     """
 
-    return AURSUN*(period*period*(M1+M2)/(YEARDY*YEARDY))**(1.0/3.0);
+    return AURSUN * (period * period * (M1 + M2) / (YEARDY * YEARDY)) ** (1.0 / 3.0)
+
 
 def roche_lobe(q):
     """
     A function to evaluate R_L/a(q), Eggleton 1983.
     """
-    
-    p = q**(1.0/3.0);
-    return(0.49*p*p/(0.6*p*p + log(1.0+p)));
+
+    p = q ** (1.0 / 3.0)
+    return 0.49 * p * p / (0.6 * p * p + math.log(1.0 + p))
+
 
 def ragb(m, z):
     """
     Function to calculate radius of a star at first thermal pulse as a function of mass (z=0.02)
     """
     # Z=0.02 only, but also good for Z=0.001
-    return m*40.0+20.0; # in Rsun
+    return m * 40.0 + 20.0  # in Rsun
+
 
 def zams_collission(m1, m2, sep, e, z):
     """
@@ -53,22 +59,23 @@ def zams_collission(m1, m2, sep, e, z):
     # determine if two stars collide on the ZAMS
     """
     # calculate periastron distance
-    peri_distance=(1.0-e) * sep
-    
+    peri_distance = (1.0 - e) * sep
+
     # calculate star radii
-    r1=rzams(m1, z);
-    r2=rzams(m2, z);
+    r1 = rzams(m1, z)
+    r2 = rzams(m2, z)
 
     if r1 + r2 > peri_distance:
         return 1
     else:
         return 0
 
+
 def rzams(m, z):
     """
     Function to determine the radius of a ZAMS star as a function of m and z:
     """
-    lzs = math.log10(z/0.02)
+    lzs = math.log10(z / 0.02)
 
     #
     # A function to evaluate Rzams
@@ -76,18 +83,108 @@ def rzams(m, z):
     #
 
     p = {}
-    xz = (666, 0.39704170, -0.32913574,  0.34776688,  0.37470851, 0.09011915, 8.52762600,-24.41225973, 56.43597107, 37.06152575, 5.45624060,    0.00025546, -0.00123461, -0.00023246,  0.00045519, 0.00016176,    5.43288900, -8.62157806, 13.44202049, 14.51584135, 3.39793084,    5.56357900,-10.32345224, 19.44322980, 18.97361347, 4.16903097,    0.78866060, -2.90870942,  6.54713531,  4.05606657, 0.53287322,    0.00586685, -0.01704237,  0.03872348,  0.02570041, 0.00383376,    1.71535900,  0.62246212, -0.92557761, -1.16996966,-0.30631491,    6.59778800, -0.42450044,-12.13339427,-10.73509484,-2.51487077,   10.08855000, -7.11727086,-31.67119479,-24.24848322,-5.33608972,    1.01249500,  0.32699690, -0.00923418, -0.03876858,-0.00412750,    0.07490166,  0.02410413,  0.07233664,  0.03040467, 0.00197741,    0.01077422,    3.08223400,  0.94472050, -2.15200882, -2.49219496,-0.63848738,   17.84778000, -7.45345690,-48.96066856,-40.05386135,-9.09331816,   0.00022582, -0.00186899,  0.00388783,  0.00142402,-0.00007671);
-
-    p['8'] = xz[36]+lzs*(xz[37]+lzs*(xz[38]+lzs*(xz[39]+lzs*xz[40])));
-    p['9'] = xz[41]+lzs*(xz[42]+lzs*(xz[43]+lzs*(xz[44]+lzs*xz[45]))); 
-    p['10'] = xz[46]+lzs*(xz[47]+lzs*(xz[48]+lzs*(xz[49]+lzs*xz[50])));
-    p['11'] = xz[51]+lzs*(xz[52]+lzs*(xz[53]+lzs*(xz[54]+lzs*xz[55])));
-    p['12'] = xz[56]+lzs*(xz[57]+lzs*(xz[58]+lzs*(xz[59]+lzs*xz[60])));
-    p['13'] = xz[61];
-    p['14'] = xz[62]+lzs*(xz[63]+lzs*(xz[64]+lzs*(xz[65]+lzs*xz[66])));
-    p['15'] = xz[67]+lzs*(xz[68]+lzs*(xz[69]+lzs*(xz[70]+lzs*xz[71])));
-    p['16'] = xz[72]+lzs*(xz[73]+lzs*(xz[74]+lzs*(xz[75]+lzs*xz[76])));
-
-    m195 = m**19.5
-    rad = ((p['8']*(m**2.5) + p['9']*(m**6.5) + p['10']*(m**11) + p['11']*(m**19)+p['12']*m195)/(p['13'] + p['14']*(m*m) + p['15']*(m**8.5) + (m**18.5) + p['16']*m195))
-    return rad
\ No newline at end of file
+    xz = (
+        666,
+        0.39704170,
+        -0.32913574,
+        0.34776688,
+        0.37470851,
+        0.09011915,
+        8.52762600,
+        -24.41225973,
+        56.43597107,
+        37.06152575,
+        5.45624060,
+        0.00025546,
+        -0.00123461,
+        -0.00023246,
+        0.00045519,
+        0.00016176,
+        5.43288900,
+        -8.62157806,
+        13.44202049,
+        14.51584135,
+        3.39793084,
+        5.56357900,
+        -10.32345224,
+        19.44322980,
+        18.97361347,
+        4.16903097,
+        0.78866060,
+        -2.90870942,
+        6.54713531,
+        4.05606657,
+        0.53287322,
+        0.00586685,
+        -0.01704237,
+        0.03872348,
+        0.02570041,
+        0.00383376,
+        1.71535900,
+        0.62246212,
+        -0.92557761,
+        -1.16996966,
+        -0.30631491,
+        6.59778800,
+        -0.42450044,
+        -12.13339427,
+        -10.73509484,
+        -2.51487077,
+        10.08855000,
+        -7.11727086,
+        -31.67119479,
+        -24.24848322,
+        -5.33608972,
+        1.01249500,
+        0.32699690,
+        -0.00923418,
+        -0.03876858,
+        -0.00412750,
+        0.07490166,
+        0.02410413,
+        0.07233664,
+        0.03040467,
+        0.00197741,
+        0.01077422,
+        3.08223400,
+        0.94472050,
+        -2.15200882,
+        -2.49219496,
+        -0.63848738,
+        17.84778000,
+        -7.45345690,
+        -48.96066856,
+        -40.05386135,
+        -9.09331816,
+        0.00022582,
+        -0.00186899,
+        0.00388783,
+        0.00142402,
+        -0.00007671,
+    )
+
+    p["8"] = xz[36] + lzs * (xz[37] + lzs * (xz[38] + lzs * (xz[39] + lzs * xz[40])))
+    p["9"] = xz[41] + lzs * (xz[42] + lzs * (xz[43] + lzs * (xz[44] + lzs * xz[45])))
+    p["10"] = xz[46] + lzs * (xz[47] + lzs * (xz[48] + lzs * (xz[49] + lzs * xz[50])))
+    p["11"] = xz[51] + lzs * (xz[52] + lzs * (xz[53] + lzs * (xz[54] + lzs * xz[55])))
+    p["12"] = xz[56] + lzs * (xz[57] + lzs * (xz[58] + lzs * (xz[59] + lzs * xz[60])))
+    p["13"] = xz[61]
+    p["14"] = xz[62] + lzs * (xz[63] + lzs * (xz[64] + lzs * (xz[65] + lzs * xz[66])))
+    p["15"] = xz[67] + lzs * (xz[68] + lzs * (xz[69] + lzs * (xz[70] + lzs * xz[71])))
+    p["16"] = xz[72] + lzs * (xz[73] + lzs * (xz[74] + lzs * (xz[75] + lzs * xz[76])))
+
+    m195 = m ** 19.5
+    rad = (
+        p["8"] * (m ** 2.5)
+        + p["9"] * (m ** 6.5)
+        + p["10"] * (m ** 11)
+        + p["11"] * (m ** 19)
+        + p["12"] * m195
+    ) / (
+        p["13"]
+        + p["14"] * (m * m)
+        + p["15"] * (m ** 8.5)
+        + (m ** 18.5)
+        + p["16"] * m195
+    )
+    return rad
diff --git a/coverage.txt b/coverage.txt
new file mode 100644
index 0000000000000000000000000000000000000000..547b8b8dbb9fe7f011bab00c2aaddb3b9b41a97a
--- /dev/null
+++ b/coverage.txt
@@ -0,0 +1,208 @@
+
+File: "binary_c-python/__init__.py"
+ - File is empty
+ Needed: 0; Found: 0; Missing: 0; Coverage: 0.0%
+
+File: "binary_c-python/setup.py"
+ - No module docstring
+ - No docstring for `readme`
+ Needed: 2; Found: 0; Missing: 2; Coverage: 0.0%
+
+File: "binary_c-python/examples/example_population.py"
+ - No module docstring
+ Needed: 1; Found: 0; Missing: 1; Coverage: 0.0%
+
+File: "binary_c-python/examples/examples.py"
+ - No module docstring
+ Needed: 6; Found: 5; Missing: 1; Coverage: 83.3%
+
+File: "binary_c-python/examples/example_plotting_distributions.py"
+ - No module docstring
+ Needed: 1; Found: 0; Missing: 1; Coverage: 0.0%
+
+File: "binary_c-python/examples/examples_custom_logging.py"
+ - No module docstring
+ Needed: 1; Found: 0; Missing: 1; Coverage: 0.0%
+
+File: "binary_c-python/examples/.ipynb_checkpoints/examples-checkpoint.py"
+ - No module docstring
+ Needed: 5; Found: 4; Missing: 1; Coverage: 80.0%
+
+File: "binary_c-python/snippets/multiprocessing_comparison.py"
+ - No module docstring
+ - No docstring for `parse_function`
+ - No docstring for `evolve_mp`
+ - No docstring for `g`
+ Needed: 5; Found: 1; Missing: 4; Coverage: 20.0%
+
+File: "binary_c-python/snippets/verbose.py"
+ - No module docstring
+ Needed: 1; Found: 0; Missing: 1; Coverage: 0.0%
+
+File: "binary_c-python/snippets/test.py"
+ - No module docstring
+ Needed: 1; Found: 0; Missing: 1; Coverage: 0.0%
+
+File: "binary_c-python/snippets/increment.py"
+ - No module docstring
+ Needed: 1; Found: 0; Missing: 1; Coverage: 0.0%
+
+File: "binary_c-python/snippets/phasevol.py"
+ - No module docstring
+ Needed: 1; Found: 0; Missing: 1; Coverage: 0.0%
+
+File: "binary_c-python/snippets/dict_merging.py"
+ - No module docstring
+ Needed: 1; Found: 0; Missing: 1; Coverage: 0.0%
+
+File: "binary_c-python/snippets/yield_test.py"
+ - No module docstring
+ - No docstring for `yielder`
+ Needed: 2; Found: 0; Missing: 2; Coverage: 0.0%
+
+File: "binary_c-python/snippets/montecarlo_example.py"
+ - No module docstring
+ - No docstring for `mass_montecarlo`
+ - No docstring for `calc_alpha`
+ - No docstring for `period_montecarlo`
+ - No docstring for `calc_beta`
+ - No docstring for `massratio_montecarlo`
+ - No docstring for `eccentricity_montecarlo`
+ Needed: 7; Found: 0; Missing: 7; Coverage: 0.0%
+
+File: "binary_c-python/snippets/defaultdict.py"
+ - No module docstring
+ Needed: 1; Found: 0; Missing: 1; Coverage: 0.0%
+
+File: "binary_c-python/snippets/multiprocessing_test.py"
+ - No module docstring
+ - No docstring for `calculate`
+ - No docstring for `calculate.run`
+ - No docstring for `run.f`
+ Needed: 4; Found: 0; Missing: 4; Coverage: 0.0%
+
+File: "binary_c-python/docs/source/conf.py"
+ - No module docstring
+ Needed: 1; Found: 0; Missing: 1; Coverage: 0.0%
+
+File: "binary_c-python/lib/__init__.py"
+ - File is empty
+ Needed: 0; Found: 0; Missing: 0; Coverage: 0.0%
+
+File: "binary_c-python/binarycpython/__init__.py"
+ - File is empty
+ Needed: 0; Found: 0; Missing: 0; Coverage: 0.0%
+
+File: "binary_c-python/binarycpython/core/__init__.py"
+ - File is empty
+ Needed: 0; Found: 0; Missing: 0; Coverage: 0.0%
+
+File: "binary_c-python/binarycpython/utils/grid_options_defaults.py"
+ - No module docstring
+ Needed: 1; Found: 0; Missing: 1; Coverage: 0.0%
+
+File: "binary_c-python/binarycpython/utils/distribution_functions.py"
+ - No module docstring
+ - No docstring for `calculate_constants_three_part_powerlaw`
+ - No docstring for `Arenou2010_binary_fraction`
+ Needed: 23; Found: 20; Missing: 3; Coverage: 87.0%
+
+File: "binary_c-python/binarycpython/utils/stellar_types.py"
+ - No module docstring
+ Needed: 1; Found: 0; Missing: 1; Coverage: 0.0%
+
+File: "binary_c-python/binarycpython/utils/spacing_functions.py"
+ - No module docstring
+ - No docstring for `const`
+ Needed: 2; Found: 0; Missing: 2; Coverage: 0.0%
+
+File: "binary_c-python/binarycpython/utils/useful_funcs.py"
+ - No module docstring
+ Needed: 7; Found: 6; Missing: 1; Coverage: 85.7%
+
+File: "binary_c-python/binarycpython/utils/__init__.py"
+ - File is empty
+ Needed: 0; Found: 0; Missing: 0; Coverage: 0.0%
+
+File: "binary_c-python/binarycpython/utils/custom_logging_functions.py"
+ - No module docstring
+ Needed: 9; Found: 8; Missing: 1; Coverage: 88.9%
+
+File: "binary_c-python/binarycpython/utils/functions.py"
+ - No module docstring
+ - No docstring for `parse_binary_c_version_info`
+ Needed: 13; Found: 11; Missing: 2; Coverage: 84.6%
+
+File: "binary_c-python/binarycpython/utils/grid.py"
+ - No module docstring
+ - No docstring for `Population`
+ - No docstring for `Population.set_bse_option`
+ - No docstring for `evolve_population_comparison.evolve_mp`
+ - No docstring for `evolve_population_comparison.g`
+ - No docstring for `evolve_population_mp.evolve_mp`
+ - No docstring for `evolve_population_mp.g`
+ Needed: 32; Found: 25; Missing: 7; Coverage: 78.1%
+
+File: "binary_c-python/binarycpython/utils/.ipynb_checkpoints/custom_logging_functions-checkpoint.py"
+ - No module docstring
+ Needed: 9; Found: 8; Missing: 1; Coverage: 88.9%
+
+File: "binary_c-python/binarycpython/utils/.ipynb_checkpoints/functions-checkpoint.py"
+ - No module docstring
+ - No docstring for `load_logfile`
+ Needed: 8; Found: 6; Missing: 2; Coverage: 75.0%
+
+File: "binary_c-python/tests/python_API_test.py"
+ - No module docstring
+ - No docstring for `test_run_binary`
+ - No docstring for `test_return_help`
+ - No docstring for `test_return_arglines`
+ - No docstring for `test_run_binary_with_log`
+ - No docstring for `test_run_binary_with_custom_logging`
+ - No docstring for `test_return_help_all`
+ - No docstring for `test_return_version_info`
+ - No docstring for `test_return_store`
+ - No docstring for `test_run_system`
+ Needed: 10; Found: 0; Missing: 10; Coverage: 0.0%
+
+File: "binary_c-python/tests/random_tests.py"
+ - No module docstring
+ Needed: 1; Found: 0; Missing: 1; Coverage: 0.0%
+
+File: "binary_c-python/tests/function_tests.py"
+ - No module docstring
+ Needed: 1; Found: 0; Missing: 1; Coverage: 0.0%
+
+File: "binary_c-python/tests/population/plot_scaling.py"
+ - No module docstring
+ - No docstring for `calc_mean_and_std`
+ Needed: 2; Found: 0; Missing: 2; Coverage: 0.0%
+
+File: "binary_c-python/tests/population/grid_tests_cmdline.py"
+ - No module docstring
+ Needed: 1; Found: 0; Missing: 1; Coverage: 0.0%
+
+File: "binary_c-python/tests/population/multiprocessing_via_population_comparison.py"
+ - No module docstring
+ - No docstring for `parse_function`
+ Needed: 3; Found: 1; Missing: 2; Coverage: 33.3%
+
+File: "binary_c-python/tests/population/grid_tests.py"
+ - No module docstring
+ Needed: 1; Found: 0; Missing: 1; Coverage: 0.0%
+
+File: "binary_c-python/tests/population/global_variable_for_distributions.py"
+ - No module docstring
+ - No docstring for `with_glob`
+ - No docstring for `without_glob`
+ Needed: 6; Found: 3; Missing: 3; Coverage: 50.0%
+
+File: "binary_c-python/.ipynb_checkpoints/python_API_test-checkpoint.py"
+ - No module docstring
+ - No docstring for `run_test_binary`
+ Needed: 2; Found: 0; Missing: 2; Coverage: 0.0%
+
+
+Overall statistics for 41 files (5 files are empty):
+Docstrings needed: 173; Docstrings found: 98; Docstrings missing: 75
+Total docstring coverage: 56.6%;  Grade: Not bad
diff --git a/examples/example_plotting_distributions.py b/examples/example_plotting_distributions.py
new file mode 100644
index 0000000000000000000000000000000000000000..49c1c9167938782d6bef395d52796dba7c262955
--- /dev/null
+++ b/examples/example_plotting_distributions.py
@@ -0,0 +1,166 @@
+import math
+
+import numpy as np
+import matplotlib.pyplot as plt
+
+from binarycpython.utils.distribution_functions import (
+    three_part_powerlaw,
+    Kroupa2001,
+    Arenou2010_binary_fraction,
+    raghavan2010_binary_fraction,
+    imf_scalo1998,
+    imf_scalo1986,
+    imf_tinsley1980,
+    imf_chabrier2003,
+    flatsections,
+    duquennoy1991,
+    sana12,
+)
+from binarycpython.utils.useful_funcs import calc_sep_from_period
+
+################################################
+# Example script to plot the available probability distributions.
+# TODO:
+
+################################################
+# mass distribution plots
+################################################
+# mass_values = np.arange(0.11, 80, .1)
+
+# kroupa_probability = [Kroupa2001(mass) for mass in mass_values]
+# scalo1986 = [imf_scalo1986(mass) for mass in mass_values]
+# tinsley1980 = [imf_tinsley1980(mass) for mass in mass_values]
+# scalo1998 = [imf_scalo1998(mass) for mass in mass_values]
+# chabrier2003 = [imf_chabrier2003(mass) for mass in mass_values]
+
+# plt.plot(mass_values, kroupa_probability, label='Kroupa')
+# plt.plot(mass_values, scalo1986, label='scalo1986')
+# plt.plot(mass_values, tinsley1980, label='tinsley1980')
+# plt.plot(mass_values, scalo1998, label='scalo1998')
+# plt.plot(mass_values, chabrier2003, label='chabrier2003')
+
+# plt.title('Probability distribution for mass of primary')
+# plt.ylabel(r'Probability')
+# plt.xlabel(r'Mass (M$_{\odot}$)')
+# plt.yscale('log')
+# plt.xscale('log')
+# plt.grid()
+# plt.legend()
+# plt.show()
+
+################################################
+# Binary fraction distributions
+################################################
+# arenou_binary_distribution = [Arenou2010_binary_fraction(mass) for mass in mass_values]
+# raghavan2010_binary_distribution = [raghavan2010_binary_fraction(mass) for mass in mass_values ]
+
+# plt.plot(mass_values, arenou_binary_distribution, label='Arenou 2010')
+# plt.plot(mass_values, raghavan2010_binary_distribution, label='Raghavan 2010')
+# plt.title('Binary fractions distributions')
+# plt.ylabel(r'Binary fraction')
+# plt.xlabel(r'Mass (M$_{\odot}$)')
+# # plt.yscale('log')
+# plt.xscale('log')
+# plt.grid()
+# plt.legend()
+# plt.show()
+
+
+################################################
+# Mass ratio distributions
+################################################
+
+# mass_ratios = np.arange(0, 1, .01)
+# example_mass = 2
+# flat_dist = [flatsections(q, opts=[{'min':0.1/example_mass, 'max':0.8, 'height':1}, {'min': 0.8, 'max':1.0, 'height': 1.0}]) for q in mass_ratios]
+
+# plt.plot(mass_ratios, flat_dist, label='Flat')
+# plt.title('Mass ratio distributions')
+# plt.ylabel(r'Probability')
+# plt.xlabel(r'Mass ratio (q = $\frac{M_2}{M_1}$)')
+# plt.grid()
+# plt.legend()
+# plt.show()
+
+################################################
+# Period distributions
+################################################
+# TODO: fix this
+
+logperiod_values = np.arange(-2, 12, 0.1)
+duquennoy1991_distribution = [duquennoy1991(logper) for logper in logperiod_values]
+
+# Sana12 distributions
+m1 = 10
+m2 = 5
+period_min = 10 ** 0.15
+period_max = 10 ** 5.5
+
+sana12_distribution_q05 = [
+    sana12(
+        m1,
+        m2,
+        calc_sep_from_period(m1, m2, 10 ** logper),
+        10 ** logper,
+        calc_sep_from_period(m1, m2, period_min),
+        calc_sep_from_period(m1, m2, period_max),
+        math.log10(period_min),
+        math.log10(period_max),
+        -0.55,
+    )
+    for logper in logperiod_values
+]
+
+m1 = 10
+m2 = 1
+sana12_distribution_q01 = [
+    sana12(
+        m1,
+        m2,
+        calc_sep_from_period(m1, m2, 10 ** logper),
+        10 ** logper,
+        calc_sep_from_period(m1, m2, period_min),
+        calc_sep_from_period(m1, m2, period_max),
+        math.log10(period_min),
+        math.log10(period_max),
+        -0.55,
+    )
+    for logper in logperiod_values
+]
+
+m1 = 10
+m2 = 10
+sana12_distribution_q1 = [
+    sana12(
+        m1,
+        m2,
+        calc_sep_from_period(m1, m2, 10 ** logper),
+        10 ** logper,
+        calc_sep_from_period(m1, m2, period_min),
+        calc_sep_from_period(m1, m2, period_max),
+        math.log10(period_min),
+        math.log10(period_max),
+        -0.55,
+    )
+    for logper in logperiod_values
+]
+
+
+plt.plot(logperiod_values, duquennoy1991_distribution, label="Duquennoy & Mayor 1991")
+plt.plot(logperiod_values, sana12_distribution_q05, label="Sana 12 (q=0.5)")
+plt.plot(logperiod_values, sana12_distribution_q01, label="Sana 12 (q=0.1)")
+plt.plot(logperiod_values, sana12_distribution_q1, label="Sana 12 (q=1)")
+plt.title("Period distributions")
+plt.ylabel(r"Probability")
+plt.xlabel(r"Log10(orbital period)")
+plt.grid()
+plt.legend()
+plt.show()
+
+
+################################################
+# Sampling part of distribution and calculating probability ratio
+################################################
+
+# TODO show the difference between sampling over the full range, or taking a smaller range initially and compensating for it.
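+# A minimal sketch of the idea in the TODO above (hedged: it assumes Kroupa2001 only
+# needs the mass argument, as it is used earlier in this script, and uses np.trapz for
+# a rough numerical integration). Sampling only a sub-range of masses means the
+# probabilities have to be compensated by the fraction of the full distribution that
+# the sub-range contains.
+full_range = np.arange(0.11, 80, 0.1)
+sub_range = np.arange(1, 10, 0.1)
+
+prob_full = np.array([Kroupa2001(mass) for mass in full_range])
+prob_sub = np.array([Kroupa2001(mass) for mass in sub_range])
+
+# Fraction of the total probability that lies in the sub-range:
+fraction_in_sub_range = np.trapz(prob_sub, sub_range) / np.trapz(prob_full, full_range)
+print(
+    "Fraction of the Kroupa2001 IMF contained in 1-10 Msun: {}".format(
+        fraction_in_sub_range
+    )
+)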
diff --git a/examples/example_population.py b/examples/example_population.py
index e466c45507c6a02c39ab23b36fd713e6196d664c..4ee06d7d7bd669dd4b9f19bb02a3180059597723 100644
--- a/examples/example_population.py
+++ b/examples/example_population.py
@@ -8,7 +8,6 @@ from binarycpython.utils.grid import Population
 from binarycpython.utils.functions import get_help_all, get_help, create_hdf5
 
 
-
 #########################################################
 # This file serves as an example for running a population.
 # The use of help(<function>) is a good way to inspect what parameters are there to use
@@ -52,7 +51,7 @@ example_pop.set(
 )
 
 # TODO: show when reading from file
-example_pop.set_custom_logging() # TODO: remove this one
+example_pop.set_custom_logging()  # TODO: remove this one
 
 # Adding grid variables:
 example_pop.add_grid_variable(
@@ -90,4 +89,4 @@ example_pop.export_all_info()
 # Wrapping up the results to an hdf5 file can be done by using the create_hdf5(<directory containing data and settings>)
 # This function takes the settings file (ending in _settings.json) and the data files (ending in .dat) from the data_dir
 # and packing them into an hdf5 file, which is then written into the same data_dir directory
-create_hdf5(example_pop.custom_options["data_dir"])
\ No newline at end of file
+create_hdf5(example_pop.custom_options["data_dir"])
diff --git a/setup.py b/setup.py
index 36a2278eee6b36974005a9bf6026e4048c4194ed..086900e4bbdce83840ef56beafaf9a5681dfc834 100644
--- a/setup.py
+++ b/setup.py
@@ -7,17 +7,19 @@ import subprocess
 import re
 import sys
 
-# TODO: make this clean
 GSL_DIR = os.getenv("GSL_DIR", None)
 if not GSL_DIR:
     print(
         "Warning: GSL_DIR is not set, this might lead to errors along the installation if there is no other version of GSL in the include dirs"
     )
 
-
 # TODO: write code to know exact parent directory of this file.
 CWD = os.getcwd()
 
+############################################################
+# Getting information from binary_c
+############################################################
+
 # binary_c must be installed.
 binary_c_config = os.environ["BINARY_C"] + "/binary_c-config"
 
@@ -61,11 +63,14 @@ for x in defines:
         y = lone.match(x)
         if y:
             binary_c_define_macros.extend([(y.group(1), None)])
+
 # add API header file
 API_h = os.environ["BINARY_C"] + "/src/API/binary_c_API.h"
 binary_c_define_macros.extend([("BINARY_C_API_H", API_h)])
 
-#
+############################################################
+# Setting all directories and libraries to their final values
+############################################################
 include_dirs = (
     [os.environ["BINARY_C"] + "/src", os.environ["BINARY_C"] + "/src/API", "include",]
     + binary_c_incdirs
@@ -90,7 +95,6 @@ runtime_library_dirs = [
     # os.path.join(CWD, "binarycpython/core/"),
 ] + binary_c_libdirs
 
-# TODO: move the creeation of all the include stuff to here so its easier to print everything
 # print('\n')
 # print("binary_c_config: ", str(binary_c_config) + "\n")
 # print("incdirs: ", str(include_dirs) + "\n")
@@ -103,6 +107,11 @@ runtime_library_dirs = [
 # print("macros: ", str(binary_c_define_macros) + "\n")
 # print('\n')
 
+############################################################
+# Making the extension function
+############################################################
+# TODO: fix this so that it also compiles the code itself
+
 binary_c_python_api_module = Extension(
     # name="binarycpython.core.binary_c",
     name="binary_c_python_api",
@@ -123,12 +132,16 @@ def readme():
         return f.read()
 
 
+############################################################
+# Setting up the python package
+############################################################
+
 setup(
     name="binarycpython",
-    version="0.1",
-    description="This is a python API for binary_c by Rob Izzard and collaborators",
-    author="Jeff Andrews and Robert Izzard and David Hendriks",
-    author_email="andrews@physics.uoc.gr and r.izzard@surrey.ac.uk and rob.izzard@gmail.com and davidhendriks93@gmail.com",
+    version="0.2",
+    description="This is a python API for binary_c by David Hendriks, Rob Izzard and collaborators. Based on the initial set up by Jeff andrews",
+    author=" David Hendriks, Robert Izzard and Jeff Andrews",
+    author_email="davidhendriks93@gmail.com/d.hendriks@surrey.ac.uk, r.izzard@surrey.ac.uk/rob.izzard@gmail.com andrews@physics.uoc.gr",
     long_description=readme(),
     url="https://gitlab.eps.surrey.ac.uk/ri0005/binary_c-python",
     license="",
diff --git a/test_systems/README.md b/test_systems/README.md
index c04d3e1049f4e5b2939a3e42619b48cf0782a007..45fd1d7814ccfe3ca074540e650a11e32a738e1b 100644
--- a/test_systems/README.md
+++ b/test_systems/README.md
@@ -1,2 +1 @@
-# 
-This directory is mean to contain script to generate test systems.
+# This directory is meant to contain scripts to generate test systems.
diff --git a/tests/population/global_variable_for_distributions.py b/tests/population/global_variable_for_distributions.py
index 47b5d3f8aa8cdb531b6ee04469d15750eec842e5..e9114cbc6524c015fe00abddebe5f7b662082e53 100644
--- a/tests/population/global_variable_for_distributions.py
+++ b/tests/population/global_variable_for_distributions.py
@@ -6,6 +6,7 @@ powerlaw_const = None
 # Function/test to see the speed up of making values global so we dont have to calculate them each time
 # Theres only a factor ~2 increase here.
 
+
 def with_glob():
     global powerlaw_const
     if not powerlaw_const:
@@ -15,8 +16,9 @@ def with_glob():
         # print('defined')
         return powerlaw_const
 
+
 def without_glob():
-    powerlaw_const = powerlaw_constant(10, 100, -2) 
+    powerlaw_const = powerlaw_constant(10, 100, -2)
     return powerlaw_const
 
 
@@ -36,6 +38,7 @@ def powerlaw_constant(min_val, max_val, k):
     powerlaw_const = k1 / (max_val ** k1 - min_val ** k1)
     return powerlaw_const
 
+
 def powerlaw(min_val, max_val, k, x):
     """
     Single powerlaw with index k at x from min to max
@@ -47,7 +50,7 @@ def powerlaw(min_val, max_val, k, x):
         raise ValueError
 
     if (x < min_val) or (x > max_val):
-        print('value is out of bounds')
+        print("value is out of bounds")
         return 0
 
     else:
@@ -63,6 +66,7 @@ def powerlaw(min_val, max_val, k, x):
         # )
         return y
 
+
 def powerlaw_with(min_val, max_val, k, x):
     """
     Single powerlaw with index k at x from min to max
@@ -90,6 +94,7 @@ def powerlaw_with(min_val, max_val, k, x):
         # )
         return y
 
+
 steps = 1000000
 
 start_time_without_glob = time.time()
@@ -102,7 +107,11 @@ for i in range(steps):
     powerlaw_with(10, 100, -2, 20)
 stop_time_with_glob = time.time()
 
-total_time_without = stop_time_without_glob-start_time_without_glob
+total_time_without = stop_time_without_glob - start_time_without_glob
 total_time_with = stop_time_with_glob - start_time_with_glob
 
-print("without: {}\nwith: {}\nRatio: {}".format(total_time_without, total_time_with, total_time_without/total_time_with))
+print(
+    "without: {}\nwith: {}\nRatio: {}".format(
+        total_time_without, total_time_with, total_time_without / total_time_with
+    )
+)
diff --git a/tests/population/grid_tests.py b/tests/population/grid_tests.py
index e439dc33865c3999c3b22e24524383fa9fe7a331..a6ae23c596b4111ade8203e9f6d3e3e4837dd8cb 100644
--- a/tests/population/grid_tests.py
+++ b/tests/population/grid_tests.py
@@ -205,8 +205,6 @@ test_pop.set(
 # test_pop.set(extra_prob_function=ding)
 
 
-
-
 ### Cleaning up custom logging code
 
 # test_pop.set(C_logging_code='Printf("MY_STELLAR_DATA time=%g mass=%g radius=%g\\n", stardata->model.time, stardata->star[0].mass, stardata->star[0].radius);')
@@ -214,30 +212,99 @@ test_pop.set(
 # quit()
 
 
+### Grid generating testing
+# test_pop.add_grid_variable(
+#     name="lnm1",
+#     longname="log primary mass",
+#     valuerange=[10, 20],
+#     resolution="10",
+#     spacingfunc="np.linspace(0.213, 10.2, 10)",
+#     precode="M_1=math.exp(lnm1)",
+#     probdist="flat(M_1)",
+#     # probdist='self.custom_options["extra_prob_function"](M_1)',
+#     dphasevol="",
+#     parameter_name="M_1",
+#     condition="",
+# )
 
+# ### Grid generating test.
+# test_pop.add_grid_variable(
+#     name="period",
+#     longname="period",
+#     valuerange=["M_1", 20],
+#     resolution="10",
+#     spacingfunc="np.linspace(1, 10, 10)",
+#     precode="orbital_period = period**2",
+#     probdist="flat(orbital_period)",
+#     parameter_name="orbital_period",
+#     dphasevol="",
+#     condition='self.grid_options["binary"]==0',
+# )
 
+# test_pop.generate_grid_code()
 
+# test_pop.load_grid_function()
 
+# test_pop.write_binary_c_calls_to_file(output_dir="/home/david/Desktop")
 
 
+# print(test_pop.grid_options["code_string"])
 
+####
+# Dry run:
+# test_pop.set(verbose=1)
+# test_pop.add_grid_variable(
+#     name="lnm1",
+#     longname="log primary mass",
+#     valuerange=[10, 20],
+#     resolution="10",
+#     spacingfunc="np.linspace(0.213, 10.2, 10)",
+#     precode="M_1=math.exp(lnm1)",
+#     probdist="flat(M_1)",
+#     # probdist='self.custom_options["extra_prob_function"](M_1)',
+#     dphasevol="",
+#     parameter_name="M_1",
+#     condition="",
+# )
 
+# ### Grid generating test.
+# test_pop.add_grid_variable(
+#     name="period",
+#     longname="period",
+#     valuerange=["M_1", 20],
+#     resolution="10",
+#     spacingfunc="np.linspace(1, 10, 10)",
+#     precode="orbital_period = period**2",
+#     probdist="flat(orbital_period)",
+#     parameter_name="orbital_period",
+#     dphasevol="",
+#     condition='self.grid_options["binary"]==0',
+# )
 
+# test_pop.generate_grid_code(dry_run=True)
 
+# test_pop.load_grid_function()
 
+# test_pop.dry_run()
 
+###
+# Testing population:
+test_pop.set(
+    verbose=1, amt_cores=1, binary=0,
+)
 
-### Grid generating testing
 test_pop.add_grid_variable(
-    name="lnm1",
+    name="M_1",
     longname="log primary mass",
-    valuerange=[10, 20],
-    resolution="10",
-    spacingfunc="np.linspace(0.213, 10.2, 10)",
-    precode="M_1=math.exp(lnm1)",
+    valuerange=[10, 100],
+    resolution="20",
+    spacingfunc="const(10, 100, 20)",
+    precode="",
+    # precode="M_1=math.exp(lnm1)",
     probdist="flat(M_1)",
     # probdist='self.custom_options["extra_prob_function"](M_1)',
-    dphasevol="",
+    dphasevol="dlnm1",
     parameter_name="M_1",
     condition="",
 )
@@ -247,20 +314,22 @@ test_pop.add_grid_variable(
     name="period",
     longname="period",
     valuerange=["M_1", 20],
-    resolution="10",
-    spacingfunc="np.linspace(1, 10, 10)",
+    resolution="20",
+    spacingfunc="np.linspace(1, 10, 20)",
     precode="orbital_period = period**2",
     probdist="flat(orbital_period)",
     parameter_name="orbital_period",
-    dphasevol="",
+    dphasevol="dper",
     condition='self.grid_options["binary"]==0',
 )
 
-test_pop.generate_grid_code()
-
-test_pop.load_grid_function()
+test_pop.set(
+    verbose=1, amt_cores=4, binary=0,
+)
 
-test_pop.write_binary_c_calls_to_file(output_dir='/home/david/Desktop')
 
+test_pop.test_evolve_population()
 
-# print(test_pop.grid_options["code_string"])
+# print(test_pop.grid_options["probtot"])
+# print(test_pop.grid_options["count"])
\ No newline at end of file
diff --git a/tests/population/multiprocessing_via_population_comparison.py b/tests/population/multiprocessing_via_population_comparison.py
index 1548516af0edaae4371921d5d71c4fadef91c62d..bf66c753be15e77455437625743b0c2af4471a52 100644
--- a/tests/population/multiprocessing_via_population_comparison.py
+++ b/tests/population/multiprocessing_via_population_comparison.py
@@ -79,12 +79,12 @@ def parse_function(self, output):
 test_pop = Population()
 # test_pop.set(
 #     C_logging_code="""
-# if(stardata->star[0].SN_type != SN_NONE)    
+# if(stardata->star[0].SN_type != SN_NONE)
 # {
 #     if (stardata->model.time < stardata->model.max_evolution_time)
 #     {
 #         Printf("DAVID_SN %30.12e %g %g %g %d\\n",
-#             // 
+#             //
 #             stardata->model.time, // 1
 #             stardata->star[0].mass, //2
 #             stardata->previous_stardata->star[0].mass, //3
diff --git a/tests/population/name_testcase.pdf b/tests/population/name_testcase.pdf
deleted file mode 100644
index e7f0d56f02432cf56e04889e6ed52c1e6e20b628..0000000000000000000000000000000000000000
Binary files a/tests/population/name_testcase.pdf and /dev/null differ
diff --git a/tests/population/name_testcase.png b/tests/population/name_testcase.png
deleted file mode 100644
index 1173cb0233aedb2dc3f0e3d110730265461364a4..0000000000000000000000000000000000000000
Binary files a/tests/population/name_testcase.png and /dev/null differ
diff --git a/tests/population/plot_scaling.py b/tests/population/plot_scaling.py
deleted file mode 100644
index f0594e227d43f032d44db91015e82b644373a474..0000000000000000000000000000000000000000
--- a/tests/population/plot_scaling.py
+++ /dev/null
@@ -1,116 +0,0 @@
-import matplotlib.pyplot as plt
-import pandas as pd
-import numpy as np
-
-
-def calc_mean_and_std(arr):
-    return np.mean(arr), np.std(arr)
-
-
-
-# Configure
-result_file = 'comparison_result.dat'
-result_file = 'comparison_result_astro1.dat'
-name_testcase = 'Astro1'
-
-
-
-
-
-
-# 
-results = []
-
-with open(result_file, 'r') as f:
-    for line in f:
-        res = list(eval(line.strip()))
-        if len(res)==6:
-            res.append(res[-2]/res[-1])
-        results.append(res)
-
-# make dataframe
-headers = ['cores', 'total_systems', 'total_time_sequentially', 'total_time_multiprocessing', 'ratio']
-
-df = pd.DataFrame(results)
-df.columns = headers
-
-# Select unique amounts
-unique_amt_cores = df['cores'].unique()
-unique_amt_systems = df['total_systems'].unique()
-
-# Create dictionary with calculated means and stdevs etc
-calculated_results = []
-for i in unique_amt_cores:
-    for j in unique_amt_systems:
-        total_time_sequential_list, total_time_multiprocessing_list, total_ratio_list = [], [], []
-
-        for res in results:
-            if (res[0]==i) & (res[1]==j):
-                total_time_sequential_list.append(res[2])
-                total_time_multiprocessing_list.append(res[3])
-                total_ratio_list.append(res[4])
-
-        if (total_time_sequential_list) and (total_time_multiprocessing_list) and (total_ratio_list):
-            # calculate stuff
-            mean_time_sequential, std_sequential = calc_mean_and_std(np.array(total_time_sequential_list))
-            mean_time_multiprocessing, std_multiprocessing = calc_mean_and_std(np.array(total_time_multiprocessing_list))
-            mean_ratio, std_ratio = calc_mean_and_std(np.array(total_ratio_list))
-
-            # make dict
-            res_dict = {
-                    'cores': i, 
-                    'systems': j, 
-                    'mean_time_sequential': mean_time_sequential, 
-                    'mean_time_multiprocessing':mean_time_multiprocessing,
-                    'mean_ratio': mean_ratio,
-                    'std_sequential': std_sequential,
-                    'std_multiprocessing': std_multiprocessing,
-                    'std_ratio': std_ratio,
-                    'total_runs': len(total_time_sequential_list)
-                }
-
-            calculated_results.append(res_dict)
-
-
-# Plot
-x_position_shift = 0
-y_position_shift = -0.05
-max_speedup = 0
-for amt_systems in unique_amt_systems:
-
-    cores = []
-    speedup = []
-    std = [] 
-
-    for el in calculated_results:
-        if el['systems']==amt_systems:
-
-            cores.append(el['cores'] + x_position_shift)
-            speedup.append(el['mean_ratio'])
-            std.append(el['std_ratio'])
-
-            if el['mean_ratio'] > max_speedup:
-                max_speedup = el['mean_ratio']
-
-
-            # add number of runs its based on
-            plt.text(el['cores'] + x_position_shift+0.01, el['mean_ratio']+y_position_shift, el['total_runs'])
-
-    plt.errorbar(cores, speedup, std, linestyle='None', marker='^', label='{} systems'.format(amt_systems))
-    x_position_shift += 0.1
-
-plt.title("Speed up ratio vs amount of cores for different amounts of systems on {}".format(name_testcase))
-plt.xlabel("Amount of cores used")
-plt.ylabel("Speed up ratio (time_linear/time_parallel)")
-
-plt.xlim(0, max(unique_amt_cores) + 4)
-plt.ylim(0, max_speedup + 2)
-
-plt.grid()
-plt.legend()
-
-plt.savefig('scaling_{}.png'+'.png')
-plt.savefig('scaling_{}.png'+'.pdf')
-plt.savefig('scaling_{}.svg'+'.pdf')
-
-plt.show()
\ No newline at end of file
diff --git a/tests/population/profile_run.txt b/tests/population/profile_run.txt
new file mode 100644
index 0000000000000000000000000000000000000000..ccdeddae2bb5a06ceca311d9b53de11dfb074072
Binary files /dev/null and b/tests/population/profile_run.txt differ
diff --git a/tests/population/profile_test.txt b/tests/population/profile_test.txt
new file mode 100644
index 0000000000000000000000000000000000000000..34a8484623b8fcdd999256a15ff7f525f58c1b78
Binary files /dev/null and b/tests/population/profile_test.txt differ
diff --git a/tests/population/readout_profile.py b/tests/population/readout_profile.py
new file mode 100644
index 0000000000000000000000000000000000000000..5975c324f599ac8db9aebdaaf41f31ee35d8982e
--- /dev/null
+++ b/tests/population/readout_profile.py
@@ -0,0 +1,5 @@
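+# Read the cProfile dump in profile_run.txt and print the 10 functions with the
+# largest total time spent in them.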
+import pstats
+p = pstats.Stats("profile_run.txt")
+p.sort_stats("tottime").print_stats(10)
+
+
diff --git a/tests/population/comparison_result_astro1.dat b/tests/population/scaling/comparison_result_astro1.dat
similarity index 100%
rename from tests/population/comparison_result_astro1.dat
rename to tests/population/scaling/comparison_result_astro1.dat
diff --git a/tests/population/scaling/comparison_result_david-Lenovo-IdeaPad-S340-14IWL.dat b/tests/population/scaling/comparison_result_david-Lenovo-IdeaPad-S340-14IWL.dat
new file mode 100644
index 0000000000000000000000000000000000000000..81ae05378baa7c43c4a8343a0e7b0ed67ac4b39f
--- /dev/null
+++ b/tests/population/scaling/comparison_result_david-Lenovo-IdeaPad-S340-14IWL.dat
@@ -0,0 +1,2 @@
+(2, 225, 23.872895002365112, 17.51114845275879, 1.3632969343369403)
+(2, 625, 76.88419461250305, 63.15470623970032, 1.2173945409656914)
diff --git a/tests/population/comparison_result.dat b/tests/population/scaling/comparison_result_laptop.dat
similarity index 100%
rename from tests/population/comparison_result.dat
rename to tests/population/scaling/comparison_result_laptop.dat
diff --git a/tests/population/scaling/comparison_result_test_lapto.dat b/tests/population/scaling/comparison_result_test_lapto.dat
new file mode 100644
index 0000000000000000000000000000000000000000..bdb6b72f9edabd770d5e8d557b079bc63430ab1e
--- /dev/null
+++ b/tests/population/scaling/comparison_result_test_lapto.dat
@@ -0,0 +1,3 @@
+(2, 100, 9.662783145904541, 5.593015670776367, 1.7276517204113695)
+(2, 225, 22.48796582221985, 17.04386305809021, 1.319417185269245)
+(2, 225, 22.703202962875366, 16.048972845077515, 1.4146203113452718)
diff --git a/tests/population/scaling/evolve_population_comparing_with_multiprocessing.py b/tests/population/scaling/evolve_population_comparing_with_multiprocessing.py
new file mode 100644
index 0000000000000000000000000000000000000000..2a11d4ce44b47839735c9d2a52ce22cfc3af3ba1
--- /dev/null
+++ b/tests/population/scaling/evolve_population_comparing_with_multiprocessing.py
@@ -0,0 +1,152 @@
+import os
+import json
+import time
+import pickle
+import sys
+import numpy as np
+
+from binarycpython.utils.grid import Population
+from binarycpython.utils.functions import get_help_all, get_help, create_hdf5
+
+
+import argparse
+
+
+parser = argparse.ArgumentParser()
+
+parser.add_argument(
+    "resolution_M_1", help="Resolution in M_1",
+)
+
+parser.add_argument(
+    "resolution_per", help="Resolution in period",
+)
+
+parser.add_argument(
+    "amt_cores", help="The amount of nodes that are used for the multiprocessing",
+)
+
+parser.add_argument(
+    "name_testcase", help="The name of the testcase (e.g. laptop, cluster etc)",
+)
+
+
+args = parser.parse_args()
+
+res_m_1 = int(args.resolution_M_1)
+res_per = int(args.resolution_per)
+AMT_CORES = int(args.amt_cores)
+name_testcase = args.name_testcase
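+
+# Example invocation (as in run_tests_new.sh):
+#   python evolve_population_comparing_with_multiprocessing.py 25 25 2 "${HOSTNAME}"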
+
+##############################################################################
+## Script to compare running a population sequentially and with multiprocessing; the parse function below extracts which stars go supernova and when.
+def output_lines(output):
+    """
+    Function that returns the lines that were received from the binary_c run.
+    """
+    return output.splitlines()
+
+
+def parse_function(self, output):
+    # extract info from the population instance
+    # TODO: think about whether this is smart. Passing around this object might be overkill
+
+    # Get some information from the population instance
+    data_dir = self.custom_options["data_dir"]
+    base_filename = self.custom_options["base_filename"]
+
+    # Check directory, make if necessary
+    os.makedirs(data_dir, exist_ok=True)
+
+    # Create filename
+    outfilename = os.path.join(data_dir, base_filename)
+
+    # Go over the output.
+    for el in output_lines(output):
+        headerline = el.split()[0]
+
+        # Check the header and act accordingly
+        if headerline == "DAVID_SN":
+            parameters = ["time", "mass_1", "prev_mass_1", "zams_mass_1", "SN_type"]
+            values = el.split()[1:]
+            separator = "\t"
+
+            if not os.path.exists(outfilename):
+                with open(outfilename, "w") as f:
+                    f.write(separator.join(parameters) + "\n")
+
+            with open(outfilename, "a") as f:
+                f.write(separator.join(values) + "\n")
+
+
+resolution = {"M_1": res_m_1, "per": res_per}
+total_systems = np.prod([el for el in resolution.values()])
+# AMT_CORES = int(amt_cores)
+
+
+test_pop = Population()
+
+test_pop.set(
+    verbose=1, amt_cores=AMT_CORES, binary=1,
+)
+
+test_pop.add_grid_variable(
+    name="M_1",
+    longname="log primary mass",
+    valuerange=[1, 100],
+    resolution="{}".format(resolution['M_1']),
+    spacingfunc="const(1, 100, {})".format(resolution['M_1']),
+    precode="",
+    # precode="M_1=math.exp(lnm1)",
+    probdist="Kroupa2001(M_1)",
+    # probdist='self.custom_options["extra_prob_function"](M_1)',
+    dphasevol="dlnm1",
+    parameter_name="M_1",
+    condition="",
+)
+
+### Grid generating test.
+test_pop.add_grid_variable(
+    name="period",
+    longname="period",
+    valuerange=["M_1", 20],
+    resolution="{}".format(resolution['per']),
+    spacingfunc="np.linspace(1, 10, {})".format(resolution['per']),
+    precode="orbital_period = period**2",
+    probdist="flat(orbital_period)",
+    parameter_name="orbital_period",
+    dphasevol="dper",
+    condition='self.grid_options["binary"]==1',
+)
+
+# Lin
+total_lin_start = time.time()
+
+evolve_lin_time = test_pop.test_evolve_population_lin()
+
+total_lin_stop = time.time()
+
+total_lin = total_lin_stop - total_lin_start
+
+print("linear run with {} systems: {} of which {} spent on evolving the systems".format(total_systems, total_lin, evolve_lin_time))
+
+# MP
+total_mp_start = time.time()
+
+evolve_mp_time = test_pop.test_evolve_population_mp()
+
+total_mp_stop = time.time()
+
+total_mp = total_mp_stop - total_mp_start
+print("MP ({} nodes) run with {} systems: {} of which {} spent on evolving the systems".format(AMT_CORES, total_systems, total_mp, evolve_mp_time))
+
+speed_up = total_lin / total_mp
+print("The speed up by using MP is: {}".format(speed_up))
+
+
+# Write to file:
+# amt_cores, amt_systems, total_time_lin, total_time_mp, speedup
+with open("comparison_result_{}.dat".format(name_testcase), "a") as f:
+    res = (AMT_CORES, total_systems, total_lin, total_mp, speed_up)
+    f.write(str(res) + "\n")
\ No newline at end of file
diff --git a/tests/population/scaling/plot_scaling.py b/tests/population/scaling/plot_scaling.py
new file mode 100644
index 0000000000000000000000000000000000000000..f0061147f120b1b5cdd3e5c7c2a8cfe7dcf82ceb
--- /dev/null
+++ b/tests/population/scaling/plot_scaling.py
@@ -0,0 +1,221 @@
+import matplotlib
+import os
+import matplotlib.pyplot as plt
+import pandas as pd
+import numpy as np
+
+
+def calc_mean_and_std(arr):
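+    """Return the mean and the standard deviation of arr."""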
+    return np.mean(arr), np.std(arr)
+
+def calculate_results(results, unique_amt_cores, unique_amt_systems):
+    """
+    Function to calculate the numbers
+    """
+    
+    calculated_results = []
+    for i in unique_amt_cores:
+        for j in unique_amt_systems:
+            (
+                total_time_sequential_list,
+                total_time_multiprocessing_list,
+                total_ratio_list,
+            ) = ([], [], [])
+
+            for res in results:
+                if (res[0] == i) & (res[1] == j):
+                    total_time_sequential_list.append(res[2])
+                    total_time_multiprocessing_list.append(res[3])
+                    total_ratio_list.append(res[4])
+
+            if (
+                (total_time_sequential_list)
+                and (total_time_multiprocessing_list)
+                and (total_ratio_list)
+            ):
+                # calculate stuff
+                mean_time_sequential, std_sequential = calc_mean_and_std(
+                    np.array(total_time_sequential_list)
+                )
+                mean_time_multiprocessing, std_multiprocessing = calc_mean_and_std(
+                    np.array(total_time_multiprocessing_list)
+                )
+                mean_ratio, std_ratio = calc_mean_and_std(np.array(total_ratio_list))
+
+                # make dict
+                res_dict = {
+                    "cores": i,
+                    "systems": j,
+                    "mean_time_sequential": mean_time_sequential,
+                    "mean_time_multiprocessing": mean_time_multiprocessing,
+                    "mean_ratio": mean_ratio,
+                    "std_sequential": std_sequential,
+                    "std_multiprocessing": std_multiprocessing,
+                    "std_ratio": std_ratio,
+                    "total_runs": len(total_time_sequential_list),
+                }
+
+                calculated_results.append(res_dict)
+
+    return calculated_results
+
+def plot_speedup_and_efficiency(calculated_results, unique_amt_cores, unique_amt_systems):
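+    """
+    Plot the speed-up ratio (and the corresponding efficiency, i.e. speed-up per core)
+    versus the amount of cores, for each unique amount of systems.
+    """
+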
+    x_position_shift = 0
+    y_position_shift = -0.05
+    max_speedup = 0
+
+    # https://stackoverflow.com/questions/46323530/matplotlib-plot-two-x-axes-one-linear-and-one-with-logarithmic-ticks
+    fig, ax1 = plt.subplots()
+    ax2 = ax1.twinx()
+    for amt_systems in unique_amt_systems:
+
+        cores = []
+        speedup = []
+        std = []
+        efficiency = []
+
+        for el in calculated_results:
+            if el["systems"] == amt_systems:
+
+                cores.append(el["cores"] + x_position_shift)
+                speedup.append(el["mean_ratio"])
+                std.append(el["std_ratio"])
+                efficiency.append(el["mean_ratio"] / el["cores"])
+
+                if el["mean_ratio"] > max_speedup:
+                    max_speedup = el["mean_ratio"]
+
+                # add number of runs its based on
+                ax1.text(
+                    el["cores"] + x_position_shift + 0.01,
+                    el["mean_ratio"] + y_position_shift,
+                    el["total_runs"],
+                )
+
+        ax1.errorbar(
+            cores,
+            speedup,
+            std,
+            linestyle="None",
+            marker="^",
+            label="Speed up & efficiency of {} systems".format(amt_systems),
+        )
+        
+        ax2.plot(cores, efficiency, alpha=0.5)
+        x_position_shift += 0.1
+
+    ax1.set_title(
+        "Speed up ratio vs amount of cores for different amounts of systems on {}".format(
+            name_testcase
+        )
+    )
+    ax1.set_xlabel("Amount of cores used")
+    ax1.set_ylabel("Speed up ratio (time_linear/time_parallel)")
+
+    ax1.set_xlim(0, max(unique_amt_cores) + 4)
+    ax1.set_ylim(0, max_speedup + 2)
+    ax2.set_ylim(0, 1)
+
+    ax1.grid()
+    ax1.legend(loc=4)
+    fig.savefig(os.path.join(img_dir, "speedup_scaling_{}.{}".format(name_testcase, "png")))
+    fig.savefig(os.path.join(img_dir, "speedup_scaling_{}.{}".format(name_testcase, "pdf")))
+    fig.savefig(os.path.join(img_dir, "speedup_scaling_{}.{}".format(name_testcase, "eps")))
+    plt.show()
+
+def plot_runtime(calculated_results, unique_amt_cores, unique_amt_systems):
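+    """
+    Plot the mean sequential and multiprocessing run times versus the amount of
+    cores, for each unique amount of systems.
+    """
+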
+    fig = plt.figure()
+    ax = fig.add_subplot(111)
+
+    color_options = ["blue", "green", "red", "black", "yellow", "magenta"]
+
+    x_position_shift = 0
+    y_position_shift = -0.05
+
+    colors = [color_options[i] for i in range(len(unique_amt_systems))]
+    for i, amt_systems in enumerate(unique_amt_systems):
+
+        cores = []
+
+        mean_times_linear = []
+        mean_times_multiprocessing = []
+        std_linear = []
+        std_mp = []
+
+        for el in calculated_results:
+            if el["systems"] == amt_systems:
+                cores.append(el["cores"] + x_position_shift)
+
+                mean_times_linear.append(el["mean_time_sequential"])
+                std_linear.append(el["std_sequential"])
+
+                mean_times_multiprocessing.append(el["mean_time_multiprocessing"])
+                std_mp.append(el["std_multiprocessing"])
+
+                # # add number of runs its based on
+                # plt.text(el["cores"] + x_position_shift + 0.01, el["mean_ratio"] + y_position_shift, el["total_runs"])
+
+        ax.errorbar(
+            cores,
+            mean_times_linear,
+            std_linear,
+            color=colors[i],
+            linewidth=1,
+            marker="P",
+            label="{} systems linear run".format(amt_systems),
+        )
+        ax.errorbar(
+            cores,
+            mean_times_multiprocessing,
+            std_mp,
+            linewidth=1,
+            color=colors[i],
+            marker="d",
+            label="{} systems MP run".format(amt_systems),
+        )
+        x_position_shift += 0.1
+
+    ax.set_title(
+        "Total time for sequential and MP for different amounts of systems on {}".format(
+            name_testcase
+        )
+    )
+    ax.set_xlabel("Amount of cores used")
+    ax.set_ylabel("Total time taken (s)")
+
+    ax.set_xlim(0, max(unique_amt_cores) + 20)
+    ax.set_yscale("log")
+    ax.grid()
+    ax.legend()
+
+    fig.savefig(os.path.join(img_dir, "total_time_scaling_{}.{}".format(name_testcase, "png")))
+    fig.savefig(os.path.join(img_dir, "total_time_scaling_{}.{}".format(name_testcase, "pdf")))
+    fig.savefig(os.path.join(img_dir, "total_time_scaling_{}.{}".format(name_testcase, "eps")))
+
+    plt.show()
+
+
+
+##################### 
+
+# Configure
+result_file = "comparison_result_laptop.dat"
+# result_file = "comparison_result_astro1.dat"
+name_testcase = "laptop"
+img_dir = "scaling_plots"
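+# Make sure the output directory for the plots exists
+os.makedirs(img_dir, exist_ok=True)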
+
+# Readout file
+results = []
+with open(result_file, "r") as f:
+    for line in f:
+        res = list(eval(line.strip()))
+        if len(res) == 6:
+            res.append(res[-2] / res[-1])
+        results.append(res)
+
+# make dataframe and set correct headers.
+headers = [
+    "cores",
+    "total_systems",
+    "total_time_sequentially",
+    "total_time_multiprocessing",
+    "ratio",
+]
+
+df = pd.DataFrame(results)
+df.columns = headers
+
+# Select unique amounts
+unique_amt_cores = df["cores"].unique()
+unique_amt_systems = df["total_systems"].unique()
+
+# Create dictionary with calculated means and stdevs etc
+calculated_results = calculate_results(results, unique_amt_cores, unique_amt_systems)
+
+#############################################################################################################################
+# Plot speed up  and efficiency.
+plot_speedup_and_efficiency(calculated_results, unique_amt_cores, unique_amt_systems)
+
+########################################################################################
+# Plot run time
+plot_runtime(calculated_results, unique_amt_cores, unique_amt_systems)
\ No newline at end of file
diff --git a/tests/population/run_tests.sh b/tests/population/scaling/run_tests.sh
similarity index 100%
rename from tests/population/run_tests.sh
rename to tests/population/scaling/run_tests.sh
diff --git a/tests/population/run_tests_astro1.sh b/tests/population/scaling/run_tests_astro1.sh
similarity index 100%
rename from tests/population/run_tests_astro1.sh
rename to tests/population/scaling/run_tests_astro1.sh
diff --git a/tests/population/scaling/run_tests_new.sh b/tests/population/scaling/run_tests_new.sh
new file mode 100755
index 0000000000000000000000000000000000000000..ad5386a13f0205b78d1aa72b5cdc72e3fcabbe0b
--- /dev/null
+++ b/tests/population/scaling/run_tests_new.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+python evolve_population_comparing_with_multiprocessing.py 25 25 2 "${HOSTNAME}"
+