Commit c6d40f68 authored by VasilyShcherbinin

Latest changes

parent b7bab10e
RESULTS_FILE.txt
-Accuracy: 0.78125 Total time: 114.03 Rules: 360
-Total Av. Rules: 247.06666666666666
-Accuracy: 0.78125 Total time: 114.62 Rules: 365
-Total Av. Rules: 488.4
+Accuracy: 0.90625 Total time: 33.04 Rules: 199
+Accuracy: 0.921875 Total time: 34.97 Rules: 194
+Accuracy: 0.859375 Total time: 35.2 Rules: 197
+Accuracy: 0.859375 Total time: 36.26 Rules: 193
+Accuracy: 0.890625 Total time: 34.05 Rules: 205
+Accuracy: 0.875 Total time: 34.18 Rules: 201
+Accuracy: 0.921875 Total time: 34.91 Rules: 218
+Accuracy: 0.90625 Total time: 35.54 Rules: 202
+Accuracy: 0.890625 Total time: 35.22 Rules: 205
+Accuracy: 0.828125 Total time: 37.87 Rules: 216
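For reference, averaged over the ten runs on the new side of this diff: mean accuracy = 8.859375/10 ≈ 0.886, mean run time = 351.24/10 ≈ 35.1 s, and mean rule count = 2030/10 = 203, compared with roughly 0.78 accuracy, ~114 s, and 360+ rules per run on the old side.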
@@ -8,9 +8,9 @@ trainFile=6Multiplexer_Data_Complete.txt # FileName of training dataset
 testFile=None # FileName of testing dataset. If no testing data available or desired, put 'None'.
 outputDirectory=Local_Output # Output file directory
 outputFile=ExampleRun # FileName of output files.
-learningIterations=20000 # Specify complete algorithm evaluation checkpoints and maximum number of learning iterations (e.g. 1000.2000.5000 = A maximum of 5000 learning iterations with evaluations at 1000, 2000, and 5000 iterations)
+learningIterations=10000 # Specify complete algorithm evaluation checkpoints and maximum number of learning iterations (e.g. 1000.2000.5000 = A maximum of 5000 learning iterations with evaluations at 1000, 2000, and 5000 iterations)
-N=1000 # Maximum size of the rule population (a.k.a. Micro-classifier population size, where N is the sum of the classifier numerosities in the population)
+N=500 # Maximum size of the rule population (a.k.a. Micro-classifier population size, where N is the sum of the classifier numerosities in the population)
-p_spec=0.7 # The probability of specifying an attribute when covering. (1-p_spec = the probability of adding '#' in ternary rule representations). Greater numbers of attributes in a dataset will require lower values of p_spec.
+p_spec=0.6 # The probability of specifying an attribute when covering. (1-p_spec = the probability of adding '#' in ternary rule representations). Greater numbers of attributes in a dataset will require lower values of p_spec.
 kfold=5 # if not used, set to 0.
 ######--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
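The covering probability p_spec lowered above controls how specific newly covered rules are: each attribute of the matched instance is kept with probability p_spec and replaced by the '#' wildcard otherwise. A minimal sketch of that mechanism, assuming a ternary rule representation; the function and variable names below are illustrative, not this repository's API:

import random

def cover(instance_state, p_spec=0.6):
    """Build a rule condition matching instance_state: each attribute is
    specified with probability p_spec, generalised to '#' otherwise."""
    return [attr if random.random() < p_spec else '#' for attr in instance_state]

# Example: cover(['1', '0', '1', '1', '0', '0']) might return ['1', '#', '1', '#', '0', '#']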
@@ -26,9 +26,9 @@ trackingFrequency=0 # Specifies the number of iterations before each esti
 ######--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
 ###### Supervised Learning Parameters - Generally just use default values.
 ######--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
-nu=5 # (v) Power parameter used to determine the importance of high accuracy when calculating fitness. (typically set to 5, recommended setting of 1 in noisy data)
+nu=10 # (v) Power parameter used to determine the importance of high accuracy when calculating fitness. (typically set to 5, recommended setting of 1 in noisy data)
 chi=0.8 # (X) The probability of applying crossover in the GA. (typically set to 0.5-1.0)
-upsilon=0.4 # (u) The probability of mutating an allele within an offspring.(typically set to 0.1-0.5)
+upsilon=0.5 # (u) The probability of mutating an allele within an offspring.(typically set to 0.1-0.5)
 theta_GA=25 # The GA threshold; The GA is applied in a set when the average time since the last GA in the set is greater than theta_GA.
 theta_del=20 # The deletion experience threshold; The calculation of the deletion probability changes once this threshold is passed.
 theta_sub=20 # The subsumption experience threshold;
...
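nu is doubled here from 5 to 10. In UCS-style systems fitness is usually the rule's accuracy raised to the power nu, so a larger nu penalises even slightly inaccurate rules much more heavily. A minimal sketch under that assumption (names are illustrative, not this project's API):

def classifier_fitness(correct_count, match_count, nu=10):
    """UCS-style fitness: accuracy ** nu. With nu=10 a rule that is 90%
    accurate gets fitness ~0.35, versus ~0.59 with nu=5."""
    accuracy = correct_count / match_count
    return accuracy ** nu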
@@ -34,7 +34,6 @@ from UCS.UCS_Constants import *
 class OutputFileManager:
     totalPopulationSize = 0
-    totalAverage = 0

     def writePopStats(self, outFile, trainEval, testEval, exploreIter, pop, correct):
         """ Makes output text file which includes all of the evaluation statistics for a complete analysis of all training and testing data on the current LCS rule population. """
@@ -140,7 +139,6 @@ class OutputFileManager:
         # Write each classifier--------------------------------------------------------------------------------------------------------------------------------------
         OutputFileManager.totalPopulationSize = len(pop.popSet)
-        OutputFileManager.totalAverage += len(pop.popSet)
         print(len(pop.popSet))
         for cl in pop.popSet:
             rulePopOut.write(str(cl.printClassifier()))
...
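Note that the "Rules:" figure reported to the results file is len(pop.popSet), i.e. the number of macro-classifiers, while the population cap N in the config bounds the sum of classifier numerosities (micro-classifiers). A small hypothetical helper to make that distinction explicit, assuming each classifier exposes a numerosity attribute as in typical UCS/XCS implementations:

def population_sizes(pop):
    macro = len(pop.popSet)                          # distinct rules; what "Rules:" reports
    micro = sum(cl.numerosity for cl in pop.popSet)  # micro-classifier count, bounded by N
    return macro, micro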
@@ -85,11 +85,10 @@ def mainRun():
     print("Total run time in seconds: %.2f" % total)
     f = open("RESULTS_FILE.txt", 'a')
     f.write(" Accuracy: " + str(kfold_accuracy) + " Total time: " + str(total) + " Rules: " + str(OutputFileManager.totalPopulationSize) + "\n")
-    f.write(" Total Av. Rules: " + str(OutputFileManager.totalAverage / 15) + "\n")

 if __name__ == '__main__':
-    for i in range(15):
+    for i in range(10):
         mainRun()
...
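With the in-code Total Av. Rules bookkeeping removed, per-run averages can still be recovered from RESULTS_FILE.txt after the ten runs finish. A minimal sketch, assuming the line format written by mainRun() above:

import re

def summarise_results(path="RESULTS_FILE.txt"):
    accs, times, rules = [], [], []
    with open(path) as f:
        for line in f:
            m = re.search(r"Accuracy: ([\d.]+) Total time: ([\d.]+) Rules: (\d+)", line)
            if m:
                accs.append(float(m.group(1)))
                times.append(float(m.group(2)))
                rules.append(int(m.group(3)))
    if not accs:
        return None
    n = len(accs)
    return sum(accs) / n, sum(times) / n, sum(rules) / n

print(summarise_results())  # e.g. (0.886, 35.12, 203.0) for the ten runs shown above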
No preview for this file type