
Commit

1.1.9
robertfrankzhang committed May 10, 2020
1 parent 4722d51 commit d286b32
Showing 6 changed files with 60 additions and 2,059 deletions.
2,096 changes: 50 additions & 2,046 deletions eLCSPerformanceTests/eLCS Performance Comparison.ipynb

Large diffs are not rendered by default.

6 changes: 3 additions & 3 deletions eLCSPerformanceTests/eLCS_Classifier.py
@@ -315,7 +315,7 @@ def Mutation(self, state, phenotype):
#-------------------------------------------------------
for attRef in range(cons.env.formatData.numAttributes): #Each condition specifies different attributes, so we need to go through all attributes in the dataset.
attributeInfo = cons.env.formatData.attributeInfo[attRef]
- if random.random() < cons.mu and state[attRef] != cons.labelMissingData:
+ if random.random() < cons.upsilon and state[attRef] != cons.labelMissingData:
#MUTATION--------------------------------------------------------------------------------------------------------------
if attRef not in self.specifiedAttList: #Attribute not yet specified
self.specifiedAttList.append(attRef)
@@ -372,7 +372,7 @@ def Mutation(self, state, phenotype):
def discretePhenotypeMutation(self):
""" Mutate this rule's discrete phenotype. """
changed = False
- if random.random() < cons.mu:
+ if random.random() < cons.upsilon:
phenotypeList = copy.deepcopy(cons.env.formatData.phenotypeList)
phenotypeList.remove(self.phenotype)
newPhenotype = random.sample(phenotypeList,1)
@@ -385,7 +385,7 @@ def discretePhenotypeMutation(self):
def continuousPhenotypeMutation(self, phenotype):
""" Mutate this rule's continuous phenotype. """
changed = False
- if random.random() < cons.mu: #Mutate continuous phenotype
+ if random.random() < cons.upsilon: #Mutate continuous phenotype
phenRange = self.phenotype[1] - self.phenotype[0]
mutateRange = random.random()*0.5*phenRange
tempKey = random.randint(0,2) #Make random choice between 3 scenarios, mutate minimums, mutate maximums, mutate both
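
For context on this rename: cons.upsilon is the per-attribute mutation probability that the code previously read from cons.mu, matching the upsilon keyword that ParamParser already exposes. A minimal standalone sketch of the gate; mutate_condition, specified_atts, and missing are illustrative stand-ins rather than the library's exact code or data structures:

import random

def mutate_condition(state, specified_atts, upsilon=0.04, missing="NA"):
    """Flip each attribute between specified and unspecified with probability upsilon."""
    for att_ref in range(len(state)):
        # Skip missing values; mutate each attribute independently with probability upsilon.
        if random.random() < upsilon and state[att_ref] != missing:
            if att_ref not in specified_atts:
                specified_atts.append(att_ref)   # generalize -> specialize
            else:
                specified_atts.remove(att_ref)   # specialize -> generalize
    return specified_atts

print(mutate_condition(["A", "B", "NA", "C"], [1], upsilon=0.5))
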
6 changes: 3 additions & 3 deletions eLCSPerformanceTests/eLCS_ClassifierSet.py
@@ -279,12 +279,12 @@ def runGA(self, exploreIter, state, phenotype):
#-------------------------------------------------------
if changed:
cl1.setAccuracy((cl1.accuracy + cl2.accuracy)/2.0)
- cl1.setFitness(cons.fitness_reduction * (cl1.fitness + cl2.fitness)/2.0)
+ cl1.setFitness(cons.fitnessReduction * (cl1.fitness + cl2.fitness)/2.0)
cl2.setAccuracy(cl1.accuracy)
cl2.setFitness(cl1.fitness)
else:
- cl1.setFitness(cons.fitness_reduction * cl1.fitness)
- cl2.setFitness(cons.fitness_reduction * cl2.fitness)
+ cl1.setFitness(cons.fitnessReduction * cl1.fitness)
+ cl2.setFitness(cons.fitnessReduction * cl2.fitness)

#-------------------------------------------------------
# MUTATION OPERATOR
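
The rename matches the fitnessReduction keyword defined in ParamParser. A rough standalone illustration of what the constant does in runGA; init_offspring_fitness and its default value are hypothetical, not part of the library's API:

def init_offspring_fitness(fit1, fit2, changed, fitness_reduction=0.1):
    """Initialize two offspring fitnesses from their parents' fitness."""
    if changed:
        # Crossover altered the offspring: both share a reduced average of the parents' fitness.
        shared = fitness_reduction * (fit1 + fit2) / 2.0
        return shared, shared
    # No crossover: each offspring keeps a reduced copy of its own parent's fitness.
    return fitness_reduction * fit1, fitness_reduction * fit2

print(init_offspring_fitness(0.8, 0.6, changed=True))    # roughly (0.07, 0.07)
print(init_offspring_fitness(0.8, 0.6, changed=False))   # roughly (0.08, 0.06)
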
4 changes: 2 additions & 2 deletions eLCSPerformanceTests/eLCS_ParamParser.py
@@ -7,7 +7,7 @@

class ParamParser:
def __init__(self, dataFile, cv=False,learningIterations="10000", trackingFrequency=100000, N=1000,
-                  p_spec=0.5, discrete_attribute_limit=10,nu=5, chi=0.8, upsilon=0.04, theta_GA=25,
+                  p_spec=0.5, discreteAttributeLimit=10,nu=5, chi=0.8, upsilon=0.04, theta_GA=25,
theta_del=20, theta_sub=20, acc_sub=0.99, beta=0.2, delta=0.1, init_fit=0.01, fitnessReduction=0.1,
doSubsumption=True, selectionMethod='tournament', theta_sel=0.5,randomSeed = False,labelInstanceID='InstanceID',labelPhenotype="class",
labelMissingData="NA",doPopulationReboot=False,popRebootPath='ExampleRun_eLCS_50000'):
@@ -17,7 +17,7 @@ def __init__(self, dataFile, cv=False,learningIterations="10000", trackingFrequency=100000, N=1000,
self.parameters['trackingFrequency'] = trackingFrequency
self.parameters['N'] = N
self.parameters['p_spec'] = p_spec
- self.parameters['discrete_attribute_limit'] = discrete_attribute_limit
+ self.parameters['discreteAttributeLimit'] = discreteAttributeLimit
self.parameters['nu'] = nu
self.parameters['chi'] = chi
self.parameters['upsilon'] = upsilon
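
A sketch of constructing the test harness with the renamed keyword; the data-file path is a placeholder, and the constructor may do more than store parameters, which this diff does not show:

from eLCS_ParamParser import ParamParser  # module shown in this diff

parser = ParamParser(
    dataFile="Datasets/Multiplexer6.csv",  # hypothetical path
    learningIterations="5000",
    N=500,
    discreteAttributeLimit=10,   # was discrete_attribute_limit before this commit
    upsilon=0.04,
)
print(parser.parameters['discreteAttributeLimit'])
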
4 changes: 2 additions & 2 deletions setup.py
@@ -6,14 +6,14 @@
setup(
name = 'scikit-eLCS',
packages = ['skeLCS'],
- version = '1.1.8',
+ version = '1.1.9',
license='License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
description = 'Educational Learning Classifier System',
long_description_content_type="text/markdown",
author = 'Robert Zhang, Ryan J. Urbanowicz',
author_email = 'robertzh@seas.upenn.edu,ryanurb@upenn.edu',
url = 'https://github.com/UrbsLab/scikit-eLCS',
- download_url = 'https://github.com/UrbsLab/scikit-eLCS/archive/v_1.1.8.tar.gz',
+ download_url = 'https://github.com/UrbsLab/scikit-eLCS/archive/v_1.1.9.tar.gz',
keywords = ['machine learning','data analysis','data science','learning classifier systems'],
install_requires=['numpy','pandas','scikit-learn'],
classifiers=[
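
Only packaging metadata changes here. A quick check, assuming Python 3.8+ for importlib.metadata, that the installed distribution matches the bumped version after running pip install scikit-eLCS==1.1.9:

from importlib.metadata import version

print(version("scikit-eLCS"))   # expected: 1.1.9
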
3 changes: 0 additions & 3 deletions skeLCS/eLCS.py
@@ -264,9 +264,6 @@ def fit(self, X, y):
Returns self
"""
- #If trained already, raise Exception
- if self.hasTrained:
-     raise Exception("Cannot train already trained model again")

# Check if X and Y are numeric
try:
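
Removing this guard means fit can be called again on an already-trained estimator without raising. A hedged sketch of the now-permitted pattern; the import path, toy data, and constructor argument are assumed rather than taken from this diff:

import numpy as np
from skeLCS import eLCS   # assumes the package's usual import path

# Illustrative toy data; sizes and parameters are arbitrary.
X = np.random.randint(0, 2, size=(100, 6))
y = np.random.randint(0, 2, size=100)

model = eLCS(learningIterations=1000)
model.fit(X, y)
model.fit(X, y)   # before this commit, a second fit raised
                  # "Cannot train already trained model again"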
