Skip to content

Commit 0f1f598

Browse files
Spelling swapped to US English, renamed some classes, and removed the long-running test for Travis CI compatibility
1 parent fdafb22 commit 0f1f598

File tree

2 files changed

+20
-57
lines changed

2 files changed

+20
-57
lines changed

src/qinfer/hyper_heuristic_optimisers.py

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -38,9 +38,9 @@
3838

3939
## CLASSES ####################################################################
4040

41-
class Optimiser(object):
41+
class Optimizer(object):
4242
'''
43-
A generic optimiser class that is inherited by the other optimisation functions.
43+
A generic optimizer class that is inherited by the other optimisation functions.
4444
4545
:param np.ndarray param_names: The list of parameters that are being searched over.
4646
:param function fitness_function: The function that is being optimised over, defaults to perf test multiple
@@ -64,7 +64,7 @@ def __init__(
6464
self._funct_kwargs = funct_kwargs
6565

6666
if fitness_function is None: # Default to calling perf test multiple
67-
self._fitness_function = PerfTestMultipleAbstractor(
67+
self._fitness_function = HeuristicPerformanceFitness(
6868
self._param_names,
6969
*self._funct_args,
7070
**self._funct_kwargs
@@ -77,9 +77,9 @@ def fitness_function(self, params):
7777
return self._fitness_function(params)
7878

7979
def parallel(self):
80-
raise NotImplementedError("This optimiser does not have parallel support.")
80+
raise NotImplementedError("This optimizer does not have parallel support.")
8181

82-
class ParticleSwarmOptimiser(Optimiser):
82+
class ParticleSwarmOptimizer(Optimizer):
8383
'''
8484
A particle swarm optimisation based hyperheuristic
8585
:param integer n_pso_iterations:
@@ -200,7 +200,7 @@ def fitness_dt(self):
200200
('fitness', np.float64)])
201201

202202

203-
class ParticleSwarmSimpleAnnealingOptimiser(ParticleSwarmOptimiser):
203+
class ParticleSwarmSimpleAnnealingOptimizer(ParticleSwarmOptimizer):
204204

205205
def __call__(self,
206206
n_pso_iterations=50,
@@ -293,7 +293,7 @@ def update_pso_params(self, temperature, omega_v, phi_p, phi_g):
293293
return omega_v, phi_p, phi_g
294294

295295

296-
class ParticleSwarmTemperingOptimiser(ParticleSwarmOptimiser):
296+
class ParticleSwarmTemperingOptimizer(ParticleSwarmOptimizer):
297297
'''
298298
A particle swarm optimisation based hyperheuristic
299299
:param integer n_pso_iterations:
@@ -444,7 +444,7 @@ def distribute_particles(self, n_pso_particles, n_temper_categories):
444444

445445
return temper_map
446446

447-
class PerfTestMultipleAbstractor(object):
447+
class HeuristicPerformanceFitness(object):
448448
def __init__(self,
449449
param_names,
450450
evaluation_function = None,

src/qinfer/tests/test_optimiser.py

Lines changed: 12 additions & 49 deletions
Original file line numberDiff line numberDiff line change
@@ -44,17 +44,17 @@ class TestPSO(DerandomizedTestCase):
4444

4545
def test_pso_quad(self):
4646
f_quad = lambda x: numpy.sum(10 * (x-0.5)**2)
47-
hh_opt = ParticleSwarmOptimiser(['x','y','z','a'], fitness_function = f_quad)
47+
hh_opt = ParticleSwarmOptimizer(['x','y','z','a'], fitness_function = f_quad)
4848
hh_opt()
4949

5050
def test_pso_sin_sq(self):
5151
f_sin_sq = lambda x: numpy.sum(np.sin(x - 0.2)**2)
52-
hh_opt = ParticleSwarmOptimiser(['x','y','z','a'], fitness_function = f_sin_sq)
52+
hh_opt = ParticleSwarmOptimizer(['x','y','z','a'], fitness_function = f_sin_sq)
5353
hh_opt()
5454

5555
def test_pso_rosenbrock(self):
5656
f_rosenbrock = lambda x: numpy.sum([((x[i+1] - x[i]**2)**2 + (1 - x[i])**2)/len(x) for i in range(len(x)-1)])
57-
hh_opt = ParticleSwarmOptimiser(['x','y','z','a'], fitness_function = f_rosenbrock)
57+
hh_opt = ParticleSwarmOptimizer(['x','y','z','a'], fitness_function = f_rosenbrock)
5858
hh_opt()
5959

6060

@@ -84,7 +84,7 @@ def test_pso_perf_test_multiple_short(self):
8484
#Fitness function to evaluate the performance of the experiment
8585
EXPERIMENT_FITNESS = lambda performance: performance['loss'][:,-1].mean(axis=0)
8686

87-
hh_opt = ParticleSwarmOptimiser(params,
87+
hh_opt = ParticleSwarmOptimizer(params,
8888
n_trials = n_trials,
8989
n_particles = n_particles,
9090
prior = prior,
@@ -95,58 +95,21 @@ def test_pso_perf_test_multiple_short(self):
9595
hh_opt(n_pso_iterations=5,
9696
n_pso_particles=6)
9797

98-
99-
def test_pso_perf_test_multiple_long(self):
100-
# Define our experiment
101-
n_trials = 20 # Times we repeat the set of experiments
102-
n_exp = 150 # Number of experiments in the set
103-
n_particles = 4000 # Number of points we track during the experiment
104-
105-
# Model for the experiment
106-
model = rb.RandomizedBenchmarkingModel()
107-
108-
#Ordering of RB is 'p', 'A', 'B'
109-
# A + B < 1, 0 < p < 1
110-
#Prior distribution of the experiment
111-
prior = dist.PostselectedDistribution(
112-
dist.MultivariateNormalDistribution(mean=[0.5,0.1,0.25], cov=np.diag([0.1, 0.1, 0.1])),
113-
model
114-
)
115-
116-
#Heuristic used in the experiment
117-
heuristic_class = qi.expdesign.ExpSparseHeuristic
118-
119-
#Heuristic Parameters
120-
params = ['base', 'scale']
121-
122-
#Fitness function to evaluate the performance of the experiment
123-
EXPERIMENT_FITNESS = lambda performance: performance['loss'][:,-1].mean(axis=0)
124-
125-
hh_opt = ParticleSwarmOptimiser(params,
126-
n_trials = n_trials,
127-
n_particles = n_particles,
128-
prior = prior,
129-
model = model,
130-
n_exp = n_exp,
131-
heuristic_class = heuristic_class
132-
)
133-
hh_opt()
134-
13598
def TestPSSAO(DerandomizedTestCase):
13699

137100
def test_pssao_quad(self):
138101
f_quad = lambda x: numpy.sum(10 * (x-0.5)**2)
139-
hh_opt = ParticleSwarmSimpleAnnealingOptimiser(['x','y','z','a'], fitness_function = f_quad)
102+
hh_opt = ParticleSwarmSimpleAnnealingOptimizer(['x','y','z','a'], fitness_function = f_quad)
140103
hh_opt()
141104

142105
def test_pssao_sin_sq(self):
143106
f_sin_sq = lambda x: numpy.sum(np.sin(x - 0.2)**2)
144-
hh_opt = ParticleSwarmSimpleAnnealingOptimiser(['x','y','z','a'], fitness_function = f_sin_sq)
107+
hh_opt = ParticleSwarmSimpleAnnealingOptimizer(['x','y','z','a'], fitness_function = f_sin_sq)
145108
hh_opt()
146109

147110
def test_pssao_rosenbrock(self):
148111
f_rosenbrock = lambda x: numpy.sum([((x[i+1] - x[i]**2)**2 + (1 - x[i])**2)/len(x) for i in range(len(x)-1)])
149-
hh_opt = ParticleSwarmSimpleAnnealingOptimiser(['x','y','z','a'], fitness_function = f_rosenbrock)
112+
hh_opt = ParticleSwarmSimpleAnnealingOptimizer(['x','y','z','a'], fitness_function = f_rosenbrock)
150113
hh_opt()
151114

152115

@@ -176,7 +139,7 @@ def test_pssao_perf_test_multiple_short(self):
176139
#Fitness function to evaluate the performance of the experiment
177140
EXPERIMENT_FITNESS = lambda performance: performance['loss'][:,-1].mean(axis=0)
178141

179-
hh_opt = ParticleSwarmSimpleAnnealingOptimiser(params,
142+
hh_opt = ParticleSwarmSimpleAnnealingOptimizer(params,
180143
n_trials = n_trials,
181144
n_particles = n_particles,
182145
prior = prior,
@@ -192,17 +155,17 @@ def TestPSTO(DerandomizedTestCase):
192155

193156
def test_psto_quad(self):
194157
f_quad = lambda x: numpy.sum(10 * (x-0.5)**2)
195-
hh_opt = ParticleSwarmTemperingOptimiser(['x','y','z','a'], fitness_function = f_quad)
158+
hh_opt = ParticleSwarmTemperingOptimizer(['x','y','z','a'], fitness_function = f_quad)
196159
hh_opt()
197160

198161
def test_psto_sin_sq(self):
199162
f_sin_sq = lambda x: numpy.sum(np.sin(x - 0.2)**2)
200-
hh_opt = ParticleSwarmTemperingOptimiser(['x','y','z','a'], fitness_function = f_sin_sq)
163+
hh_opt = ParticleSwarmTemperingOptimizer(['x','y','z','a'], fitness_function = f_sin_sq)
201164
hh_opt()
202165

203166
def test_psto_rosenbrock(self):
204167
f_rosenbrock = lambda x: numpy.sum([((x[i+1] - x[i]**2)**2 + (1 - x[i])**2)/len(x) for i in range(len(x)-1)])
205-
hh_opt = ParticleSwarmTemperingOptimiser(['x','y','z','a'], fitness_function = f_rosenbrock)
168+
hh_opt = ParticleSwarmTemperingOptimizer(['x','y','z','a'], fitness_function = f_rosenbrock)
206169
hh_opt()
207170

208171

@@ -232,7 +195,7 @@ def test_psto_perf_test_multiple_short(self):
232195
#Fitness function to evaluate the performance of the experiment
233196
EXPERIMENT_FITNESS = lambda performance: performance['loss'][:,-1].mean(axis=0)
234197

235-
hh_opt = ParticleSwarmTemperingOptimiser(params,
198+
hh_opt = ParticleSwarmTemperingOptimizer(params,
236199
n_trials = n_trials,
237200
n_particles = n_particles,
238201
prior = prior,

0 commit comments

Comments
 (0)