more changes with help from 2to3-3.5
avehtari committed Jun 8, 2016
1 parent 4a7f6fe commit 51630cd
Showing 13 changed files with 42 additions and 44 deletions.
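
The diffs below are the mechanical rewrites the stock 2to3 fixers produce: print statements, has_key, iteritems/values/keys, xrange, izip, __metaclass__, and implicit relative imports. The exact command is not recorded in the commit; with the Python 3.5 entry point named in the message, a typical (hypothetical) invocation would be `2to3-3.5 -w GPyOpt/ examples/ gpyopt.py`, where `-w` writes the fixes back into the files.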
4 changes: 2 additions & 2 deletions GPyOpt/core/evaluators/batch_predictive.py
@@ -52,9 +52,9 @@ def compute_batch(self):
         while k<self.batch_size:
             X = np.vstack((X,X_new)) # update the sample within the batch
             Y = np.vstack((Y,model_local.predict(X_new)[0]))
-            print(X,Y)
+            print((X,Y))
             model_local.updateModel(X,Y,None,None)
-            print(model_local.model.X, model_local.model.Y)
+            print((model_local.model.X, model_local.model.Y))

             print(X)
             try: # this exception is included in case two equal points are selected in a batch, in this case the method stops
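Why the extra parentheses: in Python 2, `print(X,Y)` is the print statement applied to the tuple `(X,Y)`, so the fixer wraps the arguments to preserve the old output. A minimal sketch with hypothetical values:

    X, Y = 1, 2
    print((X, Y))  # prints the tuple: (1, 2) -- same output as Python 2's `print (X, Y)`
    print(X, Y)    # prints two separate values: 1 2 -- would silently change the format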
3 changes: 1 addition & 2 deletions GPyOpt/core/task/objective.py
@@ -80,12 +80,11 @@ def _syncronous_batch_evaluation(self,x):
         according to the number of accesible cores.
         """
         from multiprocessing import Process, Pipe
-        from itertools import izip

         # --- parallel evaluation of the function
         divided_samples = [x[i::self.n_procs] for i in range(self.n_procs)]
         pipe = [Pipe() for i in range(self.n_procs)]
-        proc = [Process(target=spawn(self.func),args=(c,k)) for k,(p,c) in izip(divided_samples,pipe)]
+        proc = [Process(target=spawn(self.func),args=(c,k)) for k,(p,c) in zip(divided_samples,pipe)]
         [p.start() for p in proc]
         [p.join() for p in proc]

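The `izip` import can simply be dropped because Python 3's built-in `zip` is already lazy, just as `itertools.izip` was in Python 2. A self-contained sketch of the same pipe-per-worker pattern, with a hypothetical `worker` in place of GPyOpt's `spawn(self.func)`:

    from multiprocessing import Process, Pipe

    def worker(conn, chunk):               # hypothetical stand-in for spawn(self.func)
        conn.send([v * 2 for v in chunk])
        conn.close()

    if __name__ == '__main__':
        chunks = [[1, 2], [3, 4]]
        pipes = [Pipe() for _ in chunks]
        procs = [Process(target=worker, args=(c, k)) for k, (p, c) in zip(chunks, pipes)]
        [p.start() for p in procs]
        [p.join() for p in procs]
        print([p.recv() for p, c in pipes])   # e.g. [[2, 4], [6, 8]]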
6 changes: 4 additions & 2 deletions GPyOpt/interface/config_parser.py
@@ -63,8 +63,10 @@

 def update_config(config_new, config_default):

-    if any([isinstance(v, dict) for v in config_new.values()]):
-        for k,v in config_new.iteritems():
+    # if any([isinstance(v, dict) for v in config_new.values()]):
+    #     for k,v in config_new.iteritems():
+    if any([isinstance(v, dict) for v in list(config_new.values())]):
+        for k,v in list(config_new.items()):
             if isinstance(v,dict) and k in config_default:
                 update_config(config_new[k],config_default[k])
             else:
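2to3 wraps `.values()` and `.items()` in `list()` because Python 3 returns live view objects instead of lists; the snapshot only matters when the dictionary being iterated is mutated inside the loop, so it is conservative here (it is `config_default` that gets written to). A sketch of the merge with hypothetical configs, using a plain `update` in place of the recursive call:

    config_new = {'resources': {'cores': 4}, 'verbose': True}
    config_default = {'resources': {'cores': 1}}

    for k, v in list(config_new.items()):      # list() takes a snapshot of the items
        if isinstance(v, dict) and k in config_default:
            config_default[k].update(v)        # merge nested sections
        else:
            config_default[k] = v              # plain values override the default

    print(config_default)   # {'resources': {'cores': 4}, 'verbose': True}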
3 changes: 2 additions & 1 deletion GPyOpt/interface/driver.py
@@ -25,7 +25,8 @@ def _get_bounds(self):

         bounds = []
         var = self.config['variables']
-        for k in var.keys():
+        # for k in var.keys():
+        for k in list(var.keys()):
             assert var[k]['type'].lower().startswith('float'), 'Only real value variables are supported!'
             bounds.extend([(float(var[k]['min']), float(var[k]['max']))]*int(var[k]['size']))
         return bounds
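Same pattern again: `var.keys()` is a view in Python 3, and `list(...)` is the fixer's conservative snapshot. When nothing mutates the dict, iterating it directly is the idiomatic form, sketched here with a hypothetical variable spec:

    var = {'X': {'type': 'float', 'min': 0, 'max': 1, 'size': 1}}
    for k in var:               # equivalent to the old `for k in var.keys():`
        print(k, var[k]['min'], var[k]['max'])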
6 changes: 3 additions & 3 deletions GPyOpt/interface/func_loader.py
@@ -35,7 +35,7 @@ def _load_param_config(self, config):
         param_offsets = []
         param_types = []
         total_size = 0
-        for k, v in var.iteritems():
+        for k, v in list(var.items()):
             param_names.append(k)
             param_sizes.append(v['size'])
             assert v['type'].lower().startswith('float'), 'Only real parameters are supported at the moment!'
@@ -48,14 +48,14 @@ def _create_obj_func(self):
         def obj_func(x):
             params = []
             if len(x.shape)==1: x = x[None,:]
-            for i in xrange(len(self.param_sizes)):
+            for i in range(len(self.param_sizes)):
                 params.append(x[:,self.param_offsets[i]:self.param_offsets[i]+self.param_sizes[i]])

             if self.support_multi_eval:
                 return self.orgfunc(*params)
             else:
                 rts = np.empty((x.shape[0],1))
-                for i in xrange(x.shape[0]):
+                for i in range(x.shape[0]):
                     rts[i] = self.orgfunc(*[p[i] for p in params])
                 return rts
         return obj_func
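`xrange` becomes `range` because Python 3's `range` is itself a lazy sequence. A sketch of the same offset-based parameter slicing, with hypothetical sizes:

    import numpy as np

    param_sizes = [2, 1]                 # hypothetical: one 2-d and one 1-d parameter
    param_offsets = [0, 2]
    x = np.arange(6.0).reshape(2, 3)     # two evaluation points, 3 columns in total

    params = []
    for i in range(len(param_sizes)):    # range is lazy in Python 3, like old xrange
        params.append(x[:, param_offsets[i]:param_offsets[i] + param_sizes[i]])
    print([p.shape for p in params])     # [(2, 2), (2, 1)]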
6 changes: 3 additions & 3 deletions GPyOpt/interface/output.py
@@ -65,7 +65,7 @@ def __init__(self, config, outpath, prjname='',name=''):
         try:
             self.fileout = open(self.filename,'w')
         except:
-            print('Data logger '+self.name+' fails to open the output file '+self.filename+'!')
+            print(('Data logger '+self.name+' fails to open the output file '+self.filename+'!'))

     def close(self):
         if self.fileout is not None:
@@ -100,7 +100,7 @@ def __init__(self, config):
         self.Ys = []

         # create all the data savers
-        self.data_savers = [self._support_savers[ds['type']](ds, config['prjpath'], config['experiment-name'],name) for name, ds in config['output'].iteritems() if isinstance(ds, dict)]
+        self.data_savers = [self._support_savers[ds['type']](ds, config['prjpath'], config['experiment-name'],name) for name, ds in list(config['output'].items()) if isinstance(ds, dict)]

         self.clock = [ds.interval for ds in self.data_savers]

@@ -111,7 +111,7 @@ def append_iter(self, iters, elapsed_time, X, Y, bo, final=False):
         self.Ys.append(Y)
         self.offsets.append(X.shape[0])

-        for i in xrange(len(self.data_savers)):
+        for i in range(len(self.data_savers)):
             if final:
                 if self.clock[i]==-1: self.clock[i] = 0
                 elif self.clock[i]>0: self.clock[i] += -1
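In the first hunk the doubled parentheses are an artifact of the fixer rather than a behavioral need: with a single non-tuple argument they are redundant but harmless.

    msg = 'Data logger log1 fails to open the output file out.txt!'   # hypothetical
    print((msg))   # the inner parens just group a single expression
    print(msg)     # identical output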
34 changes: 17 additions & 17 deletions GPyOpt/methods/bayesian_optimization.py
@@ -154,7 +154,7 @@ def __init__(self, f, domain = None, constrains = None, cost_withGradients = Non
         # --- CHOOSE design space

         if not hasattr(self,'domain'): ### XXXXXXXXXXXXXXXXXXXXXXXX NOTE: remove this line in next version to depreciate arguments
-            if domain == None and self.kwargs.has_key('bounds'):
+            if domain == None and 'bounds' in self.kwargs:
                 self.domain = bounds_to_space(kwargs['bounds'])
             else:
                 self.domain = domain
@@ -164,7 +164,7 @@ def __init__(self, f, domain = None, constrains = None, cost_withGradients = Non

         # --- CHOOSE objective function
         self.f = f
-        if self.kwargs.has_key('objective_name'): self.objective_name = kwargs['objective_name']
+        if 'objective_name' in self.kwargs: self.objective_name = kwargs['objective_name']
         else: self.objective_name = 'no_name'
         self.batch_size = batch_size
         self.num_cores = num_cores
@@ -189,7 +189,7 @@ def __init__(self, f, domain = None, constrains = None, cost_withGradients = Non
         self.normalize_Y = normalize_Y

         # If an istance of a GPyOpt model is passed (possibly user defined), it is used here:
-        if self.kwargs.has_key('model'):
+        if 'model' in self.kwargs:
             if isinstance(kwargs['model'], GPyOpt.models.base.BOModel):
                 self.model = kwargs['model']
                 self.model_type = 'User defined model used.'
@@ -232,31 +232,31 @@ def _model_chooser(self):
         """

         if not hasattr(self,'kernel'): ### XXXXXXXXXXXXXXXXXXXXXXXX NOTE: remove this line in next version to depreciate arguments
-            if self.kwargs.has_key('kernel'):
+            if 'kernel' in self.kwargs:
                 self.kernel = self.kwargs['kernel']
             else:
                 self.kernel = None

-        if self.kwargs.has_key('noise_var'): self.noise_var = self.kwargs['noise_var']
+        if 'noise_var' in self.kwargs: self.noise_var = self.kwargs['noise_var']
         else: self.noise_var = None

         # --------
         # --- Initilize GP model with MLE on the parameters
         # --------
         if self.model_type == 'GP' or self.model_type == 'sparseGP':
-            if self.kwargs.has_key('model_optimizer_type'): self.model_optimizer_type = self.kwargs['model_optimizer_type']
+            if 'model_optimizer_type' in self.kwargs: self.model_optimizer_type = self.kwargs['model_optimizer_type']
             else: self.model_optimizer_type = 'lbfgs'


             if not hasattr(self,'optimize_restarts'): ### XXXXXXXXXXXXXXXXXXXXXXXX NOTE: remove this line in next version to depreciate arguments
-                if self.kwargs.has_key('optimize_restarts'): self.optimize_restarts = self.kwargs['optimize_restarts']
+                if 'optimize_restarts' in self.kwargs: self.optimize_restarts = self.kwargs['optimize_restarts']
                 else: self.optimize_restarts = 5

-            if self.kwargs.has_key('max_iters'): self.max_iters = self.kwargs['max_iters']
+            if 'max_iters' in self.kwargs: self.max_iters = self.kwargs['max_iters']
             else: self.max_iters = 1000

             if not hasattr(self,'num_inducing'): ### XXXXXXXXXXXXXXXXXXXXXXXX NOTE: remove this line in next version to depreciate arguments
-                if self.kwargs.has_key('num_inducing'): self.num_inducing = self.kwargs['num_inducing']
+                if 'num_inducing' in self.kwargs: self.num_inducing = self.kwargs['num_inducing']
                 else: self.num_inducing = 10

             if self.model_type == 'GP': self.sparse = False
@@ -268,19 +268,19 @@ def _model_chooser(self):
         # --- Initilize GP model with MCMC on the parameters
         # --------
         elif self.model_type == 'GP_MCMC':
-            if self.kwargs.has_key('n_samples'): self.n_samples = self.kwargs['n_samples']
+            if 'n_samples' in self.kwargs: self.n_samples = self.kwargs['n_samples']
             else: self.n_samples = 10

-            if self.kwargs.has_key('n_burnin'): self.n_burnin = self.kwargs['n_burnin']
+            if 'n_burnin' in self.kwargs: self.n_burnin = self.kwargs['n_burnin']
             else: self.n_burnin = 100

-            if self.kwargs.has_key('subsample_interval'): self.subsample_interval = self.kwargs['subsample_interval']
+            if 'subsample_interval' in self.kwargs: self.subsample_interval = self.kwargs['subsample_interval']
             else: self.subsample_interval =10

-            if self.kwargs.has_key('step_size'): self.step_size = self.kwargs['step_size']
+            if 'step_size' in self.kwargs: self.step_size = self.kwargs['step_size']
             else: self.step_size = 1e-1

-            if self.kwargs.has_key('leapfrog_steps'): self.leapfrog_steps = self.kwargs['leapfrog_steps']
+            if 'leapfrog_steps' in self.kwargs: self.leapfrog_steps = self.kwargs['leapfrog_steps']
             else: self.leapfrog_steps = 20

         return GPModel_MCMC(self.kernel, self.noise_var, self.exact_feval, self.normalize_Y, self.n_samples, self.n_burnin, self.subsample_interval, self.step_size, self.leapfrog_steps, self.verbosity_model)
@@ -305,13 +305,13 @@ def _acquisition_chooser(self):
         # --- Extract relevant parameters from the ***kwargs

         if not hasattr(self,'acquisition_jitter'): ### XXXXXXXXXXXXXXXXXXXXXXXX NOTE: remove this line in next version to depreciate arguments
-            if self.kwargs.has_key('acquisition_jitter'):
+            if 'acquisition_jitter' in self.kwargs:
                 self.acquisition_jitter = self.kwargs['acquisition_jitter']
             else:
                 self.acquisition_jitter = 0.01

         if not hasattr(self,'acquisition_weight'): ### XXXXXXXXXXXXXXXXXXXXXXXX NOTE: remove this line in next version to depreciate arguments
-            if self.kwargs.has_key('acquisition_weight'):
+            if 'acquisition_weight' in self.kwargs:
                 self.acquisition_weight = self.kwargs['acquisition_weight']
             else:
                 self.acquisition_weight = 2 ## TODO: implement the optimal rate (only for bandits)
@@ -358,7 +358,7 @@ def _evaluator_chooser(self):
         Acquisition chooser from the available options. Guide the optimization through sequential or parallel evalutions of the objective.
         """

-        if self.kwargs.has_key('acquisition_transformation'):
+        if 'acquisition_transformation' in self.kwargs:
             self.acquisition_transformation = self.kwargs['acquisition_transformation']
         else:
             self.acquisition_transformation = 'none'
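Every one of these rewrites follows a single rule: `d.has_key(k)` no longer exists in Python 3 and becomes `k in d`. The repeated if/else pairs could also be collapsed with `dict.get`, sketched here with a hypothetical kwargs dict:

    kwargs = {'max_iters': 500}

    # literal 2to3 translation of the has_key pattern
    if 'max_iters' in kwargs: max_iters = kwargs['max_iters']
    else: max_iters = 1000

    # equivalent, more idiomatic form
    max_iters = kwargs.get('max_iters', 1000)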
5 changes: 2 additions & 3 deletions GPyOpt/models/base.py
@@ -3,8 +3,7 @@

 import abc

-class BOModel(object):
-    __metaclass__ = abc.ABCMeta
+class BOModel(object, metaclass=abc.ABCMeta):
     """
     The abstract Model for Bayesian Optimization
     """
@@ -32,4 +31,4 @@ def get_fmin(self):
         "Get the minimum of the current model."
         return
-
+
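The `__metaclass__` class attribute is silently ignored in Python 3, so the keyword form in the class header is the only way to keep the abstract-method machinery working. A sketch of the converted pattern:

    import abc

    class BOModel(object, metaclass=abc.ABCMeta):   # Python 3 keyword replaces __metaclass__
        @abc.abstractmethod
        def predict(self, X):
            "Return the model prediction at X."

    # BOModel() now raises TypeError until predict is overridden in a subclass.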
4 changes: 2 additions & 2 deletions GPyOpt/optimization/acquisition_optimizer.py
@@ -87,7 +87,7 @@ def __init__(self, space, optimizer='lbfgs', n_samples=5000, fast=True, random=T
         self.optimizer_name = optimizer
         self.kwargs = kwargs
         self.optimizer = select_optimizer(self.optimizer_name)(space, **kwargs)
-        self.free_dims = range(space.dimensionality)
+        self.free_dims = list(range(space.dimensionality))
         self.bounds = self.space.get_bounds()
         self.subspace = self.space

@@ -108,7 +108,7 @@ def fix_dimensions(self, dims=None, values=None):
         self.fixed_values = np.atleast_2d(values)

         # -- restore to initial values
-        self.free_dims = range(self.space.dimensionality)
+        self.free_dims = list(range(self.space.dimensionality))
         self.bounds = self.space.get_bounds()

         # -- change free dimensions and remove bounds from fixed dimensions
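Unlike the plotting calls further down, `list(range(...))` is genuinely needed here: the surrounding code goes on to edit the free dimensions, and a bare `range` object supports neither `remove` nor item assignment. A sketch with a hypothetical dimensionality:

    dimensionality = 3
    free_dims = list(range(dimensionality))   # a real list, because it gets edited
    free_dims.remove(1)                       # fix dimension 1, keep the others free
    print(free_dims)                          # [0, 2]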
5 changes: 1 addition & 4 deletions GPyOpt/plotting/__init__.py
@@ -1,7 +1,4 @@
 # Copyright (c) 2012, GPyOpt authors (see AUTHORS.txt).
 # Licensed under the BSD 3-clause license (see LICENSE.txt)

-try:
-    import plots_bo
-except:
-    pass
+from . import plots_bo
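Two things change at once here: `from . import plots_bo` is the explicit relative form that Python 3 requires inside a package, and the old try/except guard is gone, so an import failure (for example, a missing matplotlib) will now raise instead of being silently swallowed.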
4 changes: 2 additions & 2 deletions GPyOpt/plotting/plots_bo.py
@@ -127,15 +127,15 @@ def plot_convergence(Xdata,best_Y, filename = None):
     ## Distances between consecutive x's
     plt.figure(figsize=(10,5))
     plt.subplot(1, 2, 1)
-    plt.plot(range(n-1), distances, '-ro')
+    plt.plot(list(range(n-1)), distances, '-ro')
     plt.xlabel('Iteration')
     plt.ylabel('d(x[n], x[n-1])')
     plt.title('Distance between consecutive x\'s')
     grid(True)

     # Estimated m(x) at the proposed sampling points
     plt.subplot(1, 2, 2)
-    plt.plot(range(n),best_Y,'-o')
+    plt.plot(list(range(n)),best_Y,'-o')
     plt.title('Value of the best selected sample')
     plt.xlabel('Iteration')
     plt.ylabel('Best y')
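These two `list(...)` wrappers are the fixer being conservative: `plt.plot` accepts any array-like, including a bare `range`, so the behavior is identical either way. A sketch with hypothetical data:

    import numpy as np
    import matplotlib.pyplot as plt

    n = 5
    best_Y = np.random.rand(n)          # hypothetical best-value trace
    plt.plot(range(n), best_Y, '-o')    # a bare range works; list(range(n)) is equivalent
    plt.xlabel('Iteration')
    plt.ylabel('Best y')
    plt.show()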
4 changes: 2 additions & 2 deletions examples/six_hump_camel/camel.py
@@ -12,8 +12,8 @@ def main(job_id, params):
     x = params['X'][0]
     y = params['Y'][0]
     res = camel(x, y)
-    print "The Six hump camel back function:"
-    print "\tf(%.4f, %0.4f) = %f" % (x, y, res)
+    print('The Six hump camel back function:')
+    print('\tf(%.4f, %0.4f) = %f' % (x, y, res))
     return camel(x, y)

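The %-formatting itself survives the conversion unchanged; only the statement-to-function switch is needed. A sketch evaluated at the camel function's known global minimizer (approximately (0.0898, -0.7126), where f ≈ -1.0316):

    x, y, res = 0.0898, -0.7126, -1.0316   # approximate global minimum of the camel function
    print('The Six hump camel back function:')
    print('\tf(%.4f, %0.4f) = %f' % (x, y, res))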
2 changes: 1 addition & 1 deletion gpyopt.py
@@ -7,7 +7,7 @@
 if __name__ == '__main__':
     import sys,os
     if len(sys.argv)<1:
-        print 'Need the config file!'
+        print('Need the config file!')
         exit()

     configfile = sys.argv[1]
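One pre-existing quirk the conversion leaves in place: `len(sys.argv)<1` can never be true, because `sys.argv` always contains at least the script name, so the guard never fires. A sketch of the check as presumably intended:

    import sys

    if len(sys.argv) < 2:          # argv[0] is the script; argv[1] should be the config file
        print('Need the config file!')
        sys.exit(1)
    configfile = sys.argv[1]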
