Fix kwarg passing in combined example #356
Changes from 1 commit
Diff of the combined example (run_example):

@@ -29,8 +29,41 @@ def function_to_minimize(x):
     return numpy.sin(x[0]) * numpy.cos(x[1]) + numpy.cos(x[0] + x[1]) + numpy.random.uniform(-0.02, 0.02)
 
 
-def run_example(num_to_sample=20, verbose=True, testapp=None, **kwargs):
-    """Run the combined example."""
+def run_example(num_to_sample=20, verbose=True, testapp=None, gp_next_points_kwargs=None, gp_hyper_opt_kwargs=None, gp_mean_var_kwargs=None, **kwargs):
+    """Run the combined example.
+
+    :param num_to_sample: Number of points for MOE to suggest and then sample [20]
+    :type num_to_sample: int > 0
+    :param verbose: Whether to print information to the screen [True]
+    :type verbose: bool
+    :param testapp: A supplied test pyramid application to use instead of a REST server [None]
+    :type testapp: Pyramid test application
+    :param gp_next_points_kwargs: Optional kwargs to pass to the gp_next_points endpoint
+    :type gp_next_points_kwargs: dict
+    :param gp_hyper_opt_kwargs: Optional kwargs to pass to the gp_hyper_opt endpoint
+    :type gp_hyper_opt_kwargs: dict
+    :param gp_mean_var_kwargs: Optional kwargs to pass to the gp_mean_var endpoint
+    :type gp_mean_var_kwargs: dict
+    :param kwargs: Optional kwargs to pass to all endpoints
+    :type kwargs: dict
+
+    """
+    # Set and combine all optional kwargs
+    if gp_next_points_kwargs is None:
+        gp_next_points_kwargs = {}
+    else:
+        gp_next_points_kwargs = dict(kwargs.items() + gp_next_points_kwargs.items())
+
+    if gp_hyper_opt_kwargs is None:
+        gp_hyper_opt_kwargs = {}
+    else:
+        gp_hyper_opt_kwargs = dict(kwargs.items() + gp_hyper_opt_kwargs.items())
+
+    if gp_mean_var_kwargs is None:
+        gp_mean_var_kwargs = {}
+    else:
+        gp_mean_var_kwargs = dict(kwargs.items() + gp_mean_var_kwargs.items())
+
     exp = Experiment([[0, 2], [0, 4]])
     # Bootstrap with some known or already sampled point(s)
     exp.historical_data.append_sample_points([
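For clarity, here is a minimal standalone sketch (hypothetical values, Python 2 to match the example code) of the merge the `else` branches perform: `dict()` keeps the last occurrence of a duplicate key, so the endpoint-specific kwargs, listed second, take precedence over the shared `kwargs` — the behavior a reviewer asks to document below.

```python
# Hypothetical values, for illustration only (not from the PR).
shared_kwargs = {'rest_port': 1337, 'optimizer_info': {'num_multistarts': 4}}
gp_next_points_kwargs = {'optimizer_info': {'num_multistarts': 8}}

# dict() keeps the last occurrence of a duplicate key, so entries from
# gp_next_points_kwargs (listed second) override those in shared_kwargs.
merged = dict(shared_kwargs.items() + gp_next_points_kwargs.items())
assert merged == {'rest_port': 1337, 'optimizer_info': {'num_multistarts': 8}}
```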
@@ -41,7 +74,7 @@ def run_example(num_to_sample=20, verbose=True, testapp=None, **kwargs):
     for i in range(num_to_sample):
         covariance_info = {}
         if i > 0 and i % 5 == 0:
-            covariance_info = gp_hyper_opt(exp.historical_data.to_list_of_sample_points(), testapp=testapp, **kwargs)
+            covariance_info = gp_hyper_opt(exp.historical_data.to_list_of_sample_points(), testapp=testapp, **gp_hyper_opt_kwargs)
 
             if verbose:
                 print "Updated covariance_info with {0:s}".format(str(covariance_info))
@@ -50,7 +83,7 @@ def run_example(num_to_sample=20, verbose=True, testapp=None, **kwargs):
             exp,
             covariance_info=covariance_info,
             testapp=testapp,
-            **kwargs
+            **gp_next_points_kwargs
         )[0]  # By default we only ask for one point
         # Sample the point from our objective function, we can replace this with any function
         value_of_next_point = function_to_minimize(next_point_to_sample)
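A toy sketch of what this change buys (the endpoint below is a hypothetical stand-in, not MOE's real signature): `**gp_next_points_kwargs` expands only the merged gp_next_points dict into keyword arguments, so options destined for other endpoints never reach this call.

```python
# A stand-in endpoint (hypothetical), for illustration only.
def fake_gp_next_points(exp, covariance_info=None, testapp=None, **kwargs):
    return kwargs  # the endpoint sees only what was unpacked into it

next_points_opts = {'optimizer_info': {'num_multistarts': 8}}

# Only the gp_next_points-specific dict is unpacked at this call site.
print fake_gp_next_points(None, **next_points_opts)
# -> {'optimizer_info': {'num_multistarts': 8}}
```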
@@ -66,6 +99,7 @@ def run_example(num_to_sample=20, verbose=True, testapp=None, **kwargs):
         exp.historical_data.to_list_of_sample_points(),  # Historical data to inform Gaussian Process
         points_to_evaluate,  # We will calculate the mean and variance of the GP at these points
         testapp=testapp,
+        **kwargs
     )
 
     if verbose:

Review thread on the `**kwargs` line:

Reviewer: I think this should be `gp_mean_var_kwargs`.
Author: Good call.
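For context on why the shared-kwargs-everywhere pattern broke before this PR (the test author confirms below that "it did before"), here is a self-contained sketch with a hypothetical stand-in endpoint; the real endpoints' signatures may differ.

```python
# A stand-in endpoint (hypothetical) that accepts no extra options.
def fake_gp_mean_var(points_sampled, points_to_evaluate, testapp=None):
    return 'ok'

kwargs = {'optimizer_info': {'num_multistarts': 4}}  # meant for gp_next_points only
try:
    fake_gp_mean_var([], [], **kwargs)  # pre-PR behavior: shared kwargs go everywhere
except TypeError as e:
    print e  # ... got an unexpected keyword argument 'optimizer_info'
```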
Diff of the combined example test (CombinedExampleTest):

@@ -12,18 +12,29 @@ class CombinedExampleTest(MoeExampleTestCase):
 
     """Test the combined_example MOE example."""
 
-    def test_example_runs(self):
-        """Simple integration test for example."""
+    def test_example_runs_with_non_default_optimizer_kwargs(self):
+        """Simple integration test for example with non-default kwargs."""
         run_example(
             num_to_sample=1,
             verbose=False,
             testapp=self.testapp,
-            optimizer_info={
-                'num_multistarts': TEST_OPTIMIZER_MULTISTARTS,
-                'num_random_samples': TEST_OPTIMIZER_NUM_RANDOM_SAMPLES,
-                'optimizer_parameters': TEST_GRADIENT_DESCENT_PARAMETERS._asdict(),
-            })
+            gp_next_points_kwargs={
+                'optimizer_info': {
+                    'num_multistarts': TEST_OPTIMIZER_MULTISTARTS,
+                    'num_random_samples': TEST_OPTIMIZER_NUM_RANDOM_SAMPLES,
+                    'optimizer_parameters': TEST_GRADIENT_DESCENT_PARAMETERS._asdict(),
+                }
+            },
+            gp_hyper_opt_kwargs={
+                'optimizer_info': {
+                    'num_multistarts': TEST_OPTIMIZER_MULTISTARTS,
+                    'num_random_samples': TEST_OPTIMIZER_NUM_RANDOM_SAMPLES,
+                    'optimizer_parameters': TEST_GRADIENT_DESCENT_PARAMETERS._asdict(),
+                }
+            },
+            gp_mean_var_kwargs={},
+            rest_port=1337,
+        )
 
 if __name__ == "__main__":
     T.run()

Review thread on the rest_port line:

Reviewer: Why are the last two needed ({} and 1337)?
Author: It tests that passing them doesn't break anything (it did before).
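To see what those last two arguments exercise: `rest_port=1337` arrives in `run_example` via the shared `**kwargs`, and `gp_mean_var_kwargs={}` is explicitly passed (so it is not None), which makes the merge forward the port to the gp_mean_var call. A minimal sketch of that path (hypothetical values, Python 2 to match the codebase):

```python
kwargs = {'rest_port': 1337}  # caught by **kwargs in run_example
gp_mean_var_kwargs = {}       # explicitly passed, so not None

# Same merge as in run_example: the shared kwargs are folded in.
gp_mean_var_kwargs = dict(kwargs.items() + gp_mean_var_kwargs.items())
assert gp_mean_var_kwargs == {'rest_port': 1337}
```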
Review thread on the kwarg-merging block:

Reviewer: Be explicit about the more 'specific' kwargs taking precedence if keys are multiply-specified.
Author: Done.
Reviewer: I meant in the function docs; it seems important for callers to see/know this. Up to you though; at least it's in writing now.
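A sketch (assumed wording, not part of the PR) of the docstring note the reviewer is asking for:

```python
# Hypothetical addition to run_example's docstring:
#
#     :param kwargs: Optional kwargs to pass to all endpoints. If a key appears
#         both here and in an endpoint-specific dict (e.g. gp_next_points_kwargs),
#         the endpoint-specific value takes precedence.
```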