Fix kwarg passing in combined example #356

Merged: 2 commits, Aug 5, 2014
Changes from 1 commit
42 changes: 38 additions & 4 deletions moe_examples/combined_example.py
@@ -29,8 +29,41 @@ def function_to_minimize(x):
return numpy.sin(x[0]) * numpy.cos(x[1]) + numpy.cos(x[0] + x[1]) + numpy.random.uniform(-0.02, 0.02)


def run_example(num_to_sample=20, verbose=True, testapp=None, **kwargs):
"""Run the combined example."""
def run_example(num_to_sample=20, verbose=True, testapp=None, gp_next_points_kwargs=None, gp_hyper_opt_kwargs=None, gp_mean_var_kwargs=None, **kwargs):
"""Run the combined example.

:param num_to_sample: Number of points for MOE to suggest and then sample [20]
:type num_to_sample: int > 0
:param verbose: Whether to print information to the screen [True]
:type verbose: bool
:param testapp: A test pyramid application to use instead of a REST server [None]
:type testapp: Pyramid test application
:param gp_next_points_kwargs: Optional kwargs to pass to the gp_next_points endpoint
:type gp_next_points_kwargs: dict
:param gp_hyper_opt_kwargs: Optional kwargs to pass to the gp_hyper_opt endpoint
:type gp_hyper_opt_kwargs: dict
:param gp_mean_var_kwargs: Optional kwargs to pass to the gp_mean_var endpoint
:type gp_mean_var_kwargs: dict
:param kwargs: Optional kwargs to pass to all endpoints
:type kwargs: dict
Contributor: be explicit about the more 'specific' kwargs taking precedence if keys are multiply-specified

Contributor (author): done

Contributor: i meant in the function docs; seems important for callers to see/know this? up to you though, at least it's in writing now


"""
# Set and combine all optional kwargs
if gp_next_points_kwargs is None:
gp_next_points_kwargs = {}
else:
gp_next_points_kwargs = dict(kwargs.items() + gp_next_points_kwargs.items())

if gp_hyper_opt_kwargs is None:
gp_hyper_opt_kwargs = {}
else:
gp_hyper_opt_kwargs = dict(kwargs.items() + gp_hyper_opt_kwargs.items())

if gp_mean_var_kwargs is None:
gp_mean_var_kwargs = {}
else:
gp_mean_var_kwargs = dict(kwargs.items() + gp_mean_var_kwargs.items())
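# NOTE: dict() built from the concatenated item lists keeps the last
# occurrence of a duplicate key, so the endpoint-specific kwargs take
# precedence over the shared **kwargs. If an endpoint's kwargs are None,
# the shared kwargs are not applied to that endpoint.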

exp = Experiment([[0, 2], [0, 4]])
# Bootstrap with some known or already sampled point(s)
exp.historical_data.append_sample_points([
@@ -41,7 +74,7 @@ def run_example(num_to_sample=20, verbose=True, testapp=None, **kwargs):
for i in range(num_to_sample):
covariance_info = {}
if i > 0 and i % 5 == 0:
covariance_info = gp_hyper_opt(exp.historical_data.to_list_of_sample_points(), testapp=testapp, **kwargs)
covariance_info = gp_hyper_opt(exp.historical_data.to_list_of_sample_points(), testapp=testapp, **gp_hyper_opt_kwargs)

if verbose:
print "Updated covariance_info with {0:s}".format(str(covariance_info))
@@ -50,7 +83,7 @@ def run_example(num_to_sample=20, verbose=True, testapp=None, **kwargs):
exp,
covariance_info=covariance_info,
testapp=testapp,
**kwargs
**gp_next_points_kwargs
)[0] # By default we only ask for one point
# Sample the point from our objective function, we can replace this with any function
value_of_next_point = function_to_minimize(next_point_to_sample)
@@ -66,6 +99,7 @@ def run_example(num_to_sample=20, verbose=True, testapp=None, **kwargs):
exp.historical_data.to_list_of_sample_points(), # Historical data to inform Gaussian Process
points_to_evaluate, # We will calculate the mean and variance of the GP at these points
testapp=testapp,
**kwargs
Contributor: i think this should be mean_var_kwargs

Contributor (author): good call.

)

if verbose:
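For reference, a standalone sketch of the merge rule this diff implements: endpoint-specific kwargs override the shared **kwargs when a key appears in both. This is modern Python 3 (the diff's dict(a.items() + b.items()) idiom only works in Python 2), and the helper name merge_endpoint_kwargs, the example keys, and the fallback to the shared kwargs when the specific dict is None are illustrative assumptions, not part of this PR.

def merge_endpoint_kwargs(shared_kwargs, specific_kwargs):
    """Combine shared and endpoint-specific kwargs; specific keys win."""
    if specific_kwargs is None:
        # Assumption: fall back to the shared kwargs instead of dropping them.
        return dict(shared_kwargs)
    merged = dict(shared_kwargs)
    merged.update(specific_kwargs)  # later update wins on duplicate keys
    return merged

# Example: 'timeout' is multiply-specified; the endpoint-specific value wins.
shared = {'rest_port': 1337, 'timeout': 10}
specific = {'optimizer_info': {'num_multistarts': 4}, 'timeout': 30}
print(merge_endpoint_kwargs(shared, specific))
# -> {'rest_port': 1337, 'timeout': 30, 'optimizer_info': {'num_multistarts': 4}}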
27 changes: 19 additions & 8 deletions moe_examples/tests/combined_example_test.py
@@ -12,18 +12,29 @@ class CombinedExampleTest(MoeExampleTestCase):

"""Test the combined_example MOE example."""

def test_example_runs(self):
"""Simple integration test for example."""
def test_example_runs_with_non_default_optimizer_kwargs(self):
"""Simple integration test for example with non default kwargs."""
run_example(
num_to_sample=1,
verbose=False,
testapp=self.testapp,
optimizer_info={
'num_multistarts': TEST_OPTIMIZER_MULTISTARTS,
'num_random_samples': TEST_OPTIMIZER_NUM_RANDOM_SAMPLES,
'optimizer_parameters': TEST_GRADIENT_DESCENT_PARAMETERS._asdict(),
})

gp_next_points_kwargs={
'optimizer_info': {
'num_multistarts': TEST_OPTIMIZER_MULTISTARTS,
'num_random_samples': TEST_OPTIMIZER_NUM_RANDOM_SAMPLES,
'optimizer_parameters': TEST_GRADIENT_DESCENT_PARAMETERS._asdict(),
}
},
gp_hyper_opt_kwargs={
'optimizer_info': {
'num_multistarts': TEST_OPTIMIZER_MULTISTARTS,
'num_random_samples': TEST_OPTIMIZER_NUM_RANDOM_SAMPLES,
'optimizer_parameters': TEST_GRADIENT_DESCENT_PARAMETERS._asdict(),
}
},
gp_mean_var_kwargs={},
rest_port=1337,
Contributor: why are the last 2 needed? ({} and 1337)

Contributor (author): tests that passing them doesn't break anything (it did before)

)

if __name__ == "__main__":
T.run()
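As a follow-up to the review question above, a minimal unit test pinning down the precedence rule itself might look like the sketch below. It exercises the hypothetical merge_endpoint_kwargs helper from the earlier sketch, so it is illustrative only, not part of this PR's test suite.

import unittest

class MergePrecedenceTest(unittest.TestCase):

    """Check that endpoint-specific kwargs override shared kwargs."""

    def test_specific_key_wins_on_collision(self):
        shared = {'rest_port': 1337, 'timeout': 10}
        merged = merge_endpoint_kwargs(shared, {'timeout': 30})
        self.assertEqual(merged['timeout'], 30)  # specific value wins
        self.assertEqual(merged['rest_port'], 1337)  # shared key preserved

    def test_none_specific_falls_back_to_shared(self):
        shared = {'rest_port': 1337}
        self.assertEqual(merge_endpoint_kwargs(shared, None), shared)

if __name__ == '__main__':
    unittest.main()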