From 9454717f6f02490d2016cd46b70441c9ce5f5af8 Mon Sep 17 00:00:00 2001
From: Simnol22
Date: Thu, 8 Sep 2022 13:44:31 -0400
Subject: [PATCH 1/2] Fix some missing/incorrect arguments and add
 if __name__ == '__main__' guards so that the examples don't crash

---
 docs/src/tutorials/pytorch-mnist.rst          |   2 +-
 docs/src/tutorials/scikit-learn.rst           |   2 +-
 examples/tutorials/code_1_python_api.py       |  36 +++---
 .../tutorials/code_2_hyperband_checkpoint.py  | 106 +++++++++---------
 4 files changed, 73 insertions(+), 73 deletions(-)

diff --git a/docs/src/tutorials/pytorch-mnist.rst b/docs/src/tutorials/pytorch-mnist.rst
index 5e9429653..10ca068bf 100644
--- a/docs/src/tutorials/pytorch-mnist.rst
+++ b/docs/src/tutorials/pytorch-mnist.rst
@@ -55,7 +55,7 @@ be called only once because Oríon only looks at 1 ``'objective'`` value per run
 
 .. code-block:: python
 
-    test_error_rate = test(args, model, device, test_loader)
+    test_error_rate = test(model, device, test_loader)
 
     report_objective(test_error_rate)
 
diff --git a/docs/src/tutorials/scikit-learn.rst b/docs/src/tutorials/scikit-learn.rst
index 5a33967cb..393d653cb 100644
--- a/docs/src/tutorials/scikit-learn.rst
+++ b/docs/src/tutorials/scikit-learn.rst
@@ -19,7 +19,7 @@ Sample script
 
 .. literalinclude:: /../../examples/scikitlearn-iris/main.py
    :language: python
-   :lines: 1-2, 5-9, 13-30
+   :lines: 1-9, 13-30
 
 This very basic script takes in parameter one positional argument for the
 hyper-parameter *epsilon* which control the loss in the script.
 
diff --git a/examples/tutorials/code_1_python_api.py b/examples/tutorials/code_1_python_api.py
index 3bcb17542..0992b25b1 100644
--- a/examples/tutorials/code_1_python_api.py
+++ b/examples/tutorials/code_1_python_api.py
@@ -52,13 +52,13 @@
 # a ``ValueError`` will be raised. At least one of the results must have the type ``objective``,
 # the metric that is minimized by the algorithm.
 
+if __name__ == '__main__':
+    def rosenbrock(x, noise=None):
+        """Evaluate partial information of a quadratic."""
+        y = x - 34.56789
+        z = 4 * y**2 + 23.4
 
-def rosenbrock(x, noise=None):
-    """Evaluate partial information of a quadratic."""
-    y = x - 34.56789
-    z = 4 * y**2 + 23.4
-
-    return [{"name": "objective", "type": "objective", "value": z}]
+        return [{"name": "objective", "type": "objective", "value": z}]
 
 
 #%%
@@ -67,12 +67,12 @@ def rosenbrock(x, noise=None):
 # will iteratively try new sets of hyperparameters suggested by the optimization algorithm
 # until it reaches 20 trials.
 
-experiment.workon(rosenbrock, max_trials=20)
+    experiment.workon(rosenbrock, max_trials=20)
 
 #%%
 # Now let's plot the regret curve to see how well went the optimization.
 
-experiment.plot.regret().show()
+    experiment.plot.regret().show()
 
 #%%
 # .. This file is produced by docs/scripts/build_database_and_plots.py
@@ -98,19 +98,19 @@ def rosenbrock(x, noise=None):
 # that can easily find the optimal solution. We specify the algorithm configuration to
 # :func:`build experiment <orion.client.build_experiment>`
 
-experiment = build_experiment(
-    "tpe-rosenbrock",
-    space=space,
-    algorithms={"tpe": {"n_initial_points": 5}},
-    storage=storage,
-)
+    experiment = build_experiment(
+        "tpe-rosenbrock",
+        space=space,
+        algorithms={"tpe": {"n_initial_points": 5}},
+        storage=storage,
+    )
 
-#%%
-# We then again run the optimization for 20 trials and plot the regret.
+    #%%
+    # We then again run the optimization for 20 trials and plot the regret.
-experiment.workon(rosenbrock, max_trials=20)
+    experiment.workon(rosenbrock, max_trials=20)
 
-experiment.plot.regret().show()
+    experiment.plot.regret().show()
 
 
 # sphinx_gallery_thumbnail_path = '_static/python.png'
diff --git a/examples/tutorials/code_2_hyperband_checkpoint.py b/examples/tutorials/code_2_hyperband_checkpoint.py
index 061d00226..51af423fa 100644
--- a/examples/tutorials/code_2_hyperband_checkpoint.py
+++ b/examples/tutorials/code_2_hyperband_checkpoint.py
@@ -122,7 +122,7 @@ def build_data_loaders(batch_size, split_seed=1):
 # Next, we write the function to save checkpoints. It is important to include
 # not only the model in the checkpoint, but also the optimizer and the learning rate
 # schedule when using one. In this example we will use the exponential learning rate schedule,
-# so we checkpoint it. We save the current epoch as well so that we now where we resume from.
+# so we checkpoint it. We save the current epoch as well so that we know where we resume from.
 
 
 def save_checkpoint(checkpoint, model, optimizer, lr_scheduler, epoch):
@@ -259,8 +259,8 @@ def main(
 
 #%%
 # You can test the training pipeline before working with the hyperparameter optimization.
-
-main(epochs=4)
+if __name__ == '__main__':
+    main(epochs=4)
 
 
 #%%
@@ -277,58 +277,58 @@ def main(
 # checkpoint file with ``f"{experiment.working_dir}/{trial.hash_params}"``.
 
 
-from orion.client import build_experiment
-
+    from orion.client import build_experiment
 
-def run_hpo():
+    def run_hpo():
 
-    # Specify the database where the experiments are stored. We use a local PickleDB here.
-    storage = {
-        "type": "legacy",
-        "database": {
-            "type": "pickleddb",
-            "host": "./db.pkl",
-        },
-    }
+        # Specify the database where the experiments are stored. We use a local PickleDB here.
+        storage = {
+            "type": "legacy",
+            "database": {
+                "type": "pickleddb",
+                "host": "./db.pkl",
+            },
+        }
 
-    # Load the data for the specified experiment
-    experiment = build_experiment(
-        "hyperband-cifar10",
-        space={
-            "epochs": "fidelity(1, 120, base=4)",
-            "learning_rate": "loguniform(1e-5, 0.1)",
-            "momentum": "uniform(0, 0.9)",
-            "weight_decay": "loguniform(1e-10, 1e-2)",
-            "gamma": "loguniform(0.97, 1)",
-        },
-        algorithms={
-            "hyperband": {
-                "seed": 1,
-                "repetitions": 5,
-            },
-        },
-        storage=storage,
-    )
-
-    trials = 1
-    while not experiment.is_done:
-        print("trial", trials)
-        trial = experiment.suggest()
-        if trial is None and experiment.is_done:
-            break
-        valid_error_rate = main(
-            **trial.params, checkpoint=f"{experiment.working_dir}/{trial.hash_params}"
-        )
-        experiment.observe(trial, valid_error_rate, name="valid_error_rate")
-        trials += 1
+        # Load the data for the specified experiment
+        experiment = build_experiment(
+            "hyperband-cifar10",
+            space={
+                "epochs": "fidelity(1, 120, base=4)",
+                "learning_rate": "loguniform(1e-5, 0.1)",
+                "momentum": "uniform(0, 0.9)",
+                "weight_decay": "loguniform(1e-10, 1e-2)",
+                "gamma": "loguniform(0.97, 1)",
+            },
+            algorithms={
+                "hyperband": {
+                    "seed": 1,
+                    "repetitions": 5,
+                },
+            },
+            storage=storage,
+        )
+
+        trials = 1
+        while not experiment.is_done:
+            print("trial", trials)
+            trial = experiment.suggest()
+            if trial is None and experiment.is_done:
+                break
+            valid_error_rate = main(
+                **trial.params, checkpoint=f"{experiment.working_dir}/{trial.hash_params}"
+            )
+            experiment.observe(trial, valid_error_rate, name="valid_error_rate")
+            trials += 1
 
 
-#%%
-# Let's run the optimization now. You may want to reduce the maximum number of epochs in
-# ``fidelity(1, 120, base=4)`` and set the number of ``repetitions`` to 1 to get results more
-# quickly. With current configuration, this example takes 2 days to run on a Titan RTX.
-
-experiment = run_hpo()
+    #%%
+    # Let's run the optimization now. You may want to reduce the maximum number of epochs in
+    # ``fidelity(1, 120, base=4)`` and set the number of ``repetitions`` to 1 to get results more
+    # quickly. With current configuration, this example takes 2 days to run on a Titan RTX.
+
+    experiment = run_hpo()
 
 #%%
 # Analysis
@@ -340,8 +340,8 @@ def run_hpo():
 # We should first look at the :ref:`sphx_glr_auto_examples_plot_1_regret.py`
 # to verify the optimization with Hyperband.
 
-fig = experiment.plot.regret()
-fig.show()
+    fig = experiment.plot.regret()
+    fig.show()
 
 #%%
 # .. This file is produced by docs/scripts/build_database_and_plots.py
@@ -357,8 +357,8 @@ def run_hpo():
 # lower than 10%. To see if the search space may be the issue, we first look at the
 # :ref:`sphx_glr_auto_examples_plot_3_lpi.py`.
 
-fig = experiment.plot.lpi()
-fig.show()
+    fig = experiment.plot.lpi()
+    fig.show()
 
 #%%
 # .. raw:: html
@@ -370,8 +370,8 @@ def run_hpo():
 # it is worth looking at the :ref:`sphx_glr_auto_examples_plot_4_partial_dependencies.py`
 # to see if the search space was perhaps too narrow or too large.
 
-fig = experiment.plot.partial_dependencies(params=["gamma", "learning_rate"])
-fig.show()
+    fig = experiment.plot.partial_dependencies(params=["gamma", "learning_rate"])
+    fig.show()
 
 
 # sphinx_gallery_thumbnail_path = '_static/restart.png'

From 5660f24aa0e33990b4c8b63fb522f6337d078c95 Mon Sep 17 00:00:00 2001
From: Simnol22
Date: Fri, 9 Sep 2022 09:42:36 -0400
Subject: [PATCH 2/2] Fix indentation, add a warning for macOS users, and fix
 black formatting

---
 examples/tutorials/code_1_python_api.py       |  39 ++++---
 .../tutorials/code_2_hyperband_checkpoint.py  | 107 +++++++++---------
 2 files changed, 76 insertions(+), 70 deletions(-)

diff --git a/examples/tutorials/code_1_python_api.py b/examples/tutorials/code_1_python_api.py
index 0992b25b1..fadc373bf 100644
--- a/examples/tutorials/code_1_python_api.py
+++ b/examples/tutorials/code_1_python_api.py
@@ -7,6 +7,9 @@
 1-d ``rosenbrock`` function with random search and TPE and visualize the regret curve
 to compare the algorithms.
 
+Note for macOS users: you will need to either run this page as a Jupyter notebook, or encapsulate
+the code in a main function and run it under ``if __name__ == '__main__'``; otherwise it may crash.
+
 We first import the only function needed, :func:`build experiment <orion.client.build_experiment>`.
 """
 from orion.client import build_experiment
@@ -52,13 +55,13 @@
 # a ``ValueError`` will be raised. At least one of the results must have the type ``objective``,
 # the metric that is minimized by the algorithm.
 
-if __name__ == '__main__':
-    def rosenbrock(x, noise=None):
-        """Evaluate partial information of a quadratic."""
-        y = x - 34.56789
-        z = 4 * y**2 + 23.4
 
-        return [{"name": "objective", "type": "objective", "value": z}]
+def rosenbrock(x, noise=None):
+    """Evaluate partial information of a quadratic."""
+    y = x - 34.56789
+    z = 4 * y**2 + 23.4
+
+    return [{"name": "objective", "type": "objective", "value": z}]
 
 
 #%%
@@ -67,12 +70,12 @@ def rosenbrock(x, noise=None):
 # will iteratively try new sets of hyperparameters suggested by the optimization algorithm
 # until it reaches 20 trials.
 
-    experiment.workon(rosenbrock, max_trials=20)
+experiment.workon(rosenbrock, max_trials=20)
 
 #%%
 # Now let's plot the regret curve to see how well went the optimization.
 
-    experiment.plot.regret().show()
+experiment.plot.regret().show()
 
 #%%
 # .. This file is produced by docs/scripts/build_database_and_plots.py
@@ -98,19 +101,19 @@ def rosenbrock(x, noise=None):
 # that can easily find the optimal solution. We specify the algorithm configuration to
 # :func:`build experiment <orion.client.build_experiment>`
 
-    experiment = build_experiment(
-        "tpe-rosenbrock",
-        space=space,
-        algorithms={"tpe": {"n_initial_points": 5}},
-        storage=storage,
-    )
+experiment = build_experiment(
+    "tpe-rosenbrock",
+    space=space,
+    algorithms={"tpe": {"n_initial_points": 5}},
+    storage=storage,
+)
 
-    #%%
-    # We then again run the optimization for 20 trials and plot the regret.
+#%%
+# We then again run the optimization for 20 trials and plot the regret.
 
-    experiment.workon(rosenbrock, max_trials=20)
+experiment.workon(rosenbrock, max_trials=20)
 
-    experiment.plot.regret().show()
+experiment.plot.regret().show()
 
 
 # sphinx_gallery_thumbnail_path = '_static/python.png'
diff --git a/examples/tutorials/code_2_hyperband_checkpoint.py b/examples/tutorials/code_2_hyperband_checkpoint.py
index 51af423fa..df3f209ae 100644
--- a/examples/tutorials/code_2_hyperband_checkpoint.py
+++ b/examples/tutorials/code_2_hyperband_checkpoint.py
@@ -35,6 +35,9 @@
 checkpoints. We will demonstrate below how this can be done with PyTorch, but using
 Oríon's Python API.
 
+Note for macOS users: you will need to either run this page as a Jupyter notebook, or encapsulate
+the code in a main function and run it under ``if __name__ == '__main__'``; otherwise it may crash.
+
 Training code
 -------------
 
@@ -259,8 +262,8 @@ def main(
 
 #%%
 # You can test the training pipeline before working with the hyperparameter optimization.
-if __name__ == '__main__':
-    main(epochs=4)
+
+main(epochs=4)
 
 
 #%%
@@ -277,58 +280,58 @@ def main(
 # checkpoint file with ``f"{experiment.working_dir}/{trial.hash_params}"``.
 
 
-    from orion.client import build_experiment
+from orion.client import build_experiment
 
-    def run_hpo():
+def run_hpo():
 
-        # Specify the database where the experiments are stored. We use a local PickleDB here.
-        storage = {
-            "type": "legacy",
-            "database": {
-                "type": "pickleddb",
-                "host": "./db.pkl",
-            },
-        }
-
-        # Load the data for the specified experiment
-        experiment = build_experiment(
-            "hyperband-cifar10",
-            space={
-                "epochs": "fidelity(1, 120, base=4)",
-                "learning_rate": "loguniform(1e-5, 0.1)",
-                "momentum": "uniform(0, 0.9)",
-                "weight_decay": "loguniform(1e-10, 1e-2)",
-                "gamma": "loguniform(0.97, 1)",
-            },
-            algorithms={
-                "hyperband": {
-                    "seed": 1,
-                    "repetitions": 5,
-                },
-            },
-            storage=storage,
-        )
-
-        trials = 1
-        while not experiment.is_done:
-            print("trial", trials)
-            trial = experiment.suggest()
-            if trial is None and experiment.is_done:
-                break
-            valid_error_rate = main(
-                **trial.params, checkpoint=f"{experiment.working_dir}/{trial.hash_params}"
-            )
-            experiment.observe(trial, valid_error_rate, name="valid_error_rate")
-            trials += 1
+    # Specify the database where the experiments are stored. We use a local PickleDB here.
+    storage = {
+        "type": "legacy",
+        "database": {
+            "type": "pickleddb",
+            "host": "./db.pkl",
+        },
+    }
+
+    # Load the data for the specified experiment
+    experiment = build_experiment(
+        "hyperband-cifar10",
+        space={
+            "epochs": "fidelity(1, 120, base=4)",
+            "learning_rate": "loguniform(1e-5, 0.1)",
+            "momentum": "uniform(0, 0.9)",
+            "weight_decay": "loguniform(1e-10, 1e-2)",
+            "gamma": "loguniform(0.97, 1)",
+        },
+        algorithms={
+            "hyperband": {
+                "seed": 1,
+                "repetitions": 5,
+            },
+        },
+        storage=storage,
+    )
+
+    trials = 1
+    while not experiment.is_done:
+        print("trial", trials)
+        trial = experiment.suggest()
+        if trial is None and experiment.is_done:
+            break
+        valid_error_rate = main(
+            **trial.params, checkpoint=f"{experiment.working_dir}/{trial.hash_params}"
+        )
+        experiment.observe(trial, valid_error_rate, name="valid_error_rate")
+        trials += 1
 
 
-    #%%
-    # Let's run the optimization now. You may want to reduce the maximum number of epochs in
-    # ``fidelity(1, 120, base=4)`` and set the number of ``repetitions`` to 1 to get results more
-    # quickly. With current configuration, this example takes 2 days to run on a Titan RTX.
-
-    experiment = run_hpo()
+#%%
+# Let's run the optimization now. You may want to reduce the maximum number of epochs in
+# ``fidelity(1, 120, base=4)`` and set the number of ``repetitions`` to 1 to get results more
+# quickly. With the current configuration, this example takes 2 days to run on a Titan RTX.
+
+experiment = run_hpo()
 
 #%%
 # Analysis
@@ -340,8 +343,8 @@ def run_hpo():
 # We should first look at the :ref:`sphx_glr_auto_examples_plot_1_regret.py`
 # to verify the optimization with Hyperband.
 
-    fig = experiment.plot.regret()
-    fig.show()
+fig = experiment.plot.regret()
+fig.show()
 
 #%%
 # .. This file is produced by docs/scripts/build_database_and_plots.py
@@ -357,8 +360,8 @@ def run_hpo():
 # lower than 10%. To see if the search space may be the issue, we first look at the
 # :ref:`sphx_glr_auto_examples_plot_3_lpi.py`.
 
-    fig = experiment.plot.lpi()
-    fig.show()
+fig = experiment.plot.lpi()
+fig.show()
 
 #%%
 # .. raw:: html
@@ -370,8 +373,8 @@ def run_hpo():
 # it is worth looking at the :ref:`sphx_glr_auto_examples_plot_4_partial_dependencies.py`
 # to see if the search space was perhaps too narrow or too large.
 
-    fig = experiment.plot.partial_dependencies(params=["gamma", "learning_rate"])
-    fig.show()
+fig = experiment.plot.partial_dependencies(params=["gamma", "learning_rate"])
+fig.show()
 
 
 # sphinx_gallery_thumbnail_path = '_static/restart.png'
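
Editor's note (appended for clarity; not part of the patch series): the macOS warning added by
the second commit exists because Oríon can run trials in worker processes, and on macOS Python's
``multiprocessing`` spawns fresh interpreters that re-import the script; any module-level call
such as ``experiment.workon(...)`` would then execute again in every worker, which is the crash
the first commit's guards were meant to avoid. A minimal sketch of the structure the warning
recommends is shown below. The ``rosenbrock`` function, the PickleDB storage, and the
``workon``/``plot.regret`` calls are taken from ``code_1_python_api.py`` above; the experiment
name and search space are illustrative assumptions, since the patch does not show that part of
the file.

    from orion.client import build_experiment


    def rosenbrock(x, noise=None):
        """Evaluate partial information of a quadratic (same function as the tutorial)."""
        y = x - 34.56789
        z = 4 * y**2 + 23.4
        return [{"name": "objective", "type": "objective", "value": z}]


    def main():
        # Local PickleDB storage, as in the tutorials.
        storage = {
            "type": "legacy",
            "database": {"type": "pickleddb", "host": "./db.pkl"},
        }

        # Hypothetical experiment name and search space, for illustration only.
        experiment = build_experiment(
            "guarded-rosenbrock",
            space={"x": "uniform(-50, 50)"},
            storage=storage,
        )

        # Run the optimization and plot the regret curve, as in the tutorial.
        experiment.workon(rosenbrock, max_trials=20)
        experiment.plot.regret().show()


    if __name__ == '__main__':
        # Only the entry point is guarded: the definitions above are safe to
        # re-import, so spawned worker processes do not re-run the optimization.
        main()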