From 83d54700d45825e479909d28830ae99fbc7fb67f Mon Sep 17 00:00:00 2001
From: mertyg
Date: Tue, 11 Jun 2024 16:07:55 -0700
Subject: [PATCH] fix task argument, touch readme

---
 README.md                         | 4 ++--
 evaluation/prompt_optimization.py | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/README.md b/README.md
index b711829..d659d49 100644
--- a/README.md
+++ b/README.md
@@ -50,7 +50,7 @@ TextGrad can optimize unstructured variables, such as text. Let us have an initi
 ```python
 import textgrad as tg
 
-tg.set_backward_engine(tg.get_engine("gpt-4o"))
+tg.set_backward_engine("gpt-4o")
 
 initial_solution = """To solve the equation 3x^2 - 7x + 2 = 0, we use the quadratic formula:
 x = (-b ± √(b^2 - 4ac)) / 2a
@@ -112,7 +112,7 @@ TextGrad can also optimize prompts in PyTorch style! Here's how to do it with Te
 ```python
 import textgrad as tg
 llm_engine = tg.get_engine("gpt-3.5-turbo")
-tg.set_backward_engine(tg.get_engine("gpt-4o"))
+tg.set_backward_engine("gpt-4o")
 
 _, val_set, _, eval_fn = load_task("BBH_object_counting", llm_engine)
 question_str, answer_str = val_set[0]
diff --git a/evaluation/prompt_optimization.py b/evaluation/prompt_optimization.py
index 31c90f0..130c7fb 100644
--- a/evaluation/prompt_optimization.py
+++ b/evaluation/prompt_optimization.py
@@ -16,7 +16,7 @@ def set_seed(seed):
 
 def config():
     parser = argparse.ArgumentParser(description="Optimize a prompt for a task.")
-    parser.add_argument("--task", type=str, default=" ", help="The task to evaluate the model on.")
+    parser.add_argument("--task", type=str, default="BBH_object_counting", help="The task to evaluate the model on.")
     parser.add_argument("--evaluation_engine", type=str, default="gpt-4o", help="The API to use for evaluation.")
     parser.add_argument("--test_engine", type=str, default="gpt-3.5-turbo-0125", help="The API to use for evaluation.")
     parser.add_argument("--batch_size", type=int, default=3, help="The batch size to use for training.")
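
Usage note: a minimal sketch of the calling convention after this patch, based only on the README hunks above. The `from textgrad.tasks import load_task` import path is an assumption and is not shown in the diff.

```python
# Minimal sketch of usage after this patch (not a complete script).
import textgrad as tg
from textgrad.tasks import load_task  # assumed import path for load_task

# After the patch, set_backward_engine takes the engine name directly,
# rather than a pre-built engine from tg.get_engine(...).
tg.set_backward_engine("gpt-4o")

# The evaluation script now defaults --task to "BBH_object_counting";
# the same task can be loaded directly, as in the README example.
llm_engine = tg.get_engine("gpt-3.5-turbo")
_, val_set, _, eval_fn = load_task("BBH_object_counting", llm_engine)
question_str, answer_str = val_set[0]
```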