diff --git a/lm_eval/models/openai_completions.py b/lm_eval/models/openai_completions.py
index dd0f3295f9..c10368742d 100644
--- a/lm_eval/models/openai_completions.py
+++ b/lm_eval/models/openai_completions.py
@@ -126,6 +126,9 @@ def _create_payload(
         seed=1234,
         **kwargs,
     ) -> dict:
+        assert (
+            type(messages) is not str
+        ), "chat-completions require the --apply_chat_template flag."
         gen_kwargs.pop("do_sample", False)
         if "max_tokens" in gen_kwargs:
             max_tokens = gen_kwargs.pop("max_tokens")
@@ -247,6 +250,9 @@ def _create_payload(
         seed=1234,
         **kwargs,
     ) -> dict:
+        assert (
+            type(messages) is not str
+        ), "chat-completions require the --apply_chat_template flag."
         gen_kwargs.pop("do_sample", False)
         if "max_tokens" in gen_kwargs:
             max_tokens = gen_kwargs.pop("max_tokens")
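
The added guard rejects a raw prompt string before any payload is built, since a plain string means the prompt was never run through a chat template. A minimal standalone sketch of the same check (the helper name below is illustrative, not part of the patch):

def _require_chat_messages(messages):
    # Without --apply_chat_template the prompt arrives as a bare string,
    # which a chat-completions endpoint cannot accept.
    assert (
        type(messages) is not str
    ), "chat-completions require the --apply_chat_template flag."
    return messages

# _require_chat_messages("What is 2+2?")  # would raise AssertionError
_require_chat_messages([{"role": "user", "content": "What is 2+2?"}])  # passes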