
Commit 19dddb8

update
kennymckormick committed Dec 5, 2023
1 parent 871cf9c commit 19dddb8
Showing 2 changed files with 3 additions and 3 deletions.
4 changes: 2 additions & 2 deletions vlmeval/chat_api/base.py
@@ -46,10 +46,10 @@ def generate(self, inputs, **kwargs):
                 if ret_code == 0 and self.fail_msg not in answer and answer != '':
                     return answer
                 elif self.verbose:
-                    warnings.warn(f"RetCode: {ret_code}\nAnswer: {answer}\nLog: {log}")
+                    print(f"RetCode: {ret_code}\nAnswer: {answer}\nLog: {log}")
             except:
                 if self.verbose:
-                    warnings.warn(f"An unknown exception occurs during try {i}")
+                    print(f"An unknown exception occurs during try {i}")
                 time.sleep(self.wait)
         return self.fail_msg if answer in ['', None] else answer
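
For context, the hunk above sits inside the API wrapper's retry loop. Below is a minimal runnable sketch of that pattern; the class name, constructor defaults, fail_msg text, and the dummy generate_inner are assumptions for illustration, and only the loop body mirrors the diff.

import time

class RetryingAPI:
    """Sketch of the retry loop around the hunk above (names and defaults assumed)."""

    def __init__(self, retry=3, wait=3, verbose=True):
        self.retry = retry          # number of attempts (assumed attribute name)
        self.wait = wait            # seconds to sleep after a failed attempt
        self.verbose = verbose
        self.fail_msg = 'Failed to obtain answer via API.'  # placeholder text

    def generate_inner(self, inputs, **kwargs):
        # Stand-in for the real API call; returns (ret_code, answer, log).
        return 0, f'echo: {inputs}', 'ok'

    def generate(self, inputs, **kwargs):
        answer = None
        for i in range(self.retry):
            try:
                ret_code, answer, log = self.generate_inner(inputs, **kwargs)
                if ret_code == 0 and self.fail_msg not in answer and answer != '':
                    return answer
                elif self.verbose:
                    # After this commit, failures are reported with print
                    # instead of warnings.warn.
                    print(f"RetCode: {ret_code}\nAnswer: {answer}\nLog: {log}")
            except Exception:
                if self.verbose:
                    print(f"An unknown exception occurs during try {i}")
                time.sleep(self.wait)
        return self.fail_msg if answer in ['', None] else answer

print(RetryingAPI().generate('hello'))   # -> 'echo: hello'

With a real backend, generate_inner issues the API call; the loop retries up to self.retry times, sleeping self.wait seconds after a failed attempt, and falls back to fail_msg once the retries are exhausted.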

2 changes: 1 addition & 1 deletion vlmeval/chat_api/gpt_int.py
@@ -62,7 +62,7 @@ def generate_inner(self, inputs, **kwargs) -> str:
         context_window = GPT_context_window(self.model)
         max_tokens = min(max_tokens, context_window - self.get_token_len(inputs))
         if 0 < max_tokens <= 100:
-            warnings.warn('Less than 100 tokens left, may exceed the context window with some additional meta symbols. ')
+            print('Less than 100 tokens left, may exceed the context window with some additional meta symbols. ')
         if max_tokens <= 0:
             return 0, self.fail_msg + 'Input string longer than context window. ', 'Length Exceeded. '
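
The hunk above clamps the completion budget to whatever remains of the model's context window before warning or bailing out. A small numeric sketch of that check follows, using hypothetical token counts; GPT_context_window and get_token_len come from the hunk's context lines, and the values substituted for them here are made up.

# Hypothetical numbers: a 4096-token window, a 4000-token prompt,
# and a requested completion budget of 512 tokens.
context_window = 4096    # stands in for GPT_context_window(self.model)
prompt_tokens = 4000     # stands in for self.get_token_len(inputs)
max_tokens = 512         # requested completion budget

max_tokens = min(max_tokens, context_window - prompt_tokens)   # min(512, 96) -> 96
if 0 < max_tokens <= 100:
    # After this commit the notice is printed rather than issued via warnings.warn.
    print('Less than 100 tokens left, may exceed the context window '
          'with some additional meta symbols. ')
if max_tokens <= 0:
    print('Length Exceeded.')  # the real method returns a failure tuple here

In the real generate_inner, the max_tokens <= 0 branch returns the failure tuple shown in the diff instead of printing.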

