Skip to content

Commit

Permalink
feat: Improve patch formatting and handle empty data in pr_code_suggestions.py
Browse files Browse the repository at this point in the history
  • Loading branch information
mrT23 committed Jan 29, 2024
1 parent 0d86779 commit 15c8fe9
Show file tree
Hide file tree
Showing 2 changed files with 11 additions and 1 deletion.
7 changes: 7 additions & 0 deletions pr_agent/algo/pr_processing.py
Original file line number Diff line number Diff line change
Expand Up @@ -375,6 +375,13 @@ def get_pr_multi_diffs(git_provider: GitProvider,
for lang in pr_languages:
sorted_files.extend(sorted(lang['files'], key=lambda x: x.tokens, reverse=True))


# try first a single run with standard diff string, with patch extension, and no deletions
patches_extended, total_tokens, patches_extended_tokens = pr_generate_extended_diff(
pr_languages, token_handler, add_line_numbers_to_hunks=True)
if total_tokens + OUTPUT_BUFFER_TOKENS_SOFT_THRESHOLD < get_max_tokens(model):
return ["\n".join(patches_extended)]

patches = []
final_diff_list = []
total_tokens = token_handler.prompt_tokens
Expand Down
5 changes: 4 additions & 1 deletion pr_agent/tools/pr_code_suggestions.py
Original file line number Diff line number Diff line change
Expand Up @@ -226,7 +226,7 @@ async def _prepare_prediction_extended(self, model: str) -> dict:
for i, patches_diff in enumerate(patches_diff_list):
get_logger().info(f"Processing chunk {i + 1} of {len(patches_diff_list)}")
self.patches_diff = patches_diff
prediction = await self._get_prediction(model)
prediction = await self._get_prediction(model) # toDo: parallelize
prediction_list.append(prediction)
self.prediction_list = prediction_list

Expand Down Expand Up @@ -259,6 +259,9 @@ async def rank_suggestions(self, data: List) -> List:
suggestion_list.append(suggestion)
data_sorted = [[]] * len(suggestion_list)

if len(suggestion_list ) == 1:
return suggestion_list

try:
suggestion_str = ""
for i, suggestion in enumerate(suggestion_list):
Expand Down

0 comments on commit 15c8fe9

Please sign in to comment.