Skip to content

Commit

Permalink
Merge pull request #187 from vibinex/tr/phind/2.0
Browse files Browse the repository at this point in the history
Diff Graph implementation
  • Loading branch information
tapishr authored Oct 12, 2024
2 parents 613a037 + 0ba7fc5 commit 0660db6
Show file tree
Hide file tree
Showing 23 changed files with 3,568 additions and 32 deletions.
4 changes: 2 additions & 2 deletions Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ FROM ubuntu:latest
# # Install dependencies required by the application
RUN \
apt-get update && \
apt-get install ca-certificates git -y && \
apt-get install ca-certificates git ripgrep -y && \
apt-get clean
ADD http://archive.ubuntu.com/ubuntu/pool/main/o/openssl/libssl1.1_1.1.1f-1ubuntu2_amd64.deb /tmp
RUN chmod a+x /tmp/libssl1.1_1.1.1f-1ubuntu2_amd64.deb && \
Expand Down Expand Up @@ -45,7 +45,7 @@ ENV PROVIDER=$PROVIDER
COPY ./vibi-dpu/target/debug/vibi-dpu /app/vibi-dpu
COPY ./pubsub-sa.json /app/pubsub-sa.json
COPY ./repo-profiler.pem /app/repo-profiler.pem

COPY ./prompts /app/prompts
# Create directory for configuration
RUN mkdir /app/config

Expand Down
6 changes: 3 additions & 3 deletions vibi-dpu/Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[package]
name = "vibi-dpu"
version = "1.0.0"
version = "2.0.0"
edition = "2021"
authors = ["Tapish Rathore <tapish@vibinex.com>"]
license = "GPL-3.0-or-later"
Expand All @@ -21,7 +21,7 @@ serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
base64ct = "1.5.3"
sha256 = "1.1.1"
reqwest = { version = "0.11", features = ["json", "blocking"] }
reqwest = { version = "0.11", features = ["json", "blocking", "stream"] }
google-cloud-pubsub = "0.15.0"
google-cloud-default = { version = "0.3.0", features = ["pubsub"] }
google-cloud-googleapis = "0.9.0"
Expand All @@ -37,5 +37,5 @@ once_cell = "1.18.0" # MIT
jsonwebtoken = "8.3.0" # MIT
fern = "0.6.2" # MIT
log = "0.4.20" # MIT/Apache2

walkdir = "2.5.0" # Unlicense/MIT
# todo - check all lib licences
46 changes: 46 additions & 0 deletions vibi-dpu/src/core/diff_graph.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
use crate::graph::mermaid_elements::generate_mermaid_flowchart;
use crate::utils::user::ProviderEnum;
use crate::utils::review::Review;
use crate::core::github;
use crate::utils::gitops::StatItem;

/// Builds the diff-graph comment for a review and posts it on the pull request.
///
/// `excluded_files` and `small_files` are the two partitions of the PR's
/// changed files (see `get_excluded_files`); both are fed into the mermaid
/// flowchart so the graph covers the whole diff.
/// Only GitHub posting is implemented; Bitbucket is still a TODO.
pub async fn send_diff_graph(review: &Review, excluded_files: &Vec<StatItem>, small_files: &Vec<StatItem>, access_token: &str) {
    let comment = diff_graph_comment_text(excluded_files, small_files, review).await;
    // add comment for GitHub
    if review.provider().to_string() == ProviderEnum::Github.to_string() {
        log::info!("Inserting comment on repo {}...", review.repo_name());
        // `access_token` is already a `&str`; taking another reference
        // created a needless `&&str` (clippy: needless_borrow).
        github::comment::add_comment(&comment, review, access_token).await;
    }

    // TODO: add comment for Bitbucket
}

async fn diff_graph_comment_text(excluded_files: &Vec<StatItem>, small_files: &Vec<StatItem>, review: &Review) -> String {
let mut comment = "Diff Graph:\n\n".to_string();

let all_diff_files: Vec<StatItem> = excluded_files
.iter()
.chain(small_files.iter())
.cloned() // Clone the StatItem instances since `iter` returns references
.collect(); // Collect into a new vector
if let Some(mermaid_text) = mermaid_comment(&all_diff_files, review).await {
comment += mermaid_text.as_str();
}
comment += "\nTo modify DiffGraph settings, go to [your Vibinex settings page.](https://vibinex.com/settings)\n";
return comment;
}

/// Wraps the generated mermaid flowchart in a fenced markdown block
/// under a "Call Stack Diff" heading.
///
/// Returns `None` (after logging an error) when flowchart generation fails.
async fn mermaid_comment(diff_files: &Vec<StatItem>, review: &Review) -> Option<String> {
    // Bind-or-bail in one step instead of the is_none()/expect() pair,
    // which checked and unwrapped the Option twice.
    let flowchart_str = match generate_mermaid_flowchart(diff_files, review).await {
        Some(flowchart) => flowchart,
        None => {
            log::error!("[mermaid_comment] Unable to generate flowchart for review: {}", review.id());
            return None;
        }
    };
    Some(format!(
        "### Call Stack Diff\n```mermaid\n{}\n```",
        flowchart_str,
    ))
}

3 changes: 2 additions & 1 deletion vibi-dpu/src/core/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,4 +4,5 @@ pub mod utils;
pub mod approval;
pub mod bitbucket;
pub mod github;
pub mod trigger;
pub mod trigger;
pub mod diff_graph;
18 changes: 13 additions & 5 deletions vibi-dpu/src/core/relevance.rs
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
use std::collections::{HashMap, HashSet};

use crate::{bitbucket::{self, user::author_from_commit}, core::github, db::review::save_review_to_db, utils::{aliases::get_login_handles, relevance::Relevance, hunk::{HunkMap, PrHunkItem}, user::ProviderEnum}};
use crate::{bitbucket::{self, user::author_from_commit}, core::github, db::review::save_review_to_db, utils::{aliases::get_login_handles, gitops::StatItem, hunk::{HunkMap, PrHunkItem}, relevance::Relevance, user::ProviderEnum}};
use crate::utils::review::Review;
use crate::utils::repo_config::RepoConfig;

pub async fn process_relevance(hunkmap: &HunkMap, review: &Review,
pub async fn process_relevance(hunkmap: &HunkMap, excluded_files: &Vec<StatItem>, review: &Review,
repo_config: &mut RepoConfig, access_token: &str, old_review_opt: &Option<Review>,
) {
log::info!("Processing relevance of code authors...");
Expand All @@ -22,7 +22,7 @@ pub async fn process_relevance(hunkmap: &HunkMap, review: &Review,
let relevance_vec = relevance_vec_opt.expect("Empty coverage_obj_opt");
if repo_config.comment() {
// create comment text
let comment = comment_text(&relevance_vec, repo_config.auto_assign());
let comment = relevant_reviewers_comment_text(&relevance_vec, repo_config.auto_assign(), excluded_files).await;
// add comment
if review.provider().to_string() == ProviderEnum::Bitbucket.to_string() {
// TODO - add feature flag check
Expand Down Expand Up @@ -184,7 +184,8 @@ async fn calculate_relevance(prhunk: &PrHunkItem, review: &mut Review) -> Option
return Some(relevance_vec);
}

fn comment_text(relevance_vec: &Vec<Relevance>, auto_assign: bool) -> String {
async fn relevant_reviewers_comment_text(relevance_vec: &Vec<Relevance>, auto_assign: bool,
excluded_files: &Vec<StatItem>) -> String {
let mut comment = "Relevant users for this PR:\n\n".to_string(); // Added two newlines
comment += "| Contributor Name/Alias | Relevance |\n"; // Added a newline at the end
comment += "| -------------- | --------------- |\n"; // Added a newline at the end
Expand All @@ -208,6 +209,14 @@ fn comment_text(relevance_vec: &Vec<Relevance>, auto_assign: bool) -> String {
comment += &format!("Missing profile handles for {} aliases. [Go to your Vibinex settings page](https://vibinex.com/settings) to map aliases to profile handles.", unmapped_aliases.len());
}

if !excluded_files.is_empty() {
comment += "\n\n";
comment += "Ignoring following files due to large size: ";
for file_item in excluded_files {
comment += &format!("- {}\n", file_item.filepath.as_str());
}
}

if auto_assign {
comment += "\n\n";
comment += "Auto assigning to relevant reviewers.";
Expand All @@ -216,7 +225,6 @@ fn comment_text(relevance_vec: &Vec<Relevance>, auto_assign: bool) -> String {
comment += "If you are a relevant reviewer, you can use the [Vibinex browser extension](https://chromewebstore.google.com/detail/vibinex-code-review/jafgelpkkkopeaefadkdjcmnicgpcncc) to see parts of the PR relevant to you\n"; // Added a newline at the end
comment += "Relevance of the reviewer is calculated based on the git blame information of the PR. To know more, hit us up at contact@vibinex.com.\n\n"; // Added two newlines
comment += "To change comment and auto-assign settings, go to [your Vibinex settings page.](https://vibinex.com/u)\n"; // Added a newline at the end

return comment;
}

Expand Down
42 changes: 30 additions & 12 deletions vibi-dpu/src/core/review.rs
Original file line number Diff line number Diff line change
@@ -1,17 +1,17 @@
use std::env;
use std::{env, thread, time::Duration};

use serde_json::Value;

use crate::{
core::{relevance::process_relevance, utils::get_access_token},
core::{relevance::process_relevance, diff_graph::send_diff_graph, utils::get_access_token},
db::{
hunk::{get_hunk_from_db, store_hunkmap_to_db},
repo::get_clone_url_clone_dir,
repo_config::save_repo_config_to_db,
review::{get_review_from_db, save_review_to_db},
},
utils::{
gitops::{commit_exists, generate_blame, generate_diff, get_excluded_files, git_pull, process_diffmap},
gitops::{commit_exists, generate_blame, generate_diff, get_excluded_files, git_pull, process_diffmap, StatItem},
hunk::{HunkMap, PrHunkItem},
repo_config::RepoConfig,
reqwest_client::get_client,
Expand Down Expand Up @@ -41,11 +41,24 @@ pub async fn process_review(message_data: &Vec<u8>) {
}
let access_token = access_token_opt.expect("Empty access_token_opt");
commit_check(&review, &access_token).await;
let hunkmap_opt = process_review_changes(&review).await;
send_hunkmap(&hunkmap_opt, &review, &repo_config, &access_token, &old_review_opt).await;
process_review_changes(&review, &repo_config, &access_token, &old_review_opt).await;
}

pub async fn send_hunkmap(hunkmap_opt: &Option<HunkMap>, review: &Review,
/// Orchestrates post-clone processing of a review: partitions the changed
/// files, computes the hunk map, sends the relevance comment, and (when
/// enabled in repo config) posts the diff graph.
pub async fn process_review_changes(review: &Review, repo_config: &RepoConfig, access_token: &str, old_review_opt: &Option<Review>) {
    log::info!("Processing changes in code...");
    // Guard clause: bail out early if the file partition is unavailable.
    let (excluded_files, smallfiles) = match get_included_and_excluded_files(review) {
        Some(files) => files,
        None => {
            log::error!("Failed to get included and excluded files");
            return;
        }
    };
    let hunkmap_opt = calculate_hunkmap(review, &smallfiles).await;
    send_hunkmap(&hunkmap_opt, &excluded_files, review, repo_config, access_token, old_review_opt).await;

    // Diff graph is an opt-in, per-repo feature.
    if repo_config.diff_graph() {
        send_diff_graph(review, &excluded_files, &smallfiles, access_token).await;
    }
}

pub async fn send_hunkmap(hunkmap_opt: &Option<HunkMap>, excluded_files: &Vec<StatItem>, review: &Review,
repo_config: &RepoConfig, access_token: &str, old_review_opt: &Option<Review>) {
if hunkmap_opt.is_none() {
log::error!("[send_hunkmap] Empty hunkmap in send_hunkmap");
Expand All @@ -58,7 +71,7 @@ pub async fn send_hunkmap(hunkmap_opt: &Option<HunkMap>, review: &Review,
let hunkmap_async = hunkmap.clone();
let review_async = review.clone();
let mut repo_config_clone = repo_config.clone();
process_relevance(&hunkmap_async, &review_async,
process_relevance(&hunkmap_async, &excluded_files, &review_async,
&mut repo_config_clone, access_token, old_review_opt).await;
}

Expand All @@ -73,16 +86,20 @@ fn hunk_already_exists(review: &Review) -> bool {
log::debug!("[hunk_already_exists] Hunk already in db!");
return true;
}
pub async fn process_review_changes(review: &Review) -> Option<HunkMap>{
log::info!("Processing changes in code...");
let mut prvec = Vec::<PrHunkItem>::new();

fn get_included_and_excluded_files(review: &Review) -> Option<(Vec<StatItem>, Vec<StatItem>)> {
let fileopt = get_excluded_files(&review);
log::debug!("[process_review_changes] fileopt = {:?}", &fileopt);
if fileopt.is_none() {
log::error!("[process_review_changes] No files to review for PR {}", review.id());
return None;
}
let (_, smallfiles) = fileopt.expect("fileopt is empty");
let (excluded_files, smallfiles) = fileopt.expect("fileopt is empty");
return Some(( excluded_files, smallfiles));
}

async fn calculate_hunkmap(review: &Review, smallfiles: &Vec<StatItem>) -> Option<HunkMap> {
let mut prvec = Vec::<PrHunkItem>::new();
let diffmap = generate_diff(&review, &smallfiles);
log::debug!("[process_review_changes] diffmap = {:?}", &diffmap);
let linemap = process_diffmap(&diffmap);
Expand All @@ -109,6 +126,7 @@ pub async fn commit_check(review: &Review, access_token: &str) {
if !commit_exists(&review.base_head_commit(), &review.clone_dir())
|| !commit_exists(&review.pr_head_commit(), &review.clone_dir()) {
log::info!("Executing git pull on repo {}...", &review.repo_name());
thread::sleep(Duration::from_secs(1));
git_pull(review, access_token).await;
}
}
Expand Down Expand Up @@ -213,7 +231,7 @@ fn create_and_save_github_review_object(deserialized_data: &Value) -> Option<Rev
let repo_provider = ProviderEnum::Github.to_string().to_lowercase();
let clone_opt = get_clone_url_clone_dir(&repo_provider, &repo_owner, &repo_name);
if clone_opt.is_none() {
log::error!("[create_and_save_github_review_object] Unable to get clone url and directory for bitbucket review");
log::error!("[create_and_save_github_review_object] Unable to get clone url and directory for github review");
return None;
}
let (clone_url, clone_dir) = clone_opt.expect("Empty clone_opt");
Expand Down
4 changes: 1 addition & 3 deletions vibi-dpu/src/core/trigger.rs
Original file line number Diff line number Diff line change
Expand Up @@ -48,9 +48,7 @@ pub async fn process_trigger(message_data: &Vec<u8>) {
// commit_check
commit_check(&review, &access_token).await;
// process_review_changes
let hunkmap_opt = process_review_changes(&review).await;
// send_hunkmap
send_hunkmap(&hunkmap_opt, &review, &repo_config, &access_token, &None).await;
process_review_changes(&review, &repo_config, &access_token, &None).await;
}

fn parse_message_fields(msg: &Value) -> Option<TriggerReview> {
Expand Down
2 changes: 1 addition & 1 deletion vibi-dpu/src/db/review.rs
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ pub fn get_review_from_db(repo_name: &str, repo_owner: &str,
let review_res = serde_json::from_slice(&ivec);
if let Err(e) = review_res {
log::error!(
"[get_handles_from_db] Failed to deserialize review from json: {:?}",
"[get_review_from_db] Failed to deserialize review from json: {:?}",
e
);
return None;
Expand Down
Loading

0 comments on commit 0660db6

Please sign in to comment.