Skip to content

Commit

Permalink
refactor info.rs
Browse files Browse the repository at this point in the history
  • Loading branch information
o2sh committed Oct 16, 2020
1 parent 43db0dc commit 8bcab7b
Show file tree
Hide file tree
Showing 3 changed files with 112 additions and 115 deletions.
63 changes: 10 additions & 53 deletions src/onefetch/info.rs
Original file line number Diff line number Diff line change
@@ -1,16 +1,12 @@
use {
crate::onefetch::{
ascii_art::AsciiArt,
cli::Cli,
commit_info::CommitInfo,
error::*,
language::Language,
license::{Detector, LICENSE_FILES},
ascii_art::AsciiArt, cli::Cli, commit_info::CommitInfo, error::*, language::Language,
license::Detector,
},
colored::{Color, ColoredString, Colorize},
git2::Repository,
regex::Regex,
std::{ffi::OsStr, fmt::Write, fs},
std::fmt::Write,
tokio::process::Command,
};

Expand Down Expand Up @@ -312,7 +308,7 @@ impl Info {
.chain_err(|| "Unable to run onefetch on bare git repo")?;
let workdir_str = workdir.to_str().unwrap();
let (languages_stats, number_of_lines) =
Language::get_language_stats(workdir_str, &config.excluded)?;
Language::get_language_statistics(workdir_str, &config.excluded)?;

let (
git_history,
Expand All @@ -336,7 +332,7 @@ impl Info {
let number_of_commits = Info::get_number_of_commits(&git_history);
let authors = Info::get_authors(&git_history, config.number_of_authors);
let last_change = Info::get_date_of_last_commit(&git_history);
let project_license = Info::get_project_license(workdir_str);
let project_license = Detector::new()?.get_project_license(workdir_str);
let dominant_language = Language::get_dominant_language(&languages_stats);

Ok(Info {
Expand Down Expand Up @@ -679,41 +675,12 @@ impl Info {
Ok(output)
}

fn get_project_license(dir: &str) -> Result<String> {
fn is_license_file<S: AsRef<str>>(file_name: S) -> bool {
LICENSE_FILES
.iter()
.any(|&name| file_name.as_ref().starts_with(name))
}

let detector = Detector::new()?;

let mut output = fs::read_dir(dir)
.chain_err(|| "Could not read directory")?
.filter_map(std::result::Result::ok)
.map(|entry| entry.path())
.filter(|entry| {
entry.is_file()
&& entry
.file_name()
.map(OsStr::to_string_lossy)
.map(is_license_file)
.unwrap_or_default()
})
.filter_map(|entry| {
let contents = fs::read_to_string(entry).unwrap_or_default();
detector.analyze(&contents)
})
.collect::<Vec<_>>();

output.sort();
output.dedup();
let output = output.join(", ");

if output == "" {
Ok("??".into())
fn get_formatted_info_label(&self, label: &str, color: Color) -> ColoredString {
let formatted_label = label.color(color);
if self.config.no_bold {
formatted_label
} else {
Ok(output)
formatted_label.bold()
}
}

Expand Down Expand Up @@ -773,16 +740,6 @@ impl Info {
};
Some(color)
}

/// Returns a formatted info label with the desired color and boldness
fn get_formatted_info_label(&self, label: &str, color: Color) -> ColoredString {
let formatted_label = label.color(color);
if self.config.no_bold {
formatted_label
} else {
formatted_label.bold()
}
}
}

fn write_buf<T: std::fmt::Display>(
Expand Down
116 changes: 59 additions & 57 deletions src/onefetch/language.rs
Original file line number Diff line number Diff line change
Expand Up @@ -158,8 +158,26 @@ define_languages! {
}

impl Language {
fn get_languages_stat(languages: &tokei::Languages) -> Option<HashMap<Language, f64>> {
let mut stats = HashMap::new();
/// Returns the language with the largest share of code.
///
/// Expects `languages_stat_vec` to be sorted in descending order of share,
/// as produced by `get_language_statistics` (which sorts before returning).
/// Panics if the slice is empty; `get_language_statistics` guards against
/// that by erroring out when no source code is found — TODO confirm no
/// other caller passes an empty slice.
pub fn get_dominant_language(languages_stat_vec: &[(Language, f64)]) -> Language {
    languages_stat_vec[0].0.clone()
}

/// Computes per-language code distribution (as percentages) for `dir`,
/// skipping `ignored_directories`, together with the total lines of code.
///
/// Returns the distribution sorted largest-share first. Errors when the
/// directory contains no recognizable source code.
pub fn get_language_statistics(
    dir: &str,
    ignored_directories: &[String],
) -> Result<(Vec<(Language, f64)>, usize)> {
    // Run the tokei scan once and derive both outputs from it.
    let stats = Language::get_statistics(dir, ignored_directories);
    let distribution = Language::get_language_distribution(&stats)
        .ok_or("Could not find any source code in this directory")?;

    // Flatten the map into pairs and order by share, descending.
    let mut sorted_distribution: Vec<(Language, f64)> = distribution.into_iter().collect();
    // Percentages are finite (derived from counts), so partial_cmp cannot be None here.
    sorted_distribution.sort_by(|left, right| right.1.partial_cmp(&left.1).unwrap());

    let total_loc = Language::get_total_loc(&stats);
    Ok((sorted_distribution, total_loc))
}

fn get_language_distribution(languages: &tokei::Languages) -> Option<HashMap<Language, f64>> {
let mut language_distribution = HashMap::new();

for (language_type, language) in languages.iter() {
let mut code = language.code;
Expand All @@ -178,80 +196,64 @@ impl Language {
continue;
}

stats.insert(Language::from(*language_type), code as f64);
language_distribution.insert(Language::from(*language_type), code as f64);
}

let total: f64 = stats.iter().map(|(_, v)| v).sum();
let total: f64 = language_distribution.iter().map(|(_, v)| v).sum();

if total.abs() < f64::EPSILON {
None
} else {
for (_, val) in stats.iter_mut() {
for (_, val) in language_distribution.iter_mut() {
*val /= total;
*val *= 100_f64;
}

Some(stats)
Some(language_distribution)
}
}

pub fn get_language_stats(
dir: &str,
ignored_directories: &[String],
) -> Result<(Vec<(Language, f64)>, usize)> {
let tokei_langs = project_languages(&dir, ignored_directories);
let languages_stat = Language::get_languages_stat(&tokei_langs)
.ok_or("Could not find any source code in this directory")?;
let mut stat_vec: Vec<(_, _)> = languages_stat.into_iter().collect();
stat_vec.sort_by(|a, b| a.1.partial_cmp(&b.1).unwrap().reverse());
let loc = get_total_loc(&tokei_langs);
Ok((stat_vec, loc))
}

pub fn get_dominant_language(languages_stat_vec: &[(Language, f64)]) -> Language {
languages_stat_vec[0].0.clone()
fn get_total_loc(languages: &tokei::Languages) -> usize {
languages
.values()
.collect::<Vec<&tokei::Language>>()
.iter()
.fold(0, |sum, val| sum + val.code)
}
}

fn get_total_loc(languages: &tokei::Languages) -> usize {
languages
.values()
.collect::<Vec<&tokei::Language>>()
.iter()
.fold(0, |sum, val| sum + val.code)
}
fn get_statistics(dir: &str, ignored_directories: &[String]) -> tokei::Languages {
use tokei::Config;

fn project_languages(dir: &str, ignored_directories: &[String]) -> tokei::Languages {
use tokei::Config;
let mut languages = tokei::Languages::new();
let required_languages = get_all_language_types();
let tokei_config = Config {
types: Some(required_languages),
..Config::default()
};

let mut languages = tokei::Languages::new();
let required_languages = get_all_language_types();
let tokei_config = Config {
types: Some(required_languages),
..Config::default()
};

if !ignored_directories.is_empty() {
let re = Regex::new(r"((.*)+/)+(.*)").unwrap();
let mut v = Vec::with_capacity(ignored_directories.len());
for ignored in ignored_directories {
if re.is_match(&ignored) {
let p = if ignored.starts_with('/') {
"**"
if !ignored_directories.is_empty() {
let re = Regex::new(r"((.*)+/)+(.*)").unwrap();
let mut v = Vec::with_capacity(ignored_directories.len());
for ignored in ignored_directories {
if re.is_match(&ignored) {
let p = if ignored.starts_with('/') {
"**"
} else {
"**/"
};
v.push(format!("{}{}", p, ignored));
} else {
"**/"
};
v.push(format!("{}{}", p, ignored));
} else {
v.push(String::from(ignored));
v.push(String::from(ignored));
}
}
let ignored_directories_for_ab: Vec<&str> = v.iter().map(|x| &**x).collect();
languages.get_statistics(&[&dir], &ignored_directories_for_ab, &tokei_config);
} else {
let ignored_directories_ref: Vec<&str> =
ignored_directories.iter().map(|s| &**s).collect();
languages.get_statistics(&[&dir], &ignored_directories_ref, &tokei_config);
}
let ignored_directories_for_ab: Vec<&str> = v.iter().map(|x| &**x).collect();
languages.get_statistics(&[&dir], &ignored_directories_for_ab, &tokei_config);
} else {
let ignored_directories_ref: Vec<&str> = ignored_directories.iter().map(|s| &**s).collect();
languages.get_statistics(&[&dir], &ignored_directories_ref, &tokei_config);
}

languages
languages
}
}
48 changes: 43 additions & 5 deletions src/onefetch/license.rs
Original file line number Diff line number Diff line change
@@ -1,8 +1,10 @@
use askalono::{Store, TextData};
use {
crate::onefetch::error::*,
askalono::{Store, TextData},
std::{ffi::OsStr, fs},
};

use crate::onefetch::error::*;

pub const LICENSE_FILES: [&str; 3] = ["LICENSE", "LICENCE", "COPYING"];
const LICENSE_FILES: [&str; 3] = ["LICENSE", "LICENCE", "COPYING"];

static CACHE_DATA: &[u8] = include_bytes!(concat!(
env!("CARGO_MANIFEST_DIR"),
Expand All @@ -21,7 +23,43 @@ impl Detector {
.map_err(|_| "Could not initialize the license detector".into())
}

pub fn analyze(&self, text: &str) -> Option<String> {
/// Detects the license(s) of the project rooted at `dir`.
///
/// Scans the top level of `dir` for files whose names start with one of
/// `LICENSE_FILES` (LICENSE / LICENCE / COPYING), runs each through the
/// askalono-backed `analyze`, and returns the deduplicated, sorted license
/// identifiers joined with ", ". Returns "??" when no license could be
/// identified, and an error when the directory cannot be read.
pub fn get_project_license(&self, dir: &str) -> Result<String> {
    // Matches by filename prefix so variants like "LICENSE-MIT" or
    // "LICENSE.txt" are also picked up.
    fn is_license_file<S: AsRef<str>>(file_name: S) -> bool {
        LICENSE_FILES
            .iter()
            .any(|&name| file_name.as_ref().starts_with(name))
    }

    let mut output = fs::read_dir(dir)
        .chain_err(|| "Could not read directory")?
        .filter_map(std::result::Result::ok)
        .map(|entry| entry.path())
        .filter(|entry| {
            entry.is_file()
                && entry
                    .file_name()
                    .map(OsStr::to_string_lossy)
                    .map(is_license_file)
                    // No file name (path ends in "..") -> not a license file.
                    .unwrap_or_default()
        })
        .filter_map(|entry| {
            // Unreadable candidate files degrade to empty text, which
            // simply fails to match any license below the threshold.
            let contents = fs::read_to_string(entry).unwrap_or_default();
            self.analyze(&contents)
        })
        .collect::<Vec<_>>();

    output.sort();
    output.dedup();
    let output = output.join(", ");

    // is_empty() instead of comparing against "" (clippy::comparison_to_empty).
    if output.is_empty() {
        Ok("??".into())
    } else {
        Ok(output)
    }
}

fn analyze(&self, text: &str) -> Option<String> {
let matched = self.store.analyze(&TextData::from(text));

if matched.score >= MIN_THRESHOLD {
Expand Down

0 comments on commit 8bcab7b

Please sign in to comment.