Merge branch 'master' into refactored_pattern2
aravij authored Jul 10, 2020
2 parents 22848d4 + e604d7c commit 27cd420
Showing 204 changed files with 2,129 additions and 343 deletions.
README.md: 2 changes (1 addition, 1 deletion)
@@ -2,7 +2,7 @@

[![PyPi version](https://img.shields.io/pypi/v/aibolit.svg)](https://pypi.org/project/aibolit/)
[![Build Status](https://travis-ci.org/cqfn/aibolit.svg)](https://travis-ci.org/cqfn/aibolit)
[![Build status](https://ci.appveyor.com/api/projects/status/wjjcd6amnt3p2tdg?svg=true)](https://ci.appveyor.com/project/yegor256/aibolit-lyf56)
[![Build status](https://ci.appveyor.com/api/projects/status/gr3eqxq5pr87kpwg?svg=true)](https://ci.appveyor.com/project/yegor256/aibolit)
[![Hits-of-Code](https://hitsofcode.com/github/cqfn/aibolit)](https://hitsofcode.com/view/github/cqfn/aibolit)
[![Test Coverage](https://img.shields.io/codecov/c/github/cqfn/aibolit.svg)](https://codecov.io/github/cqfn/aibolit?branch=master)
[![Maintainability](https://api.codeclimate.com/v1/badges/fd7e32d8472b4d5e8ecb/maintainability)](https://codeclimate.com/github/cqfn/aibolit/maintainability)
aibolit/__main__.py: 128 changes (121 additions, 7 deletions)
@@ -33,10 +33,13 @@
import sys
import time
import traceback
from collections import defaultdict
from os import scandir
from pathlib import Path
from sys import stdout
from typing import List
from typing import List, Any, Dict, Tuple

import javalang
import numpy as np # type: ignore
import requests
from lxml import etree # type: ignore
@@ -45,7 +48,7 @@
from aibolit import __version__
from aibolit.config import Config
from aibolit.ml_pipeline.ml_pipeline import train_process, collect_dataset
from aibolit.model.model import TwoFoldRankingModel, Dataset # type: ignore # noqa: F401
from aibolit.utils.ast_builder import build_ast

dir_path = os.path.dirname(os.path.realpath(__file__))

@@ -159,6 +162,90 @@ def __count_value(value_dict, input_params, code_lines_dict, java_file: str, is_
)


def flatten(l):
    return [item for sublist in l for item in sublist]


def add_pattern_if_ignored(
        dct: Dict[str, Any],
        pattern_item: Dict[Any, Any],
        results_list: List[Any]) -> None:
    """Add the pattern to the result list unless all of its lines are ignored.

    :param dct: dict where the key is a pattern code and the value is a list of line ranges to ignore
    :param pattern_item: pattern dict obtained from the `inference` function
    :param results_list: result list to append to
    """
    ignored_lines = dct.get(pattern_item['pattern_code'])
    if ignored_lines:
        for place in ignored_lines:
            # get the line range of the ignored code
            start_line_to_ignore = place[0]
            end_line_to_ignore = place[1]
            new_code_lines = []
            for line in pattern_item['code_lines']:
                if (line >= start_line_to_ignore) and (line <= end_line_to_ignore):
                    continue
                else:
                    new_code_lines.append(line)
            pattern_item['code_lines'] = new_code_lines
        if len(pattern_item['code_lines']) > 0:
            results_list.append(pattern_item)
    else:
        results_list.append(pattern_item)
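
For orientation, a minimal usage sketch (not part of this commit; the pattern code, line numbers and ignore range below are invented):

ignored = {'P13': [[10, 20]]}  # invented: P13 is suppressed for lines 10-20
finding = {'pattern_code': 'P13', 'code_lines': [5, 12, 25]}
kept = []
add_pattern_if_ignored(ignored, finding, kept)
# Line 12 falls inside the ignored range and is dropped; lines 5 and 25 survive,
# so the finding is kept: kept == [{'pattern_code': 'P13', 'code_lines': [5, 25]}]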


def find_annotation_by_node_type(
        tree: javalang.tree.CompilationUnit,
        node_type) -> Dict[Any, Any]:
    """Search for nodes with aibolit annotations.

    :param tree: javalang.tree
    :param node_type: node type of javalang.tree
    :return: dict with annotations, where the key is a node and the value is a list of string annotations
    """
    annotations = defaultdict(list)
    for _, node in tree.filter(node_type):
        if node.annotations:
            for a in node.annotations:
                if hasattr(a.element, 'value'):
                    if 'aibolit' in a.element.value:
                        annotations[node].append(
                            a.element.value.split('.')[1].rstrip('\"')
                        )
                elif hasattr(a.element, 'values'):
                    for j in a.element.values:
                        if 'aibolit' in j.value:
                            annotations[node].append(
                                j.value.split('.')[1].rstrip('\"')
                            )
    return annotations
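
A rough sketch of how this helper is meant to be fed; the Java snippet and the exact annotation convention (presumably @SuppressWarnings carrying an "aibolit.<pattern>" string) are assumptions for illustration, not taken from this commit:

import javalang

java_source = '''
class Foo {
    @SuppressWarnings("aibolit.P13")
    void bar() { }
}
'''
tree = javalang.parse.parse(java_source)
suppressed = find_annotation_by_node_type(tree, javalang.tree.MethodDeclaration)
# Expected shape: {<MethodDeclaration bar>: ['P13']} -- the annotation value is split
# on '.' and the trailing quote stripped, leaving only the pattern code.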


def find_start_and_end_lines(node) -> Tuple[int, int]:
    max_line = node.position.line

    def traverse(node):
        nonlocal max_line
        if hasattr(node, 'children'):
            for child in node.children:
                if isinstance(child, list) and (len(child) > 0):
                    for item in child:
                        traverse(item)
                else:
                    if hasattr(child, '_position'):
                        if child._position.line > max_line:
                            max_line = child._position.line

    traverse(node)
    return node.position.line, max_line
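
Continuing the sketch above, the line span recorded for an ignored method would be obtained roughly like this (illustrative only):

method = next(node for _, node in tree.filter(javalang.tree.MethodDeclaration))
start, end = find_start_and_end_lines(method)
# `start` is the line of the declaration of `bar`; `end` is the largest line number
# found among its descendants that carry a `_position` attribute, i.e. roughly the
# last line of the method body.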


def calculate_patterns_and_metrics(file, args):
    code_lines_dict = input_params = {}  # type: ignore
    error_string = None
@@ -241,6 +328,23 @@ def run_recommend_for_file(file: str, args):
    :return: dict with code lines, filename and pattern name
    """
    java_file = str(Path(os.getcwd(), file))
    tree = build_ast(file)
    classes_with_annotations = find_annotation_by_node_type(tree, javalang.tree.ClassDeclaration)
    functions_with_annotations = find_annotation_by_node_type(tree, javalang.tree.MethodDeclaration)
    fields_with_annotations = find_annotation_by_node_type(tree, javalang.tree.FieldDeclaration)
    classes_with_patterns_ignored = flatten(
        [pattern_code for node, pattern_code in classes_with_annotations.items()])
    patterns_ignored = defaultdict(list)

    for node, patterns_list in functions_with_annotations.items():
        start_pos, end_pos = find_start_and_end_lines(node)
        for p in patterns_list:
            patterns_ignored[p].append([start_pos, end_pos])

    for node, patterns_list in fields_with_annotations.items():
        for p in patterns_list:
            patterns_ignored[p].append([node.position.line, node.position.line])

    input_params, code_lines_dict, error_string = calculate_patterns_and_metrics(java_file, args)

    if not input_params:
@@ -252,6 +356,17 @@
        error_string = 'Empty java file; ncss = 0'
    else:
        results_list = inference(input_params, code_lines_dict, args)
        new_results: List[Any] = []
        for pattern_item in results_list:
            # check if the whole class is suppressed
            if pattern_item['pattern_code'] not in classes_with_patterns_ignored:
                # then check if patterns are ignored in fields or functions
                add_pattern_if_ignored(patterns_ignored, pattern_item, new_results)
                # add_pattern_if_ignored(patterns_for_fields_ignored, pattern_item, new_results)
            else:
                continue

        results_list = new_results

    if error_string:
        ncss = 0
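
Taken together, the data feeding the suppression filter above looks roughly like this (all pattern codes, lines and ranges are invented for illustration):

classes_with_patterns_ignored = ['P12']        # suppressed for the whole class
patterns_ignored = {'P13': [[10, 25]]}         # suppressed for one method's line range
results_list = [
    {'pattern_code': 'P12', 'code_lines': [3]},   # dropped: class-wide suppression
    {'pattern_code': 'P13', 'code_lines': [12]},  # dropped: every line is inside an ignored range
    {'pattern_code': 'P13', 'code_lines': [30]},  # kept: lies outside all ignored ranges
]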
@@ -586,12 +701,11 @@ def handle_exclude_command_line(args):


def format_converter_for_pattern(results, sorted_by=None):
    """
    Reformat data sorted by pattern importance
    (the input is already sorted that way).
    Lines are then sorted in ascending order.
    """

    for file in results:
        items = file.get('results')