diff --git a/analyzer/codechecker_analyzer/analysis_manager.py b/analyzer/codechecker_analyzer/analysis_manager.py index a981624533..af1cc4e908 100644 --- a/analyzer/codechecker_analyzer/analysis_manager.py +++ b/analyzer/codechecker_analyzer/analysis_manager.py @@ -35,66 +35,52 @@ LOG = get_logger('analyzer') -def print_analyzer_statistic_summary(statistics, status, msg=None): +def print_analyzer_statistic_summary(metadata_analyzers, status, msg=None): """ Print analyzer statistic summary for the given status code with the given section heading message. """ - has_status = sum((res.get(status, 0) for res in - (statistics[i] for i in statistics))) + has_status = False + for _, analyzer in metadata_analyzers.items(): + if analyzer.get('analyzer_statistics', {}).get(status): + has_status = True + break if has_status and msg: LOG.info(msg) - for analyzer_type, res in statistics.items(): - successful = res[status] - if successful: - LOG.info(" %s: %s", analyzer_type, successful) - - -def worker_result_handler(results, metadata, output_path, analyzer_binaries): - """ - Print the analysis summary. - """ + for analyzer_type, analyzer in metadata_analyzers.items(): + res = analyzer.get('analyzer_statistics', {}).get(status) + if res: + LOG.info(" %s: %s", analyzer_type, res) - if metadata is None: - metadata = {} +def worker_result_handler(results, metadata_tool, output_path, + analyzer_binaries): + """ Print the analysis summary. """ skipped_num = 0 reanalyzed_num = 0 - statistics = {} - + metadata_analyzers = metadata_tool['analyzers'] for res, skipped, reanalyzed, analyzer_type, _, sources in results: + statistics = metadata_analyzers[analyzer_type]['analyzer_statistics'] if skipped: skipped_num += 1 else: if reanalyzed: reanalyzed_num += 1 - if analyzer_type not in statistics: - analyzer_bin = analyzer_binaries[analyzer_type] - analyzer_version = \ - metadata.get('versions', {}).get(analyzer_bin) - - statistics[analyzer_type] = { - "failed": 0, - "failed_sources": [], - "successful": 0, - "version": analyzer_version - } - if res == 0: - statistics[analyzer_type]['successful'] += 1 + statistics['successful'] += 1 else: - statistics[analyzer_type]['failed'] += 1 - statistics[analyzer_type]['failed_sources'].append(sources) + statistics['failed'] += 1 + statistics['failed_sources'].append(sources) LOG.info("----==== Summary ====----") - print_analyzer_statistic_summary(statistics, + print_analyzer_statistic_summary(metadata_analyzers, 'successful', 'Successfully analyzed') - print_analyzer_statistic_summary(statistics, + print_analyzer_statistic_summary(metadata_analyzers, 'failed', 'Failed to analyze') @@ -103,8 +89,7 @@ def worker_result_handler(results, metadata, output_path, analyzer_binaries): if skipped_num: LOG.info("Skipped compilation commands: %d", skipped_num) - metadata['skipped'] = skipped_num - metadata['analyzer_statistics'] = statistics + metadata_tool['skipped'] = skipped_num # check() created the result .plist files and additional, per-analysis # meta information in forms of .plist.source files. @@ -121,9 +106,9 @@ def worker_result_handler(results, metadata, output_path, analyzer_binaries): err_file, _ = os.path.splitext(f) plist_file = os.path.basename(err_file) + ".plist" plist_file = os.path.join(output_path, plist_file) - metadata['result_source_files'].pop(plist_file, None) + metadata_tool['result_source_files'].pop(plist_file, None) - metadata['result_source_files'].update(source_map) + metadata_tool['result_source_files'].update(source_map) # Progress reporting. 
@@ -680,7 +665,7 @@ def skip_cpp(compile_actions, skip_handler): def start_workers(actions_map, actions, context, analyzer_config_map, - jobs, output_path, skip_handler, metadata, + jobs, output_path, skip_handler, metadata_tool, quiet_analyze, capture_analysis_output, timeout, ctu_reanalyze_on_failure, statistics_data, manager, compile_cmd_count): @@ -752,7 +737,7 @@ def signal_handler(signum, frame): analyzed_actions, 1, callback=lambda results: worker_result_handler( - results, metadata, output_path, + results, metadata_tool, output_path, context.analyzer_binaries) ).get(31557600) diff --git a/analyzer/codechecker_analyzer/analyzer.py b/analyzer/codechecker_analyzer/analyzer.py index 3fbd153288..f2ece9e6f0 100644 --- a/analyzer/codechecker_analyzer/analyzer.py +++ b/analyzer/codechecker_analyzer/analyzer.py @@ -140,7 +140,7 @@ def __get_statistics_data(args, manager): return statistics_data -def perform_analysis(args, skip_handler, context, actions, metadata, +def perform_analysis(args, skip_handler, context, actions, metadata_tool, compile_cmd_count): """ Perform static analysis via the given (or if not, all) analyzers, @@ -212,18 +212,28 @@ def perform_analysis(args, skip_handler, context, actions, metadata, config_map[ClangSA.ANALYZER_NAME].set_checker_enabled( ReturnValueCollector.checker_collect, False) - # Save some metadata information. - versions = __get_analyzer_version(context, config_map) - metadata['versions'].update(versions) + check_env = env.extend(context.path_env_extra, + context.ld_lib_path_extra) - metadata['checkers'] = {} + # Save some metadata information. for analyzer in analyzers: - metadata['checkers'][analyzer] = {} + metadata_info = { + 'checkers': {}, + 'analyzer_statistics': { + "failed": 0, + "failed_sources": [], + "successful": 0, + "version": None}} for check, data in config_map[analyzer].checks().items(): state, _ = data - metadata['checkers'][analyzer].update( - {check: state == CheckerState.enabled}) + metadata_info['checkers'].update({ + check: state == CheckerState.enabled}) + + version = config_map[analyzer].get_version(check_env) + metadata_info['analyzer_statistics']['version'] = version + + metadata_tool['analyzers'][analyzer] = metadata_info if ctu_collect: shutil.rmtree(ctu_dir, ignore_errors=True) @@ -291,7 +301,7 @@ def perform_analysis(args, skip_handler, context, actions, metadata, config_map, args.jobs, args.output_path, skip_handler, - metadata, + metadata_tool, 'quiet' in args, 'capture_analysis_output' in args, args.timeout if 'timeout' in args @@ -311,8 +321,8 @@ def perform_analysis(args, skip_handler, context, actions, metadata, end_time = time.time() LOG.info("Analysis length: %s sec.", end_time - start_time) - metadata['timestamps'] = {'begin': start_time, - 'end': end_time} + metadata_tool['timestamps'] = {'begin': start_time, + 'end': end_time} if ctu_collect and ctu_analyze: shutil.rmtree(ctu_dir, ignore_errors=True) diff --git a/analyzer/codechecker_analyzer/analyzers/config_handler.py b/analyzer/codechecker_analyzer/analyzers/config_handler.py index 3a9e156d05..87399bdbd5 100644 --- a/analyzer/codechecker_analyzer/analyzers/config_handler.py +++ b/analyzer/codechecker_analyzer/analyzers/config_handler.py @@ -12,6 +12,7 @@ import collections import os import platform +import subprocess import sys from codechecker_common.logger import get_logger @@ -73,6 +74,23 @@ def analyzer_plugins(self): and f.endswith(".so")] return analyzer_plugins + def get_version(self, env=None): + """ Get analyzer version information. 
""" + version = [self.analyzer_binary, '--version'] + try: + output = subprocess.check_output(version, + env=env, + universal_newlines=True, + encoding="utf-8", + errors="ignore") + return output + except (subprocess.CalledProcessError, OSError) as oerr: + LOG.warning("Failed to get analyzer version: %s", + ' '.join(version)) + LOG.warning(oerr) + + return None + def add_checker(self, checker_name, description=None, state=None): """ Add additional checker. If no state argument is given, the actual usage diff --git a/analyzer/codechecker_analyzer/cmd/analyze.py b/analyzer/codechecker_analyzer/cmd/analyze.py index cf2c01846e..51476722ec 100644 --- a/analyzer/codechecker_analyzer/cmd/analyze.py +++ b/analyzer/codechecker_analyzer/cmd/analyze.py @@ -619,7 +619,7 @@ def __cleanup_metadata(metadata_prev, metadata): if not metadata_prev: return - result_src_files = metadata_prev['result_source_files'] + result_src_files = __get_result_source_files(metadata_prev) for plist_file, source_file in result_src_files.items(): if not os.path.exists(source_file): try: @@ -632,6 +632,19 @@ def __cleanup_metadata(metadata_prev, metadata): LOG.warning("Failed to remove plist file: %s", plist_file) +def __get_result_source_files(metadata): + """ Get result source files from the given metadata. """ + if 'result_source_files' in metadata: + return metadata['result_source_files'] + + result_src_files = {} + for tool in metadata.get('tools', {}): + r_src_files = tool.get('result_source_files', {}) + result_src_files.update(r_src_files.items()) + + return result_src_files + + def main(args): """ Perform analysis on the given logfiles and store the results in a machine- @@ -762,26 +775,31 @@ def main(args): json.dump(actions, f, cls=log_parser.CompileCommandEncoder) - metadata = {'action_num': len(actions), - 'command': sys.argv, - 'versions': { - 'codechecker': "{0} ({1})".format( - context.package_git_tag, - context.package_git_hash)}, - 'working_directory': os.getcwd(), - 'output_path': args.output_path, - 'result_source_files': {}} + metadata = { + 'version': 2, + 'tools': [{ + 'name': 'codechecker', + 'action_num': len(actions), + 'command': sys.argv, + 'version': "{0} ({1})".format(context.package_git_tag, + context.package_git_hash), + 'working_directory': os.getcwd(), + 'output_path': args.output_path, + 'result_source_files': {}, + 'analyzers': {} + }]} + metadata_tool = metadata['tools'][0] if 'name' in args: - metadata['name'] = args.name + metadata_tool['run_name'] = args.name # Update metadata dictionary with old values. 
metadata_file = os.path.join(args.output_path, 'metadata.json') metadata_prev = None if os.path.exists(metadata_file): metadata_prev = load_json_or_empty(metadata_file) - metadata['result_source_files'] = \ - dict(metadata_prev['result_source_files']) + metadata_tool['result_source_files'] = \ + __get_result_source_files(metadata_prev) CompileCmdParseCount = \ collections.namedtuple('CompileCmdParseCount', @@ -809,7 +827,7 @@ def main(args): compile_cmd_count.analyze) analyzer.perform_analysis(args, skip_handler, context, actions, - metadata, + metadata_tool, compile_cmd_count) __update_skip_file(args) diff --git a/analyzer/codechecker_analyzer/cmd/parse.py b/analyzer/codechecker_analyzer/cmd/parse.py index 5e3c01a392..f2113db9a8 100644 --- a/analyzer/codechecker_analyzer/cmd/parse.py +++ b/analyzer/codechecker_analyzer/cmd/parse.py @@ -457,10 +457,17 @@ def parse(plist_file, metadata_dict, rh, file_report_map): LOG.debug("Parsing input file '%s'", plist_file) - if 'result_source_files' in metadata_dict and \ - plist_file in metadata_dict['result_source_files']: + result_source_files = {} + if 'result_source_files' in metadata_dict: + result_source_files = metadata_dict['result_source_files'] + else: + for tool in metadata_dict.get('tools', {}): + result_src_files = tool.get('result_source_files', {}) + result_source_files.update(result_src_files.items()) + + if plist_file in result_source_files: analyzed_source_file = \ - metadata_dict['result_source_files'][plist_file] + result_source_files[plist_file] if analyzed_source_file not in file_report_map: file_report_map[analyzed_source_file] = [] diff --git a/analyzer/tests/functional/analyze_and_parse/test_files/context_sensitive_hash_clang_tidy.output b/analyzer/tests/functional/analyze_and_parse/test_files/context_sensitive_hash_clang_tidy.output index 45c35423ab..4ee0c35ac4 100644 --- a/analyzer/tests/functional/analyze_and_parse/test_files/context_sensitive_hash_clang_tidy.output +++ b/analyzer/tests/functional/analyze_and_parse/test_files/context_sensitive_hash_clang_tidy.output @@ -1,7 +1,7 @@ NORMAL#CodeChecker log --output $LOGFILE$ --build "make context_hash" --quiet NORMAL#CodeChecker analyze $LOGFILE$ --output $OUTPUT$ --analyzers clang-tidy NORMAL#CodeChecker parse $OUTPUT$ --print-steps -CHECK#CodeChecker check --build "make context_hash" --output $OUTPUT$ --quiet --print-steps --analyer clang-tidy +CHECK#CodeChecker check --build "make context_hash" --output $OUTPUT$ --quiet --print-steps --analyzers clang-tidy -------------------------------------------------------------------------------- [] - Starting build ... [] - Build finished successfully. diff --git a/docs/web/user_guide.md b/docs/web/user_guide.md index 1ded036927..f5ebfc4353 100644 --- a/docs/web/user_guide.md +++ b/docs/web/user_guide.md @@ -173,6 +173,8 @@ database. positional arguments: file/folder The analysis result files and/or folders containing analysis results which should be parsed and printed. + If multiple report directories are given, OFF and + UNAVAILABLE detection statuses will not be available. 
(default: /home//.codechecker/reports) optional arguments: diff --git a/web/client/codechecker_client/cmd/store.py b/web/client/codechecker_client/cmd/store.py index 8c098488d0..7903ddc930 100644 --- a/web/client/codechecker_client/cmd/store.py +++ b/web/client/codechecker_client/cmd/store.py @@ -24,6 +24,7 @@ from codechecker_api_shared.ttypes import RequestFailed, ErrorCode from codechecker_client import client as libclient +from codechecker_client.metadata import merge_metadata_json from codechecker_common import logger from codechecker_common import util @@ -102,7 +103,9 @@ def add_arguments_to_parser(parser): 'reports'), help="The analysis result files and/or folders " "containing analysis results which should be " - "parsed and printed.") + "parsed and printed. If multiple report " + "directories are given, OFF and UNAVAILABLE " + "detection statuses will not be available.") parser.add_argument('-t', '--type', '--input-format', dest="input_format", @@ -180,19 +183,27 @@ def __get_run_name(input_list): """Create a runname for the stored analysis from the input list.""" # Try to create a name from the metada JSON(s). - names = [] + names = set() for input_path in input_list: metafile = os.path.join(input_path, "metadata.json") if os.path.isdir(input_path) and os.path.exists(metafile): metajson = util.load_json_or_empty(metafile) - if 'name' in metajson: - names.append(metajson['name']) + if 'version' in metajson and metajson['version'] >= 2: + for tool in metajson.get('tools', {}): + name = tool.get('run_name') else: - names.append("unnamed result folder") + name = metajson.get('name') - if len(names) == 1 and names[0] != "unnamed result folder": - return names[0] + if not name: + name = "unnamed result folder" + + names.add(name) + + if len(names) == 1: + name = names.pop() + if name != "unnamed result folder": + return name elif len(names) > 1: return "multiple projects: " + ', '.join(names) else: @@ -276,7 +287,8 @@ def collect_file_hashes_from_plist(plist_file): traceback.print_stack() LOG.error('Parsing the plist failed: %s', str(ex)) - files_to_compress = [] + files_to_compress = set() + metadata_json_to_compress = set() changed_files = set() for input_path in inputs: @@ -300,7 +312,7 @@ def collect_file_hashes_from_plist(plist_file): if not missing_files: LOG.debug("Copying file '%s' to ZIP assembly dir...", plist_file) - files_to_compress.append(os.path.join(input_path, f)) + files_to_compress.add(os.path.join(input_path, f)) plist_mtime = util.get_last_mod_time(plist_file) @@ -314,9 +326,9 @@ def collect_file_hashes_from_plist(plist_file): "the following missing source files: %s", plist_file, missing_files) elif f == 'metadata.json': - files_to_compress.append(os.path.join(input_path, f)) + metadata_json_to_compress.add(os.path.join(input_path, f)) elif f == 'skip_file': - files_to_compress.append(os.path.join(input_path, f)) + files_to_compress.add(os.path.join(input_path, f)) if changed_files: changed_files = '\n'.join([' - ' + f for f in changed_files]) @@ -332,6 +344,11 @@ def collect_file_hashes_from_plist(plist_file): zip_target = os.path.join('reports', filename) zipf.write(ftc, zip_target) + merged_metadata = merge_metadata_json(metadata_json_to_compress, + len(inputs)) + zipf.writestr(os.path.join('reports', 'metadata.json'), + json.dumps(merged_metadata)) + if not hash_to_file: LOG.warning("There is no report to store. 
After uploading these " "results the previous reports become resolved.") diff --git a/web/client/codechecker_client/metadata.py b/web/client/codechecker_client/metadata.py new file mode 100644 index 0000000000..e5037359ce --- /dev/null +++ b/web/client/codechecker_client/metadata.py @@ -0,0 +1,71 @@ +# ------------------------------------------------------------------------- +# The CodeChecker Infrastructure +# This file is distributed under the University of Illinois Open Source +# License. See LICENSE.TXT for details. +# ------------------------------------------------------------------------- +""" +Helpers to manage metadata.json file. +""" + +from codechecker_common.logger import get_logger +from codechecker_common.util import load_json_or_empty + +LOG = get_logger('system') + + +def metadata_v1_to_v2(metadata_dict): + """ Convert old version metadata to a new version format. """ + + if 'version' in metadata_dict and metadata_dict['version'] >= 2: + return metadata_dict + + ret = {'version': 2, 'tools': []} + tool = { + 'name': 'codechecker', + 'version': metadata_dict.get('versions', {}).get('codechecker'), + 'command': metadata_dict.get('command'), + 'output_path': metadata_dict.get('output_path'), + 'skipped': metadata_dict.get('skipped'), + 'timestamps': metadata_dict.get('timestamps'), + 'working_directory': metadata_dict.get('working_directory'), + 'analyzers': {}, + 'result_source_files': metadata_dict.get('result_source_files')} + + for analyzer_name in sorted(metadata_dict['checkers'].keys()): + checkers = metadata_dict['checkers'][analyzer_name] + if not isinstance(checkers, dict): + checkers = {checker_name: True for checker_name in checkers} + + analyzer_stats = metadata_dict.get('analyzer_statistics', {}) + + tool['analyzers'][analyzer_name] = { + 'checkers': checkers, + 'analyzer_statistics': analyzer_stats.get(analyzer_name, {})} + + ret["tools"].append(tool) + + return ret + + +def merge_metadata_json(metadata_files, num_of_report_dir=1): + """ Merge content of multiple metadata files and return it as json. 
""" + + if not metadata_files: + return {} + + ret = { + 'version': 2, + 'num_of_report_dir': num_of_report_dir, + 'tools': []} + + for metadata_file in metadata_files: + try: + metadata_dict = load_json_or_empty(metadata_file, {}) + metadata = metadata_v1_to_v2(metadata_dict) + for tool in metadata['tools']: + ret['tools'].append(tool) + except Exception as ex: + LOG.warning('Failed to parse %s file with the following error: %s', + metadata_file, str(ex)) + + return ret diff --git a/web/server/codechecker_server/api/report_server.py b/web/server/codechecker_server/api/report_server.py index c8a3c5e5a6..b05ede4171 100644 --- a/web/server/codechecker_server/api/report_server.py +++ b/web/server/codechecker_server/api/report_server.py @@ -50,6 +50,7 @@ AnalyzerStatistic, Report, ReviewStatus, File, Run, RunHistory, \ RunLock, Comment, BugPathEvent, BugReportPoint, \ FileContent, SourceComponent, ExtendedReportData +from ..metadata import MetadataInfoParser from ..tmp import TemporaryDirectory from .db import DBSession, escape_like @@ -2816,8 +2817,9 @@ def massStoreRun(self, name, tag, version, b64zip, force, run_history_time = datetime.now() + metadata_parser = MetadataInfoParser() check_commands, check_durations, cc_version, statistics, \ - checkers = store_handler.metadata_info(metadata_file) + checkers = metadata_parser.get_metadata_info(metadata_file) command = '' if len(check_commands) == 1: diff --git a/web/server/codechecker_server/metadata.py b/web/server/codechecker_server/metadata.py new file mode 100644 index 0000000000..f979b74317 --- /dev/null +++ b/web/server/codechecker_server/metadata.py @@ -0,0 +1,150 @@ +# ------------------------------------------------------------------------- +# The CodeChecker Infrastructure +# This file is distributed under the University of Illinois Open Source +# License. See LICENSE.TXT for details. +# ------------------------------------------------------------------------- +""" +Helpers to parse metadata.json file. +""" + +from abc import ABCMeta +import os + + +from codechecker_common.logger import get_logger +from codechecker_common.util import load_json_or_empty + +LOG = get_logger('system') + + +class MetadataInfoParser(object): + """ Metadata info parser. """ + + __metaclass__ = ABCMeta + + def __get_metadata_info_v1(self, metadata_dict): + """ Get metadata information from the old version json file. """ + check_commands = [] + check_durations = [] + cc_version = None + analyzer_statistics = {} + checkers = {} + + if 'command' in metadata_dict: + check_commands.append(metadata_dict['command']) + if 'timestamps' in metadata_dict: + check_durations.append( + float(metadata_dict['timestamps']['end'] - + metadata_dict['timestamps']['begin'])) + + # Get CodeChecker version. + cc_version = metadata_dict.get('versions', {}).get('codechecker') + + # Get analyzer statistics. + analyzer_statistics = metadata_dict.get('analyzer_statistics', {}) + + # Get analyzer checkers. + checkers = metadata_dict.get('checkers', {}) + + return check_commands, check_durations, cc_version, \ + analyzer_statistics, checkers + + def __insert_analyzer_statistics(self, source, dest, analyzer_name): + """ Insert stats from source to dest of the given analyzer. 
""" + if not source: + return + + if analyzer_name in dest: + dest[analyzer_name]['failed'] += source['failed'] + dest[analyzer_name]['failed_sources'].extend( + source['failed_sources']) + dest[analyzer_name]['successful'] += source['successful'] + dest[analyzer_name]['version'].update([source['version']]) + else: + dest[analyzer_name] = source + dest[analyzer_name]['version'] = set([source['version']]) + + def __insert_checkers(self, source, dest, analyzer_name): + """ Insert checkers from source to dest of the given analyzer. """ + if analyzer_name in dest: + d_chks = dest[analyzer_name] + for checker in source: + if checker in d_chks and source[checker] != d_chks[checker]: + LOG.warning('Different checker statuses for %s', checker) + dest[analyzer_name][checker] = source[checker] + else: + dest[analyzer_name] = source + + def __get_metadata_info_v2(self, metadata_dict): + """ Get metadata information from the new version format json file. """ + cc_version = [] + check_commands = [] + check_durations = [] + analyzer_statistics = {} + checkers = {} + + tools = metadata_dict.get('tools', {}) + for tool in tools: + if tool['name'] == 'codechecker' and 'version' in tool: + cc_version.append(tool['version']) + + if 'command' in tool: + check_commands.append(tool['command']) + + if 'timestamps' in tool: + check_durations.append( + float(tool['timestamps']['end'] - + tool['timestamps']['begin'])) + + if 'analyzers' in tool: + for analyzer_name, analyzer_info in tool['analyzers'].items(): + self.__insert_analyzer_statistics( + analyzer_info.get('analyzer_statistics', {}), + analyzer_statistics, + analyzer_name) + + self.__insert_checkers( + analyzer_info.get('checkers', {}), + checkers, + analyzer_name) + else: + self.__insert_analyzer_statistics( + tool.get('analyzer_statistics', {}), + analyzer_statistics, + tool['name']) + + self.__insert_checkers(tool.get('checkers', {}), + checkers, + tool['name']) + + # FIXME: if multiple report directories are stored created by different + # codechecker versions there can be multiple results with OFF detection + # status. If additional reports are stored created with cppcheck all + # the cppcheck analyzer results will be marked with unavailable + # detection status. To solve this problem we will return with an empty + # checker set. This way detection statuses will be calculated properly + # but OFF and UNAVAILABLE checker statuses will never be used. + num_of_report_dir = metadata_dict.get('num_of_report_dir') + if num_of_report_dir > 1: + checkers = {} + + cc_version = '; '.join(cc_version) if cc_version else None + + for analyzer in analyzer_statistics: + analyzer_statistics[analyzer]['version'] = \ + '; '.join(analyzer_statistics[analyzer]['version']) + + return check_commands, check_durations, cc_version, \ + analyzer_statistics, checkers + + def get_metadata_info(self, metadata_file): + """ Get metadata information from the given file. 
""" + if not os.path.isfile(metadata_file): + return [], [], None, {}, {} + + metadata_dict = load_json_or_empty(metadata_file, {}) + + if 'version' in metadata_dict: + return self.__get_metadata_info_v2(metadata_dict) + else: + return self.__get_metadata_info_v1(metadata_dict) diff --git a/web/server/tests/unit/metadata_test_files/v1.json b/web/server/tests/unit/metadata_test_files/v1.json new file mode 100644 index 0000000000..c54930953d --- /dev/null +++ b/web/server/tests/unit/metadata_test_files/v1.json @@ -0,0 +1,50 @@ +{ + "action_num": 1, + "analyzer_statistics": { + "clang-tidy": { + "failed": 0, + "failed_sources": [], + "successful": 10, + "version": "LLVM version 7.0.0" + }, + "clangsa": { + "failed": 0, + "failed_sources": [], + "successful": 1, + "version": "clang version 7.0.0" + } + }, + "checkers": { + "clang-tidy": { + "abseil-string-find-startswith": false, + "bugprone-use-after-move": true + }, + "clangsa": { + "alpha.clone.CloneChecker": false, + "deadcode.DeadStores": true + } + }, + "command": [ + "CodeChecker.py", + "analyze", + "-o", + "/path/to/reports", + "/path/to/build.json" + ], + "output_path": "/path/to/reports", + "result_source_files": { + "/path/to/reports/main.cpp_cd2085addd2b226005b7f9cf1827c082.plist": "/path/to/main.cpp", + "/path/to/reports/reports/main.cpp_ed1ce6c18431138a19465e60aa69a4ba.plist": "/path/to/main.cpp" + }, + "skipped": 0, + "timestamps": { + "begin": 1571297867, + "end": 1571297868 + }, + "versions": { + "clang": "clang version 7.0.0", + "clang-tidy": "LLVM version 7.0.0", + "codechecker": "6.11 (930440d6a6cae80f615146547f4b169c7629d558)" + }, + "working_directory": "/path/to/workspace" +} diff --git a/web/server/tests/unit/metadata_test_files/v2.json b/web/server/tests/unit/metadata_test_files/v2.json new file mode 100644 index 0000000000..4a5aba98ed --- /dev/null +++ b/web/server/tests/unit/metadata_test_files/v2.json @@ -0,0 +1,54 @@ +{ + "version": 2, + "num_of_report_dir": 1, + "tools": [ + { + "name": "codechecker", + "version": "6.11 (930440d6a6cae80f615146547f4b169c7629d558)", + "command": [ + "CodeChecker.py", + "analyze", + "-o", + "/path/to/reports", + "/path/to/build.json" + ], + "output_path": "/path/to/reports", + "skipped": 0, + "timestamps": { + "begin": 1571297867, + "end": 1571297868 + }, + "working_directory": "/path/to/workspace", + "analyzers": { + "clang-tidy": { + "checkers": { + "abseil-string-find-startswith": false, + "bugprone-use-after-move": true + }, + "analyzer_statistics": { + "failed": 0, + "failed_sources": [], + "successful": 10, + "version": "LLVM version 7.0.0" + } + }, + "clangsa": { + "checkers": { + "alpha.clone.CloneChecker": false, + "deadcode.DeadStores": true + }, + "analyzer_statistics": { + "failed": 0, + "failed_sources": [], + "successful": 1, + "version": "clang version 7.0.0" + } + } + }, + "result_source_files": { + "/path/to/reports/main.cpp_cd2085addd2b226005b7f9cf1827c082.plist": "/path/to/main.cpp", + "/path/to/reports/reports/main.cpp_ed1ce6c18431138a19465e60aa69a4ba.plist": "/path/to/main.cpp" + } + } + ] +} diff --git a/web/server/tests/unit/metadata_test_files/v2_multiple.json b/web/server/tests/unit/metadata_test_files/v2_multiple.json new file mode 100644 index 0000000000..8e42b8f7e2 --- /dev/null +++ b/web/server/tests/unit/metadata_test_files/v2_multiple.json @@ -0,0 +1,68 @@ +{ + "version": 2, + "num_of_report_dir": 2, + "tools": [ + { + "name": "codechecker", + "version": "6.11 (930440d6a6cae80f615146547f4b169c7629d558)", + "command": [ + "CodeChecker.py", + "analyze", + 
"-o", + "/path/to/reports", + "/path/to/build.json" + ], + "output_path": "/path/to/reports", + "skipped": 0, + "timestamps": { + "begin": 1571297867, + "end": 1571297868 + }, + "working_directory": "/path/to/workspace", + "analyzers": { + "clang-tidy": { + "checkers": { + "abseil-string-find-startswith": false, + "bugprone-use-after-move": true + }, + "analyzer_statistics": { + "failed": 0, + "failed_sources": [], + "successful": 10, + "version": "LLVM version 7.0.0" + } + }, + "clangsa": { + "checkers": { + "alpha.clone.CloneChecker": false, + "deadcode.DeadStores": true + }, + "analyzer_statistics": { + "failed": 0, + "failed_sources": [], + "successful": 1, + "version": "clang version 7.0.0" + } + } + }, + "result_source_files": { + "/path/to/reports/main.cpp_cd2085addd2b226005b7f9cf1827c082.plist": "/path/to/main.cpp", + "/path/to/reports/reports/main.cpp_ed1ce6c18431138a19465e60aa69a4ba.plist": "/path/to/main.cpp" + } + }, + { + "name": "cppcheck", + "analyzer_statistics": { + "failed": 0, + "failed_sources": [], + "successful": 1, + "version": "Cppcheck 1.87" + }, + "command": ["cppcheck", "/path/to/main.cpp"], + "timestamps": { + "begin": 1571297867, + "end": 1571297868 + } + } + ] +} diff --git a/web/server/tests/unit/metadata_test_files/v2_multiple_cppcheck.json b/web/server/tests/unit/metadata_test_files/v2_multiple_cppcheck.json new file mode 100644 index 0000000000..40af6caeca --- /dev/null +++ b/web/server/tests/unit/metadata_test_files/v2_multiple_cppcheck.json @@ -0,0 +1,36 @@ +{ + "version": 2, + "num_of_report_dir": 2, + "tools": [ + { + "name": "cppcheck", + "checkers": {}, + "analyzer_statistics": { + "failed": 0, + "failed_sources": [], + "successful": 1, + "version": "Cppcheck 1.87" + }, + "command": ["cppcheck", "/path/to/main.cpp"], + "timestamps": { + "begin": 1571297867, + "end": 1571297868 + } + }, + { + "name": "cppcheck", + "checkers": {}, + "analyzer_statistics": { + "failed": 0, + "failed_sources": [], + "successful": 1, + "version": "Cppcheck 1.86" + }, + "command": ["cppcheck", "-I", "/path/to/include", "/path/to/main.cpp"], + "timestamps": { + "begin": 1571297867, + "end": 1571297868 + } + } + ] +} diff --git a/web/server/tests/unit/test_metadata_merge.py b/web/server/tests/unit/test_metadata_merge.py new file mode 100644 index 0000000000..9d91667f5f --- /dev/null +++ b/web/server/tests/unit/test_metadata_merge.py @@ -0,0 +1,221 @@ +# ----------------------------------------------------------------------------- +# The CodeChecker Infrastructure +# This file is distributed under the University of Illinois Open Source +# License. See LICENSE.TXT for details. +# ----------------------------------------------------------------------------- + +""" Unit tests for the metadata merge. """ + +import json +import os +from tempfile import mkdtemp +import shutil +import unittest + +from codechecker_client.metadata import metadata_v1_to_v2, merge_metadata_json + + +class MetadataMergeTest(unittest.TestCase): + """ Test for merging multiple metadata.json file. """ + + def test_metadata_v1_to_v2(self): + """ Test to convert v1 version format metadata to v2 format. 
""" + metadata = { + "action_num": 1, + "checkers": { + "clang-tidy": ["a"], + "clangsa": {"b": False} + }, + "command": ["CodeChecker", "analyze"], + "failed": {}, + "output_path": "/path/to/reports", + "result_source_files": { + "/path/to/reports/main.cpp_cd.plist": "/path/to/main.cpp", + "/path/to/reports/main.cpp_ed.plist": "/path/to/main.cpp" + }, + "skipped": 1, + "successful": { + "clang-tidy": 1, + "clangsa": 1 + }, + "timestamps": { + "begin": 1571728770, + "end": 1571728771 + }, + "versions": { + "clang": "clang version 5.0.1", + "clang-tidy": "LLVM version 5.0.1", + "codechecker": "6.5.1 (fd2df38)" + }, + "working_directory": "/path/to/workspace" + } + + expected = { + "version": 2, + "tools": [{ + "name": "codechecker", + "version": "6.5.1 (fd2df38)", + "command": ["CodeChecker", "analyze"], + "output_path": "/path/to/reports", + "skipped": 1, + "timestamps": { + "begin": 1571728770, + "end": 1571728771 + }, + "working_directory": "/path/to/workspace", + "analyzers": { + "clang-tidy": { + "checkers": {"a": True}, + "analyzer_statistics": {} + }, + "clangsa": { + "checkers": {"b": False}, + "analyzer_statistics": {} + } + }, + "result_source_files": { + "/path/to/reports/main.cpp_cd.plist": "/path/to/main.cpp", + "/path/to/reports/main.cpp_ed.plist": "/path/to/main.cpp" + }, + }] + } + + res = metadata_v1_to_v2(metadata) + self.assertEqual(res, expected) + + def test_merge_metadata(self): + """ Test merging multiple metadata files. """ + metadata_v1 = { + "action_num": 1, + "checkers": { + "clang-tidy": ["a"], + "clangsa": {"b": False} + }, + "command": ["CodeChecker", "analyze"], + "failed": {}, + "output_path": "/path/to/reports", + "result_source_files": { + "/path/to/reports/main.cpp_cd.plist": "/path/to/main.cpp", + "/path/to/reports/main.cpp_ed.plist": "/path/to/main.cpp" + }, + "skipped": 1, + "successful": { + "clang-tidy": 1, + "clangsa": 1 + }, + "timestamps": { + "begin": 1571728770, + "end": 1571728771 + }, + "versions": { + "clang": "clang version 5.0.1", + "clang-tidy": "LLVM version 5.0.1", + "codechecker": "6.5.1 (fd2df38)" + }, + "working_directory": "/path/to/workspace" + } + + metadata_v2 = { + "version": 2, + 'num_of_report_dir': 1, + "tools": [{ + "name": "cppcheck", + "analyzer_statistics": { + "failed": 0, + "failed_sources": [], + "successful": 1, + "version": "Cppcheck 1.87" + }, + "command": ["cppcheck", "/path/to/main.cpp"], + "timestamps": { + "begin": 1571297867, + "end": 1571297868 + } + }] + } + + metadata_v3 = { + "version": 2, + 'num_of_report_dir': 1, + "tools": [{ + "name": "cppcheck", + "command": ["cppcheck", "/path/to/main2.cpp"], + "timestamps": { + "begin": 1571297867, + "end": 1571297868 + } + }] + } + + expected = { + "version": 2, + 'num_of_report_dir': 2, + "tools": [{ + "name": "codechecker", + "version": "6.5.1 (fd2df38)", + "command": ["CodeChecker", "analyze"], + "output_path": "/path/to/reports", + "skipped": 1, + "timestamps": { + "begin": 1571728770, + "end": 1571728771 + }, + "working_directory": "/path/to/workspace", + "analyzers": { + "clang-tidy": { + "checkers": {"a": True}, + "analyzer_statistics": {} + }, + "clangsa": { + "checkers": {"b": False}, + "analyzer_statistics": {} + } + }, + "result_source_files": { + "/path/to/reports/main.cpp_cd.plist": "/path/to/main.cpp", + "/path/to/reports/main.cpp_ed.plist": "/path/to/main.cpp" + }}, + { + "name": "cppcheck", + "analyzer_statistics": { + "failed": 0, + "failed_sources": [], + "successful": 1, + "version": "Cppcheck 1.87" + }, + "command": ["cppcheck", 
"/path/to/main.cpp"], + "timestamps": { + "begin": 1571297867, + "end": 1571297868 + } + }, + { + "name": "cppcheck", + "command": ["cppcheck", "/path/to/main2.cpp"], + "timestamps": { + "begin": 1571297867, + "end": 1571297868 + } + }] + } + + try: + metadata_dir = mkdtemp() + + mf_1 = os.path.join(metadata_dir, 'm1.json') + mf_2 = os.path.join(metadata_dir, 'm2.json') + mf_3 = os.path.join(metadata_dir, 'm3.json') + + with open(mf_1, 'w', encoding='utf-8', errors='ignore') as f1: + f1.write(json.dumps(metadata_v1, indent=2)) + + with open(mf_2, 'w', encoding='utf-8', errors='ignore') as f2: + f2.write(json.dumps(metadata_v2, indent=2)) + + with open(mf_3, 'w', encoding='utf-8', errors='ignore') as f3: + f3.write(json.dumps(metadata_v3, indent=2)) + + res = merge_metadata_json([mf_1, mf_2, mf_3], 2) + self.assertEqual(res, expected) + finally: + shutil.rmtree(metadata_dir) diff --git a/web/server/tests/unit/test_metadata_parser.py b/web/server/tests/unit/test_metadata_parser.py new file mode 100644 index 0000000000..85823a45a3 --- /dev/null +++ b/web/server/tests/unit/test_metadata_parser.py @@ -0,0 +1,216 @@ +# ----------------------------------------------------------------------------- +# The CodeChecker Infrastructure +# This file is distributed under the University of Illinois Open Source +# License. See LICENSE.TXT for details. +# ----------------------------------------------------------------------------- + +""" Unit tests for the metadata parser. """ + +import os +import unittest + +from codechecker_server.metadata import MetadataInfoParser + + +metadata_cc_info = { + 'check_commands': [ + 'CodeChecker.py analyze -o /path/to/reports /path/to/build.json' + ], + 'analysis_duration': [1.0], + 'cc_version': '6.11 (930440d6a6cae80f615146547f4b169c7629d558)', + 'analyzer_statistics': { + 'clangsa': { + 'successful': 1, + 'failed': 0, + 'version': 'clang version 7.0.0', + 'failed_sources': [] + }, + 'clang-tidy': { + 'successful': 10, + 'failed': 0, + 'version': 'LLVM version 7.0.0', + 'failed_sources': [] + } + }, + 'checkers': { + 'clangsa': { + 'alpha.clone.CloneChecker': False, + 'deadcode.DeadStores': True + }, + 'clang-tidy': { + 'bugprone-use-after-move': True, + 'abseil-string-find-startswith': False + } + } +} + +metadata_multiple_info = { + 'check_commands': [ + 'CodeChecker.py analyze -o /path/to/reports /path/to/build.json', + 'cppcheck /path/to/main.cpp' + ], + 'analysis_duration': [1.0, 1.0], + 'cc_version': '6.11 (930440d6a6cae80f615146547f4b169c7629d558)', + 'analyzer_statistics': { + 'clangsa': { + 'successful': 1, + 'failed': 0, + 'version': 'clang version 7.0.0', + 'failed_sources': [] + }, + 'clang-tidy': { + 'successful': 10, + 'failed': 0, + 'version': 'LLVM version 7.0.0', + 'failed_sources': [] + }, + 'cppcheck': { + "failed": 0, + "failed_sources": [], + "successful": 1, + "version": "Cppcheck 1.87" + } + }, + 'checkers': { + 'clangsa': { + 'alpha.clone.CloneChecker': False, + 'deadcode.DeadStores': True + }, + 'clang-tidy': { + 'bugprone-use-after-move': True, + 'abseil-string-find-startswith': False + }, + 'cppcheck': {} + } +} + + +metadata_mult_cppcheck_info = { + 'check_commands': [ + 'cppcheck /path/to/main.cpp', + 'cppcheck -I /path/to/include /path/to/main.cpp' + ], + 'analysis_duration': [1.0, 1.0], + 'cc_version': None, + 'analyzer_statistics': { + 'cppcheck': { + "failed": 0, + "failed_sources": [], + "successful": 2, + "version": "Cppcheck 1.87" + } + }, + 'checkers': { + 'cppcheck': {} + } +} + + +class MetadataInfoParserTest(unittest.TestCase): + 
""" Testing metadata parser. """ + + @classmethod + def setup_class(self): + """ Initialize the metadata parser and test files. """ + self.__parser = MetadataInfoParser() + + # Already generated plist files for the tests. + self.__metadata_test_files = os.path.join( + os.path.dirname(__file__), 'metadata_test_files') + + def test_metadata_info_v1(self): + """ Get metadata info for old version format json file. """ + metadata_v1 = os.path.join(self.__metadata_test_files, 'v1.json') + check_commands, check_durations, cc_version, analyzer_statistics, \ + checkers = self.__parser.get_metadata_info(metadata_v1) + + self.assertEqual(len(check_commands), 1) + self.assertEqual(' '.join(metadata_cc_info['check_commands']), + ' '.join(check_commands[0])) + + self.assertEqual(metadata_cc_info['analysis_duration'][0], + check_durations[0]) + + self.assertEqual(metadata_cc_info['cc_version'], cc_version) + + self.assertDictEqual(metadata_cc_info['analyzer_statistics'], + analyzer_statistics) + + self.assertDictEqual(metadata_cc_info['checkers'], + checkers) + + def test_metadata_info_v2(self): + """ Get metadata info for new version format json. """ + metadata_v2 = os.path.join(self.__metadata_test_files, 'v2.json') + check_commands, check_durations, cc_version, analyzer_statistics, \ + checkers = self.__parser.get_metadata_info(metadata_v2) + + self.assertEqual(len(check_commands), 1) + self.assertEqual(' '.join(metadata_cc_info['check_commands']), + ' '.join(check_commands[0])) + + self.assertEqual(metadata_cc_info['analysis_duration'][0], + check_durations[0]) + + self.assertEqual(metadata_cc_info['cc_version'], cc_version) + + self.assertDictEqual(metadata_cc_info['analyzer_statistics'], + analyzer_statistics) + + self.assertDictEqual(metadata_cc_info['checkers'], + checkers) + + def test_multiple_metadata_info(self): + """ Get metadata info from multiple analyzers. """ + metadata_multiple = os.path.join(self.__metadata_test_files, + 'v2_multiple.json') + check_commands, check_durations, cc_version, analyzer_statistics, \ + checkers = self.__parser.get_metadata_info(metadata_multiple) + + self.assertEqual(len(check_commands), 2) + check_commands = [' '.join(c) for c in check_commands] + for command in metadata_multiple_info['check_commands']: + self.assertTrue(command in check_commands) + + self.assertEqual(int(sum(metadata_multiple_info['analysis_duration'])), + int(sum(check_durations))) + + self.assertEqual(metadata_multiple_info['cc_version'], cc_version) + + self.assertDictEqual(metadata_multiple_info['analyzer_statistics'], + analyzer_statistics) + + self.assertDictEqual(checkers, {}) + + def test_multiple_cppcheck_metadata_info(self): + """ Get metadata info from multiple cppcheck analyzers. 
""" + metadata_multiple = os.path.join(self.__metadata_test_files, + 'v2_multiple_cppcheck.json') + check_commands, check_durations, cc_version, analyzer_statistics, \ + checkers = self.__parser.get_metadata_info(metadata_multiple) + + self.assertEqual(len(check_commands), 2) + check_commands = [' '.join(c) for c in check_commands] + for command in metadata_mult_cppcheck_info['check_commands']: + self.assertTrue(command in check_commands) + + self.assertEqual( + int(sum(metadata_mult_cppcheck_info['analysis_duration'])), + int(sum(check_durations))) + + self.assertEqual(metadata_mult_cppcheck_info['cc_version'], cc_version) + + expected_stats = metadata_mult_cppcheck_info['analyzer_statistics'] + expected_cppcheck_stats = expected_stats['cppcheck'] + cppcheck_stats = analyzer_statistics['cppcheck'] + + self.assertEqual(expected_cppcheck_stats['failed'], + cppcheck_stats['failed']) + + self.assertEqual(len(expected_cppcheck_stats['failed_sources']), + len(cppcheck_stats['failed_sources'])) + + self.assertEqual(expected_cppcheck_stats['successful'], + cppcheck_stats['successful']) + + self.assertDictEqual(checkers, {}) diff --git a/web/server/www/scripts/codecheckerviewer/ListOfRuns.js b/web/server/www/scripts/codecheckerviewer/ListOfRuns.js index 8880a059ac..f9c7e7498e 100644 --- a/web/server/www/scripts/codecheckerviewer/ListOfRuns.js +++ b/web/server/www/scripts/codecheckerviewer/ListOfRuns.js @@ -269,6 +269,8 @@ function (declare, dom, ObjectStore, Store, Deferred, topic, Dialog, Button, checkCommand = 'Unavailable!'; } + checkCommand = checkCommand.replace(/; /g, '
<br>'); + that._dialog.set('title', 'Check command'); + that._dialog.set('content', checkCommand); + that._dialog.show(); diff --git a/web/server/www/scripts/codecheckerviewer/RunHistory.js b/web/server/www/scripts/codecheckerviewer/RunHistory.js index b49be58711..7cf603526b 100644 --- a/web/server/www/scripts/codecheckerviewer/RunHistory.js +++ b/web/server/www/scripts/codecheckerviewer/RunHistory.js @@ -163,6 +163,8 @@ function (declare, ObjectStore, Store, Deferred, DataGrid, Dialog, ContentPane, checkCommand = 'Unavailable!'; } + checkCommand = checkCommand.replace(/; /g, '<br>
'); + that._dialog.set('title', 'Check command'); that._dialog.set('content', checkCommand); that._dialog.show(); diff --git a/web/tests/functional/detection_status/test_detection_status.py b/web/tests/functional/detection_status/test_detection_status.py index 0f04c28eca..3e0f56916d 100644 --- a/web/tests/functional/detection_status/test_detection_status.py +++ b/web/tests/functional/detection_status/test_detection_status.py @@ -10,6 +10,7 @@ import glob import json import os +import shutil import unittest from codechecker_api.codeCheckerDBAccess_v6.ttypes import DetectionStatus, \ @@ -433,3 +434,52 @@ def test_detection_status_off_with_cfg(self): offed_reports = [r for r in reports if r.detectionStatus == DetectionStatus.OFF] self.assertEqual(len(offed_reports), 1) + + def test_store_multiple_dir_no_off(self): + """ + Store multiple report directory and check that no reports are marked + as OFF. + """ + cfg = dict(self._codechecker_cfg) + cfg['checkers'] = ['-d', 'core.DivideZero'] + + self._create_source_file(1) + codechecker.log_and_analyze(cfg, + self._test_dir) + + # Remove metadata.json. + try: + os.remove(os.path.join(cfg['reportdir'], + 'metadata.json')) + except OSError: + pass + + # Analyze the same project to a different report directory and disable + # modernize checkers. + cfg['checkers'] = ['-d', 'deadcode.DeadStores'] + cfg['reportdir'] = self._codechecker_cfg['reportdir'] + "2" + + orig_test_dir = self._test_dir + self._test_dir = self._test_dir + "2" + shutil.copytree(orig_test_dir, self._test_dir) + self._create_source_file(3) + + codechecker.log_and_analyze(cfg, + self._test_dir) + + # Set back test dir. + self._test_dir = orig_test_dir + + # Store two report directory. + cfg['reportdir'] = '{0} {1}'.format( + cfg['reportdir'], + self._codechecker_cfg['reportdir']) + codechecker.store(cfg, 'hello') + + # Check that no reports are marked as OFF. + reports = self._cc_client.getRunResults(None, 100, 0, [], None, None, + False) + + offed_reports = [r for r in reports + if r.detectionStatus == DetectionStatus.OFF] + self.assertEqual(len(offed_reports), 0)
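
As a quick illustration of the new helper module added above (web/client/codechecker_client/metadata.py), the short Python sketch below upgrades an old-style metadata dictionary to the version 2 layout with metadata_v1_to_v2() and then merges it from disk with merge_metadata_json(), the same way the updated `CodeChecker store` command assembles the metadata.json placed in the uploaded ZIP. The sample dictionary and the temporary file are made up for illustration; only the two helper functions come from this patch.

import json
import os
import tempfile

from codechecker_client.metadata import merge_metadata_json, metadata_v1_to_v2

# A made-up pre-v2 metadata dictionary, shaped like the old metadata.json.
old_metadata = {
    'command': ['CodeChecker', 'analyze'],
    'versions': {'codechecker': '6.11 (0000000)'},
    'checkers': {'clangsa': {'deadcode.DeadStores': True}},
    'analyzer_statistics': {
        'clangsa': {'failed': 0, 'failed_sources': [], 'successful': 1,
                    'version': 'clang version 7.0.0'}},
    'result_source_files': {}}

# In-memory upgrade to the v2 layout ({'version': 2, 'tools': [...]}).
print(json.dumps(metadata_v1_to_v2(old_metadata), indent=2))

# merge_metadata_json() reads metadata.json files from disk, the way
# 'CodeChecker store' uses it, so write the dictionary out first.
with tempfile.TemporaryDirectory() as tmp_dir:
    metadata_file = os.path.join(tmp_dir, 'metadata.json')
    with open(metadata_file, 'w', encoding='utf-8') as meta_f:
        json.dump(old_metadata, meta_f)

    merged = merge_metadata_json([metadata_file], num_of_report_dir=1)
    print(len(merged['tools']))  # 1: a single 'codechecker' tool entry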