diff --git a/examples/verilog/verilog_ams/run.py b/examples/verilog/verilog_ams/run.py index 9d088f8e9..ade54d47f 100644 --- a/examples/verilog/verilog_ams/run.py +++ b/examples/verilog/verilog_ams/run.py @@ -12,8 +12,6 @@ VU = VUnit.from_argv() LIB = VU.add_library("lib") LIB.add_source_files(ROOT / "*.sv") -LIB.add_source_files(ROOT / "*.vams").set_compile_option( - "modelsim.vlog_flags", ["-ams"] -) +LIB.add_source_files(ROOT / "*.vams").set_compile_option("modelsim.vlog_flags", ["-ams"]) VU.main() diff --git a/examples/vhdl/array/run.py b/examples/vhdl/array/run.py index ae5c984c2..f21f3274f 100644 --- a/examples/vhdl/array/run.py +++ b/examples/vhdl/array/run.py @@ -21,9 +21,7 @@ SRC_PATH = Path(__file__).parent / "src" -VU.add_library("lib").add_source_files( - [SRC_PATH / "*.vhd", SRC_PATH / "test" / "*.vhd"] -) +VU.add_library("lib").add_source_files([SRC_PATH / "*.vhd", SRC_PATH / "test" / "*.vhd"]) VU.set_compile_option("ghdl.flags", ["-frelaxed"]) VU.set_sim_option("ghdl.elab_flags", ["-frelaxed"]) diff --git a/examples/vhdl/axi_dma/run.py b/examples/vhdl/axi_dma/run.py index 77fb6eb7f..9878fb7ca 100644 --- a/examples/vhdl/axi_dma/run.py +++ b/examples/vhdl/axi_dma/run.py @@ -24,8 +24,6 @@ SRC_PATH = Path(__file__).parent / "src" -VU.add_library("axi_dma_lib").add_source_files( - [SRC_PATH / "*.vhd", SRC_PATH / "test" / "*.vhd"] -) +VU.add_library("axi_dma_lib").add_source_files([SRC_PATH / "*.vhd", SRC_PATH / "test" / "*.vhd"]) VU.main() diff --git a/examples/vhdl/composite_generics/run.py b/examples/vhdl/composite_generics/run.py index ab7edb696..bb8fba2ba 100644 --- a/examples/vhdl/composite_generics/run.py +++ b/examples/vhdl/composite_generics/run.py @@ -28,18 +28,12 @@ def encode(tb_cfg): TEST.add_config( name="VGA", - generics=dict( - encoded_tb_cfg=encode( - dict(image_width=640, image_height=480, dump_debug_data=False) - ) - ), + generics=dict(encoded_tb_cfg=encode(dict(image_width=640, image_height=480, dump_debug_data=False))), ) TEST.add_config( name="tiny", - generics=dict( - encoded_tb_cfg=encode(dict(image_width=4, image_height=3, dump_debug_data=True)) - ), + generics=dict(encoded_tb_cfg=encode(dict(image_width=4, image_height=3, dump_debug_data=True))), ) VU.main() diff --git a/examples/vhdl/json4vhdl/run.py b/examples/vhdl/json4vhdl/run.py index 7bd601876..1ab294748 100644 --- a/examples/vhdl/json4vhdl/run.py +++ b/examples/vhdl/json4vhdl/run.py @@ -35,8 +35,6 @@ TB.get_tests("stringified*")[0].set_generic("tb_cfg", JSON_STR) TB.get_tests("b16encoded stringified*")[0].set_generic("tb_cfg", b16encode(JSON_STR)) TB.get_tests("JSON file*")[0].set_generic("tb_cfg", JSON_FILE) -TB.get_tests("b16encoded JSON file*")[0].set_generic( - "tb_cfg", b16encode(str(TEST_PATH / JSON_FILE)) -) +TB.get_tests("b16encoded JSON file*")[0].set_generic("tb_cfg", b16encode(str(TEST_PATH / JSON_FILE))) VU.main() diff --git a/examples/vhdl/run/run.py b/examples/vhdl/run/run.py index c6f203b24..ad61dd900 100644 --- a/examples/vhdl/run/run.py +++ b/examples/vhdl/run/run.py @@ -20,8 +20,6 @@ LIB = VU.add_library("lib") LIB.add_source_files(ROOT / "*.vhd") -LIB.entity("tb_with_lower_level_control").scan_tests_from_file( - ROOT / "test_control.vhd" -) +LIB.entity("tb_with_lower_level_control").scan_tests_from_file(ROOT / "test_control.vhd") VU.main() diff --git a/examples/vhdl/vivado/vivado_util.py b/examples/vhdl/vivado/vivado_util.py index 9c536a23a..5d9acc8c9 100644 --- a/examples/vhdl/vivado/vivado_util.py +++ b/examples/vhdl/vivado/vivado_util.py @@ -41,10 +41,7 @@ def 
compile_standard_libraries(vunit_obj, output_path): simulator_class = SIMULATOR_FACTORY.select_simulator() if not done_token.exists(): - print( - "Compiling standard libraries into %s ..." - % str(Path(output_path).resolve()) - ) + print("Compiling standard libraries into %s ..." % str(Path(output_path).resolve())) simname = simulator_class.name # Vivado calls rivierapro for riviera @@ -61,10 +58,7 @@ def compile_standard_libraries(vunit_obj, output_path): ) else: - print( - "Standard libraries already exists in %s, skipping" - % str(Path(output_path).resolve()) - ) + print("Standard libraries already exists in %s, skipping" % str(Path(output_path).resolve())) for library_name in ["unisim", "unimacro", "unifast", "secureip", "xpm"]: path = str(Path(output_path) / library_name) @@ -88,13 +82,8 @@ def add_project_ip(vunit_obj, project_file, output_path, vivado_path=None, clean compile_order_file = str(Path(output_path) / "compile_order.txt") if clean or not Path(compile_order_file).exists(): - create_compile_order_file( - project_file, compile_order_file, vivado_path=vivado_path - ) + create_compile_order_file(project_file, compile_order_file, vivado_path=vivado_path) else: - print( - "Vivado project Compile order already exists, re-using: %s" - % str(Path(compile_order_file).resolve()) - ) + print("Vivado project Compile order already exists, re-using: %s" % str(Path(compile_order_file).resolve())) return add_from_compile_order_file(vunit_obj, compile_order_file) diff --git a/pyproject.toml b/pyproject.toml index 6dd70afec..cd2b8d2ed 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,6 +5,9 @@ requires = [ ] build-backend = "setuptools.build_meta" +[tool.black] +line-length = 120 + [tool.tox] legacy_tox_ini = """ [tox] diff --git a/setup.py b/setup.py index 027b1f923..f4544640b 100644 --- a/setup.py +++ b/setup.py @@ -37,9 +37,7 @@ def find_all_files(directory, endings=None): DATA_FILES = [] DATA_FILES += find_all_files("vunit", endings=[".tcl"]) DATA_FILES += find_all_files(str(Path("vunit") / "vhdl")) -DATA_FILES += find_all_files( - str(Path("vunit") / "verilog"), endings=[".v", ".sv", ".svh"] -) +DATA_FILES += find_all_files(str(Path("vunit") / "verilog"), endings=[".v", ".sv", ".svh"]) DATA_FILES = [os.path.relpath(file_name, "vunit") for file_name in DATA_FILES] setup( diff --git a/tests/acceptance/artificial/verilog/run.py b/tests/acceptance/artificial/verilog/run.py index 76c2c22c0..65ffeac18 100644 --- a/tests/acceptance/artificial/verilog/run.py +++ b/tests/acceptance/artificial/verilog/run.py @@ -29,9 +29,7 @@ def configure_tb_with_parameter_config(): tests[3].add_config( "cfg", - parameters=dict( - set_parameter="set-for-test", config_parameter="set-from-config" - ), + parameters=dict(set_parameter="set-for-test", config_parameter="set-from-config"), ) def post_check(output_path): @@ -40,9 +38,7 @@ def post_check(output_path): tests[4].add_config( "cfg", - parameters=dict( - set_parameter="set-from-config", config_parameter="set-from-config" - ), + parameters=dict(set_parameter="set-from-config", config_parameter="set-from-config"), post_check=post_check, ) @@ -58,7 +54,5 @@ def post_check(output_path): configure_tb_with_parameter_config() configure_tb_same_sim_all_pass(VU) -LIB.module("tb_other_file_tests").scan_tests_from_file( - str(ROOT / "other_file_tests.sv") -) +LIB.module("tb_other_file_tests").scan_tests_from_file(str(ROOT / "other_file_tests.sv")) VU.main() diff --git a/tests/acceptance/artificial/vhdl/run.py b/tests/acceptance/artificial/vhdl/run.py index 
4f4081a27..a3df9b08c 100644 --- a/tests/acceptance/artificial/vhdl/run.py +++ b/tests/acceptance/artificial/vhdl/run.py @@ -69,9 +69,7 @@ def configure_tb_set_generic(ui): tb.set_generic("str_quote_val", 'a"b') str_long_num = 512 tb.set_generic("str_long_num", str_long_num) - tb.set_generic( - "str_long_val", "".join(["0123456789abcdef" for x in range(str_long_num)]) - ) + tb.set_generic("str_long_val", "".join(["0123456789abcdef" for x in range(str_long_num)])) def configure_tb_assert_stop_level(ui): @@ -79,10 +77,7 @@ def configure_tb_assert_stop_level(ui): for vhdl_assert_stop_level in ["warning", "error", "failure"]: for report_level in ["warning", "error", "failure"]: - test = tb.test( - "Report %s when VHDL assert stop level = %s" - % (report_level, vhdl_assert_stop_level) - ) + test = tb.test("Report %s when VHDL assert stop level = %s" % (report_level, vhdl_assert_stop_level)) test.set_sim_option("vhdl_assert_stop_level", vhdl_assert_stop_level) @@ -92,7 +87,5 @@ def configure_tb_assert_stop_level(ui): configure_tb_assert_stop_level(VU) LIB.entity("tb_no_generic_override").set_generic("g_val", False) LIB.entity("tb_ieee_warning").test("pass").set_sim_option("disable_ieee_warnings", True) -LIB.entity("tb_other_file_tests").scan_tests_from_file( - str(ROOT / "other_file_tests.vhd") -) +LIB.entity("tb_other_file_tests").scan_tests_from_file(str(ROOT / "other_file_tests.vhd")) VU.main() diff --git a/tests/acceptance/test_artificial.py b/tests/acceptance/test_artificial.py index a5df1a0f2..7f400a245 100644 --- a/tests/acceptance/test_artificial.py +++ b/tests/acceptance/test_artificial.py @@ -89,27 +89,19 @@ def test_run_selected_tests_in_same_sim_test_bench_vhdl(self): @unittest.skipUnless(simulator_is("modelsim"), "Only modelsim supports verilog") def test_run_selected_tests_in_same_sim_test_bench_verilog(self): - self._test_run_selected_tests_in_same_sim_test_bench( - self.artificial_run_verilog - ) + self._test_run_selected_tests_in_same_sim_test_bench(self.artificial_run_verilog) def _test_run_selected_tests_in_same_sim_test_bench(self, run_file): """ Run selected "same_sim" test in isolation """ - self.check( - run_file, exit_code=0, clean=True, args=["*same_sim_some_fail*Test 1*"] - ) + self.check(run_file, exit_code=0, clean=True, args=["*same_sim_some_fail*Test 1*"]) check_report(self.report_file, [("passed", "lib.tb_same_sim_some_fail.Test 1")]) - self.check( - run_file, exit_code=1, clean=False, args=["*same_sim_some_fail*Test 2*"] - ) + self.check(run_file, exit_code=1, clean=False, args=["*same_sim_some_fail*Test 2*"]) check_report(self.report_file, [("failed", "lib.tb_same_sim_some_fail.Test 2")]) - self.check( - run_file, exit_code=0, clean=False, args=["*same_sim_some_fail*Test 3*"] - ) + self.check(run_file, exit_code=0, clean=False, args=["*same_sim_some_fail*Test 3*"]) check_report(self.report_file, [("passed", "lib.tb_same_sim_some_fail.Test 3")]) self.check( @@ -181,9 +173,7 @@ def check(self, run_file, args=None, clean=True, exit_code=0): def test_exit_0_flag(self): self.check(self.artificial_run_vhdl, exit_code=1, args=["lib.tb_fail.all"]) - self.check( - self.artificial_run_vhdl, exit_code=0, args=["--exit-0", "lib.tb_fail.all"] - ) + self.check(self.artificial_run_vhdl, exit_code=0, args=["--exit-0", "lib.tb_fail.all"]) EXPECTED_REPORT = ( diff --git a/tests/acceptance/test_external_run_scripts.py b/tests/acceptance/test_external_run_scripts.py index 15af2b76d..c162fb528 100644 --- a/tests/acceptance/test_external_run_scripts.py +++ 
b/tests/acceptance/test_external_run_scripts.py @@ -155,9 +155,7 @@ def test_vhdl_composite_generics_example_project(self): ], ) - @unittest.skipUnless( - simulator_is("ghdl"), "Support complex JSON strings as generic" - ) + @unittest.skipUnless(simulator_is("ghdl"), "Support complex JSON strings as generic") def test_vhdl_json4vhdl_example_project(self): self.check(str(ROOT / "examples" / "vhdl" / "json4vhdl" / "run.py")) @@ -171,9 +169,7 @@ def test_vhdl_axi_dma_example_project(self): self.check(str(ROOT / "examples" / "vhdl" / "axi_dma" / "run.py")) def test_vhdl_user_guide_example_project(self): - self.check( - str(ROOT / "examples" / "vhdl" / "user_guide" / "run.py"), exit_code=1 - ) + self.check(str(ROOT / "examples" / "vhdl" / "user_guide" / "run.py"), exit_code=1) check_report( self.report_file, [ @@ -185,9 +181,7 @@ def test_vhdl_user_guide_example_project(self): @unittest.skipUnless(simulator_supports_verilog(), "Verilog") def test_verilog_user_guide_example_project(self): - self.check( - str(ROOT / "examples" / "verilog" / "user_guide" / "run.py"), exit_code=1 - ) + self.check(str(ROOT / "examples" / "verilog" / "user_guide" / "run.py"), exit_code=1) check_report( self.report_file, [ diff --git a/tests/common.py b/tests/common.py index 75660984d..47d72d605 100644 --- a/tests/common.py +++ b/tests/common.py @@ -42,9 +42,7 @@ def check_report(report_file, tests=None): for status, name in tests: if report[name] != status: - raise AssertionError( - "Wrong status of %s got %s expected %s" % (name, report[name], status) - ) + raise AssertionError("Wrong status of %s got %s expected %s" % (name, report[name], status)) num_tests = int(root.attrib["tests"]) assert num_tests == len(tests) @@ -128,9 +126,7 @@ def new_function(*args, **kwargs): return new_function -def get_vhdl_test_bench( - test_bench_name, tests=None, same_sim=False, test_attributes=None -): +def get_vhdl_test_bench(test_bench_name, tests=None, same_sim=False, test_attributes=None): """ Create a valid VUnit test bench @@ -190,9 +186,7 @@ def get_vhdl_test_bench( return contents -def create_vhdl_test_bench_file( - test_bench_name, file_name, tests=None, same_sim=False, test_attributes=None -): +def create_vhdl_test_bench_file(test_bench_name, file_name, tests=None, same_sim=False, test_attributes=None): """ Create a valid VUnit test bench and writes it to file_name """ diff --git a/tests/lint/test_license.py b/tests/lint/test_license.py index b37ded630..6f28b58b6 100644 --- a/tests/lint/test_license.py +++ b/tests/lint/test_license.py @@ -22,10 +22,8 @@ ROOT = Path(RSTR) RE_LICENSE_NOTICE = re.compile( - r"(?P#|--|//) This Source Code Form is subject to the terms of the Mozilla Public" - + "\n" - r"(?P=comment_start) License, v\. 2\.0\. If a copy of the MPL was not distributed with this file," - + "\n" + r"(?P#|--|//) This Source Code Form is subject to the terms of the Mozilla Public" + "\n" + r"(?P=comment_start) License, v\. 2\.0\. If a copy of the MPL was not distributed with this file," + "\n" r"(?P=comment_start) You can obtain one at http://mozilla\.org/MPL/2\.0/\." 
+ "\n" r"(?P=comment_start)" + "\n" r"(?P=comment_start) Copyright \(c\) (?P20\d\d)(-(?P20\d\d))?, " @@ -70,14 +68,12 @@ def _check_license(self, code, file_name): self.assertEqual( int(match.group("first_year")), FIRST_YEAR, - "Expected copyright year range to start with %d in %s" - % (FIRST_YEAR, file_name), + "Expected copyright year range to start with %d in %s" % (FIRST_YEAR, file_name), ) self.assertEqual( int(match.group("last_year")), LAST_YEAR, - "Expected copyright year range to end with %d in %s" - % (LAST_YEAR, file_name), + "Expected copyright year range to end with %d in %s" % (LAST_YEAR, file_name), ) @staticmethod @@ -97,9 +93,7 @@ def _check_no_trailing_whitespace(code, file_name): for _ in range(len(line) - len(sline)): print("~", end="") print() - raise AssertionError( - "Line %i of %s contains trailing whitespace" % (idx + 1, file_name) - ) + raise AssertionError("Line %i of %s contains trailing whitespace" % (idx + 1, file_name)) def fix_license(file_name): diff --git a/tests/unit/test_activehdl_interface.py b/tests/unit/test_activehdl_interface.py index 1babde95c..0d38fe5fd 100644 --- a/tests/unit/test_activehdl_interface.py +++ b/tests/unit/test_activehdl_interface.py @@ -53,9 +53,7 @@ def test_compile_project_vhdl_2008(self, process, check_output): project = Project() project.add_library("lib", "lib_path") write_file("file.vhd", "") - project.add_source_file( - "file.vhd", "lib", file_type="vhdl", vhdl_standard=VHDL.standard("2008") - ) + project.add_source_file("file.vhd", "lib", file_type="vhdl", vhdl_standard=VHDL.standard("2008")) simif.compile_project(project) process.assert_any_call( [str(Path("prefix") / "vlib"), "lib", "lib_path"], @@ -88,9 +86,7 @@ def test_compile_project_vhdl_2002(self, process, check_output): project = Project() project.add_library("lib", "lib_path") write_file("file.vhd", "") - project.add_source_file( - "file.vhd", "lib", file_type="vhdl", vhdl_standard=VHDL.standard("2002") - ) + project.add_source_file("file.vhd", "lib", file_type="vhdl", vhdl_standard=VHDL.standard("2002")) simif.compile_project(project) process.assert_any_call( [str(Path("prefix") / "vlib"), "lib", "lib_path"], @@ -123,9 +119,7 @@ def test_compile_project_vhdl_93(self, process, check_output): project = Project() project.add_library("lib", "lib_path") write_file("file.vhd", "") - project.add_source_file( - "file.vhd", "lib", file_type="vhdl", vhdl_standard=VHDL.standard("93") - ) + project.add_source_file("file.vhd", "lib", file_type="vhdl", vhdl_standard=VHDL.standard("93")) simif.compile_project(project) process.assert_any_call( [str(Path("prefix") / "vlib"), "lib", "lib_path"], @@ -303,9 +297,7 @@ def test_compile_project_verilog_include(self, process, check_output): project = Project() project.add_library("lib", "lib_path") write_file("file.v", "") - project.add_source_file( - "file.v", "lib", file_type="verilog", include_dirs=["include"] - ) + project.add_source_file("file.v", "lib", file_type="verilog", include_dirs=["include"]) simif.compile_project(project) process.assert_any_call( [str(Path("prefix") / "vlib"), "lib", "lib_path"], @@ -341,9 +333,7 @@ def test_compile_project_verilog_define(self, process, check_output): project = Project() project.add_library("lib", "lib_path") write_file("file.v", "") - project.add_source_file( - "file.v", "lib", file_type="verilog", defines={"defname": "defval"} - ) + project.add_source_file("file.v", "lib", file_type="verilog", defines={"defname": "defval"}) simif.compile_project(project) process.assert_any_call( 
[str(Path("prefix") / "vlib"), "lib", "lib_path"], @@ -372,18 +362,14 @@ def test_compile_project_verilog_define(self, process, check_output): ) @mock.patch("vunit.sim_if.activehdl.ActiveHDLInterface.find_prefix") - @mock.patch( - "vunit.sim_if.activehdl.Process", new=MockProcessVersionWithPackageGenerics - ) + @mock.patch("vunit.sim_if.activehdl.Process", new=MockProcessVersionWithPackageGenerics) def test_supports_vhdl_package_generics_true(self, find_prefix): find_prefix.return_value = "" simif = ActiveHDLInterface(prefix="prefix", output_path=self.output_path) self.assertTrue(simif.supports_vhdl_package_generics()) @mock.patch("vunit.sim_if.activehdl.ActiveHDLInterface.find_prefix") - @mock.patch( - "vunit.sim_if.activehdl.Process", new=MockProcessVersionWithoutPackageGenerics - ) + @mock.patch("vunit.sim_if.activehdl.Process", new=MockProcessVersionWithoutPackageGenerics) def test_supports_vhdl_package_generics_false(self, find_prefix): find_prefix.return_value = "" simif = ActiveHDLInterface(prefix="prefix", output_path=self.output_path) @@ -407,9 +393,7 @@ class TestVersionConsumer(unittest.TestCase): Test the VersionConsumer class """ - def _assert_version_correct( - self, version_line, expected_major, expected_minor, expected_minor_letter - ): + def _assert_version_correct(self, version_line, expected_major, expected_minor, expected_minor_letter): """ Assertion function used by tests in this class """ @@ -452,189 +436,90 @@ class TestVersion(unittest.TestCase): def test_lt(self): # Test with letters - self.assertTrue( - TestVersion.low_version_letter < TestVersion.high_version_letter - ) - self.assertFalse( - TestVersion.high_version_letter < TestVersion.low_version_letter - ) + self.assertTrue(TestVersion.low_version_letter < TestVersion.high_version_letter) + self.assertFalse(TestVersion.high_version_letter < TestVersion.low_version_letter) # Test without letters - self.assertTrue( - TestVersion.low_version_no_letter < TestVersion.high_version_no_letter - ) - self.assertFalse( - TestVersion.high_version_no_letter < TestVersion.low_version_no_letter - ) + self.assertTrue(TestVersion.low_version_no_letter < TestVersion.high_version_no_letter) + self.assertFalse(TestVersion.high_version_no_letter < TestVersion.low_version_no_letter) # Both - self.assertTrue( - TestVersion.low_version_letter < TestVersion.high_version_no_letter - ) - self.assertFalse( - TestVersion.high_version_letter < TestVersion.low_version_no_letter - ) + self.assertTrue(TestVersion.low_version_letter < TestVersion.high_version_no_letter) + self.assertFalse(TestVersion.high_version_letter < TestVersion.low_version_no_letter) def test_le(self): # Test equal # Test with letters - self.assertTrue( - TestVersion.high_version_letter <= TestVersion.high_version_letter - ) - self.assertFalse( - TestVersion.high_version_letter <= TestVersion.low_version_letter - ) + self.assertTrue(TestVersion.high_version_letter <= TestVersion.high_version_letter) + self.assertFalse(TestVersion.high_version_letter <= TestVersion.low_version_letter) # Test without letters - self.assertTrue( - TestVersion.high_version_no_letter <= TestVersion.high_version_no_letter - ) - self.assertFalse( - TestVersion.high_version_no_letter <= TestVersion.low_version_no_letter - ) + self.assertTrue(TestVersion.high_version_no_letter <= TestVersion.high_version_no_letter) + self.assertFalse(TestVersion.high_version_no_letter <= TestVersion.low_version_no_letter) # Both - self.assertTrue( - TestVersion.high_version_letter <= 
TestVersion.high_version_no_letter - ) - self.assertFalse( - TestVersion.high_version_letter <= TestVersion.low_version_no_letter - ) + self.assertTrue(TestVersion.high_version_letter <= TestVersion.high_version_no_letter) + self.assertFalse(TestVersion.high_version_letter <= TestVersion.low_version_no_letter) # Test less than # Test with letters - self.assertTrue( - TestVersion.low_version_letter <= TestVersion.high_version_letter - ) - self.assertFalse( - TestVersion.high_version_letter <= TestVersion.low_version_letter - ) + self.assertTrue(TestVersion.low_version_letter <= TestVersion.high_version_letter) + self.assertFalse(TestVersion.high_version_letter <= TestVersion.low_version_letter) # Test without letters - self.assertTrue( - TestVersion.low_version_no_letter <= TestVersion.high_version_no_letter - ) - self.assertFalse( - TestVersion.high_version_no_letter <= TestVersion.low_version_no_letter - ) + self.assertTrue(TestVersion.low_version_no_letter <= TestVersion.high_version_no_letter) + self.assertFalse(TestVersion.high_version_no_letter <= TestVersion.low_version_no_letter) # Both - self.assertTrue( - TestVersion.low_version_letter <= TestVersion.high_version_no_letter - ) - self.assertFalse( - TestVersion.high_version_letter <= TestVersion.low_version_no_letter - ) + self.assertTrue(TestVersion.low_version_letter <= TestVersion.high_version_no_letter) + self.assertFalse(TestVersion.high_version_letter <= TestVersion.low_version_no_letter) def test_gt(self): # Test with letters - self.assertTrue( - TestVersion.high_version_letter > TestVersion.low_version_letter - ) - self.assertFalse( - TestVersion.low_version_letter > TestVersion.high_version_letter - ) + self.assertTrue(TestVersion.high_version_letter > TestVersion.low_version_letter) + self.assertFalse(TestVersion.low_version_letter > TestVersion.high_version_letter) # Test without letters - self.assertTrue( - TestVersion.high_version_no_letter > TestVersion.low_version_no_letter - ) - self.assertFalse( - TestVersion.low_version_no_letter > TestVersion.high_version_no_letter - ) + self.assertTrue(TestVersion.high_version_no_letter > TestVersion.low_version_no_letter) + self.assertFalse(TestVersion.low_version_no_letter > TestVersion.high_version_no_letter) # Both - self.assertTrue( - TestVersion.high_version_letter > TestVersion.low_version_no_letter - ) - self.assertFalse( - TestVersion.low_version_letter > TestVersion.high_version_no_letter - ) + self.assertTrue(TestVersion.high_version_letter > TestVersion.low_version_no_letter) + self.assertFalse(TestVersion.low_version_letter > TestVersion.high_version_no_letter) def test_ge(self): # Test equal # Test with letters - self.assertTrue( - TestVersion.high_version_letter >= TestVersion.high_version_letter - ) - self.assertFalse( - TestVersion.low_version_letter >= TestVersion.high_version_letter - ) + self.assertTrue(TestVersion.high_version_letter >= TestVersion.high_version_letter) + self.assertFalse(TestVersion.low_version_letter >= TestVersion.high_version_letter) # Test without letters - self.assertTrue( - TestVersion.high_version_no_letter >= TestVersion.high_version_no_letter - ) - self.assertFalse( - TestVersion.low_version_no_letter >= TestVersion.high_version_no_letter - ) + self.assertTrue(TestVersion.high_version_no_letter >= TestVersion.high_version_no_letter) + self.assertFalse(TestVersion.low_version_no_letter >= TestVersion.high_version_no_letter) # Both - self.assertTrue( - TestVersion.high_version_letter_for_mixed - >= TestVersion.high_version_no_letter - 
) - self.assertFalse( - TestVersion.low_version_letter >= TestVersion.high_version_no_letter - ) + self.assertTrue(TestVersion.high_version_letter_for_mixed >= TestVersion.high_version_no_letter) + self.assertFalse(TestVersion.low_version_letter >= TestVersion.high_version_no_letter) # Test greater than # Test with letters - self.assertTrue( - TestVersion.high_version_letter >= TestVersion.low_version_letter - ) - self.assertFalse( - TestVersion.low_version_letter >= TestVersion.high_version_letter - ) + self.assertTrue(TestVersion.high_version_letter >= TestVersion.low_version_letter) + self.assertFalse(TestVersion.low_version_letter >= TestVersion.high_version_letter) # Test without letters - self.assertTrue( - TestVersion.high_version_no_letter >= TestVersion.low_version_no_letter - ) - self.assertFalse( - TestVersion.low_version_no_letter >= TestVersion.high_version_no_letter - ) + self.assertTrue(TestVersion.high_version_no_letter >= TestVersion.low_version_no_letter) + self.assertFalse(TestVersion.low_version_no_letter >= TestVersion.high_version_no_letter) # Both - self.assertTrue( - TestVersion.high_version_letter >= TestVersion.low_version_no_letter - ) - self.assertFalse( - TestVersion.low_version_letter >= TestVersion.high_version_no_letter - ) + self.assertTrue(TestVersion.high_version_letter >= TestVersion.low_version_no_letter) + self.assertFalse(TestVersion.low_version_letter >= TestVersion.high_version_no_letter) def test_eq(self): # Test with letters - self.assertTrue( - TestVersion.high_version_letter == TestVersion.high_version_letter - ) - self.assertFalse( - TestVersion.high_version_letter == TestVersion.low_version_letter - ) + self.assertTrue(TestVersion.high_version_letter == TestVersion.high_version_letter) + self.assertFalse(TestVersion.high_version_letter == TestVersion.low_version_letter) # Test without letters - self.assertTrue( - TestVersion.high_version_no_letter == TestVersion.high_version_no_letter - ) - self.assertFalse( - TestVersion.high_version_no_letter == TestVersion.low_version_no_letter - ) + self.assertTrue(TestVersion.high_version_no_letter == TestVersion.high_version_no_letter) + self.assertFalse(TestVersion.high_version_no_letter == TestVersion.low_version_no_letter) # Both - self.assertTrue( - TestVersion.high_version_letter_for_mixed - == TestVersion.high_version_no_letter - ) - self.assertFalse( - TestVersion.high_version_letter == TestVersion.low_version_no_letter - ) + self.assertTrue(TestVersion.high_version_letter_for_mixed == TestVersion.high_version_no_letter) + self.assertFalse(TestVersion.high_version_letter == TestVersion.low_version_no_letter) def test_ne(self): # Test with letters - self.assertTrue( - TestVersion.high_version_letter != TestVersion.low_version_letter - ) - self.assertFalse( - TestVersion.high_version_letter != TestVersion.high_version_letter - ) + self.assertTrue(TestVersion.high_version_letter != TestVersion.low_version_letter) + self.assertFalse(TestVersion.high_version_letter != TestVersion.high_version_letter) # Test without letters - self.assertTrue( - TestVersion.high_version_no_letter != TestVersion.low_version_no_letter - ) - self.assertFalse( - TestVersion.high_version_no_letter != TestVersion.high_version_no_letter - ) + self.assertTrue(TestVersion.high_version_no_letter != TestVersion.low_version_no_letter) + self.assertFalse(TestVersion.high_version_no_letter != TestVersion.high_version_no_letter) # Both - self.assertTrue( - TestVersion.high_version_letter != TestVersion.low_version_no_letter - ) - 
self.assertFalse( - TestVersion.high_version_letter_for_mixed - != TestVersion.high_version_no_letter - ) + self.assertTrue(TestVersion.high_version_letter != TestVersion.low_version_no_letter) + self.assertFalse(TestVersion.high_version_letter_for_mixed != TestVersion.high_version_no_letter) diff --git a/tests/unit/test_cds_file.py b/tests/unit/test_cds_file.py index 97c42dd75..be015eceb 100644 --- a/tests/unit/test_cds_file.py +++ b/tests/unit/test_cds_file.py @@ -98,9 +98,7 @@ def _check_written_as(self, cds, contents): """ Check that the CDSFile object writes the 'contents to the file """ - with mock.patch( - "vunit.sim_if.cds_file.write_file", autospec=True - ) as write_file: + with mock.patch("vunit.sim_if.cds_file.write_file", autospec=True) as write_file: cds.write("filename") self.assertEqual(len(write_file.mock_calls), 1) args = write_file.mock_calls[0][1] diff --git a/tests/unit/test_check_preprocessor.py b/tests/unit/test_check_preprocessor.py index 3079f7b5d..2e81841dd 100644 --- a/tests/unit/test_check_preprocessor.py +++ b/tests/unit/test_check_preprocessor.py @@ -204,7 +204,10 @@ def test_that_all_vhdl_2008_relational_operators_are_recognized(self): def make_context_msg(left, relation, right): - return ( - '"Expected %s %s %s. Left is " & to_string(%s) & ". Right is " & to_string(%s) & "."' - % (left.replace('"', '""'), relation, right.replace('"', '""'), left, right) + return '"Expected %s %s %s. Left is " & to_string(%s) & ". Right is " & to_string(%s) & "."' % ( + left.replace('"', '""'), + relation, + right.replace('"', '""'), + left, + right, ) diff --git a/tests/unit/test_configuration.py b/tests/unit/test_configuration.py index 9f6ce57c6..110e4dd3f 100644 --- a/tests/unit/test_configuration.py +++ b/tests/unit/test_configuration.py @@ -43,9 +43,7 @@ def test_error_on_setting_unknown_sim_option(self): def test_error_on_setting_illegal_value_sim_option(self): with _create_config() as config: - self.assertRaises( - ValueError, config.set_sim_option, "vhdl_assert_stop_level", "illegal" - ) + self.assertRaises(ValueError, config.set_sim_option, "vhdl_assert_stop_level", "illegal") def test_sim_option_is_not_mutated(self): with _create_config() as config: @@ -60,18 +58,12 @@ def test_does_not_add_tb_path_generic(self): @with_tempdir def test_adds_tb_path_generic(self, tempdir): - design_unit_tb_path = Entity( - "tb_entity_without_tb_path", file_name=str(Path(tempdir) / "file.vhd") - ) + design_unit_tb_path = Entity("tb_entity_without_tb_path", file_name=str(Path(tempdir) / "file.vhd")) tb_path = str(Path(tempdir) / "other_path") - design_unit_tb_path.original_file_name = str( - Path(tb_path) / "original_file.vhd" - ) + design_unit_tb_path.original_file_name = str(Path(tb_path) / "original_file.vhd") design_unit_tb_path.generic_names = ["runner_cfg", "tb_path"] config_tb_path = Configuration("name", design_unit_tb_path) - self.assertEqual( - config_tb_path.generics["tb_path"], (tb_path + "/").replace("\\", "/") - ) + self.assertEqual(config_tb_path.generics["tb_path"], (tb_path + "/").replace("\\", "/")) def test_constructor_adds_no_attributes(self): with _create_config() as config: @@ -96,9 +88,7 @@ def post_check(): return False self.assertEqual( - self._call_post_check( - post_check, output_path="output_path", read_output=None - ), + self._call_post_check(post_check, output_path="output_path", read_output=None), False, ) @@ -107,9 +97,7 @@ def post_check(): return True self.assertEqual( - self._call_post_check( - post_check, output_path="output_path", read_output=None - 
), + self._call_post_check(post_check, output_path="output_path", read_output=None), True, ) @@ -118,9 +106,7 @@ def post_check(): pass self.assertEqual( - self._call_post_check( - post_check, output_path="output_path", read_output=None - ), + self._call_post_check(post_check, output_path="output_path", read_output=None), False, ) @@ -187,9 +173,7 @@ def post_check(output): ) def test_call_pre_config_none(self): - self.assertEqual( - self._call_pre_config(None, "output_path", "simulator_output_path"), True - ) + self.assertEqual(self._call_pre_config(None, "output_path", "simulator_output_path"), True) def test_call_pre_config_false(self): def pre_config(): diff --git a/tests/unit/test_csv_logs.py b/tests/unit/test_csv_logs.py index 58642d9b2..6a33d9c2d 100644 --- a/tests/unit/test_csv_logs.py +++ b/tests/unit/test_csv_logs.py @@ -32,9 +32,7 @@ def make_log(directory, contents): """ Make log """ - with NamedTemporaryFile( - "w+", delete=False, dir=directory, suffix=".csv" - ) as file_obj: + with NamedTemporaryFile("w+", delete=False, dir=directory, suffix=".csv") as file_obj: file_obj.write(contents) self._log_files.append(file_obj.name) @@ -136,9 +134,7 @@ def test_should_be_possible_to_add_csv_files(self): self.assertEqual(result, expected_result) def test_should_sort_several_csv_files_with_non_default_fields(self): - csvlogs = CsvLogs( - self._few_fields_files, ["#", "Time", "Level", "Source", "Message"] - ) + csvlogs = CsvLogs(self._few_fields_files, ["#", "Time", "Level", "Source", "Message"]) result = self._write_to_file_and_read_back_result(csvlogs) expected_result = """#,Time,Level,Source,Message diff --git a/tests/unit/test_dependency_graph.py b/tests/unit/test_dependency_graph.py index fa5748e77..12f354d78 100644 --- a/tests/unit/test_dependency_graph.py +++ b/tests/unit/test_dependency_graph.py @@ -26,9 +26,7 @@ def test_should_return_list_of_nodes_when_there_are_no_dependencies(self): graph = DependencyGraph() self._add_nodes_and_dependencies(graph, nodes, []) result = graph.toposort() - self.assertEqual( - result.sort(), nodes.sort(), "Should return the node list in any order" - ) + self.assertEqual(result.sort(), nodes.sort(), "Should return the node list in any order") def test_should_sort_in_topological_order_when_there_are_dependencies(self): nodes = ["a", "b", "c", "d", "e", "f"] diff --git a/tests/unit/test_ghdl_interface.py b/tests/unit/test_ghdl_interface.py index ef42db920..c4d71792f 100644 --- a/tests/unit/test_ghdl_interface.py +++ b/tests/unit/test_ghdl_interface.py @@ -41,9 +41,7 @@ def find_executable_side_effect(name): executables["gtkwave"] = [] GHDLInterface(prefix="prefix", output_path="") - self.assertRaises( - RuntimeError, GHDLInterface, prefix="prefix", output_path="", gui=True - ) + self.assertRaises(RuntimeError, GHDLInterface, prefix="prefix", output_path="", gui=True) @mock.patch("subprocess.check_output", autospec=True) def test_parses_llvm_backend(self, check_output): @@ -105,18 +103,14 @@ def test_assertion_on_unknown_backend(self, check_output): check_output.return_value = version self.assertRaises(AssertionError, GHDLInterface.determine_backend, "prefix") - @mock.patch( - "vunit.sim_if.check_output", autospec=True, return_value="" - ) # pylint: disable=no-self-use + @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") # pylint: disable=no-self-use def test_compile_project_2008(self, check_output): simif = GHDLInterface(prefix="prefix", output_path="") write_file("file.vhd", "") project = Project() project.add_library("lib", 
"lib_path") - project.add_source_file( - "file.vhd", "lib", file_type="vhdl", vhdl_standard=VHDL.standard("2008") - ) + project.add_source_file("file.vhd", "lib", file_type="vhdl", vhdl_standard=VHDL.standard("2008")) simif.compile_project(project) check_output.assert_called_once_with( [ @@ -131,18 +125,14 @@ def test_compile_project_2008(self, check_output): env=simif.get_env(), ) - @mock.patch( - "vunit.sim_if.check_output", autospec=True, return_value="" - ) # pylint: disable=no-self-use + @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") # pylint: disable=no-self-use def test_compile_project_2002(self, check_output): simif = GHDLInterface(prefix="prefix", output_path="") write_file("file.vhd", "") project = Project() project.add_library("lib", "lib_path") - project.add_source_file( - "file.vhd", "lib", file_type="vhdl", vhdl_standard=VHDL.standard("2002") - ) + project.add_source_file("file.vhd", "lib", file_type="vhdl", vhdl_standard=VHDL.standard("2002")) simif.compile_project(project) check_output.assert_called_once_with( [ @@ -157,18 +147,14 @@ def test_compile_project_2002(self, check_output): env=simif.get_env(), ) - @mock.patch( - "vunit.sim_if.check_output", autospec=True, return_value="" - ) # pylint: disable=no-self-use + @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") # pylint: disable=no-self-use def test_compile_project_93(self, check_output): simif = GHDLInterface(prefix="prefix", output_path="") write_file("file.vhd", "") project = Project() project.add_library("lib", "lib_path") - project.add_source_file( - "file.vhd", "lib", file_type="vhdl", vhdl_standard=VHDL.standard("93") - ) + project.add_source_file("file.vhd", "lib", file_type="vhdl", vhdl_standard=VHDL.standard("93")) simif.compile_project(project) check_output.assert_called_once_with( [ @@ -183,9 +169,7 @@ def test_compile_project_93(self, check_output): env=simif.get_env(), ) - @mock.patch( - "vunit.sim_if.check_output", autospec=True, return_value="" - ) # pylint: disable=no-self-use + @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") # pylint: disable=no-self-use def test_compile_project_extra_flags(self, check_output): simif = GHDLInterface(prefix="prefix", output_path="") write_file("file.vhd", "") @@ -212,9 +196,7 @@ def test_compile_project_extra_flags(self, check_output): def test_elaborate_e_project(self): design_unit = Entity("tb_entity", file_name=str(Path("tempdir") / "file.vhd")) - design_unit.original_file_name = str( - Path("tempdir") / "other_path" / "original_file.vhd" - ) + design_unit.original_file_name = str(Path("tempdir") / "other_path" / "original_file.vhd") design_unit.generic_names = ["runner_cfg", "tb_path"] config = Configuration("name", design_unit, sim_options={"ghdl.elab_e": True}) @@ -222,9 +204,7 @@ def test_elaborate_e_project(self): simif = GHDLInterface(prefix="prefix", output_path="") simif._vhdl_standard = VHDL.standard("2008") # pylint: disable=protected-access simif._project = Project() # pylint: disable=protected-access - simif._project.add_library( # pylint: disable=protected-access - "lib", "lib_path" - ) + simif._project.add_library("lib", "lib_path") # pylint: disable=protected-access self.assertEqual( simif._get_command( # pylint: disable=protected-access diff --git a/tests/unit/test_incisive_interface.py b/tests/unit/test_incisive_interface.py index 376da8327..4376f6520 100644 --- a/tests/unit/test_incisive_interface.py +++ b/tests/unit/test_incisive_interface.py @@ -31,23 +31,17 @@ class 
TestIncisiveInterface(unittest.TestCase): @mock.patch("vunit.sim_if.incisive.IncisiveInterface.find_cds_root_virtuoso") @mock.patch("vunit.sim_if.incisive.IncisiveInterface.find_cds_root_irun") @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") - def test_compile_project_vhdl_2008( - self, check_output, find_cds_root_irun, find_cds_root_virtuoso - ): + def test_compile_project_vhdl_2008(self, check_output, find_cds_root_irun, find_cds_root_virtuoso): find_cds_root_irun.return_value = "cds_root_irun" find_cds_root_virtuoso.return_value = None simif = IncisiveInterface(prefix="prefix", output_path=self.output_path) project = Project() project.add_library("lib", "lib_path") write_file("file.vhd", "") - project.add_source_file( - "file.vhd", "lib", file_type="vhdl", vhdl_standard=VHDL.standard("2008") - ) + project.add_source_file("file.vhd", "lib", file_type="vhdl", vhdl_standard=VHDL.standard("2008")) simif.compile_project(project) args_file = str(Path(self.output_path) / "irun_compile_vhdl_file_lib.args") - check_output.assert_called_once_with( - [str(Path("prefix") / "irun"), "-f", args_file], env=simif.get_env() - ) + check_output.assert_called_once_with([str(Path("prefix") / "irun"), "-f", args_file], env=simif.get_env()) self.assertEqual( read_file(args_file).splitlines(), [ @@ -59,8 +53,7 @@ def test_compile_project_vhdl_2008( "-v200x -extv200x", "-work work", '-cdslib "%s"' % str(Path(self.output_path) / "cds.lib"), - '-log "%s"' - % str(Path(self.output_path) / "irun_compile_vhdl_file_lib.log"), + '-log "%s"' % str(Path(self.output_path) / "irun_compile_vhdl_file_lib.log"), "-quiet", '-nclibdirname "."', "-makelib lib_path", @@ -86,23 +79,17 @@ def test_compile_project_vhdl_2008( @mock.patch("vunit.sim_if.incisive.IncisiveInterface.find_cds_root_virtuoso") @mock.patch("vunit.sim_if.incisive.IncisiveInterface.find_cds_root_irun") @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") - def test_compile_project_vhdl_2002( - self, check_output, find_cds_root_irun, find_cds_root_virtuoso - ): + def test_compile_project_vhdl_2002(self, check_output, find_cds_root_irun, find_cds_root_virtuoso): find_cds_root_irun.return_value = "cds_root_irun" find_cds_root_virtuoso.return_value = None simif = IncisiveInterface(prefix="prefix", output_path=self.output_path) project = Project() project.add_library("lib", "lib_path") write_file("file.vhd", "") - project.add_source_file( - "file.vhd", "lib", file_type="vhdl", vhdl_standard=VHDL.standard("2002") - ) + project.add_source_file("file.vhd", "lib", file_type="vhdl", vhdl_standard=VHDL.standard("2002")) simif.compile_project(project) args_file = str(Path(self.output_path) / "irun_compile_vhdl_file_lib.args") - check_output.assert_called_once_with( - [str(Path("prefix") / "irun"), "-f", args_file], env=simif.get_env() - ) + check_output.assert_called_once_with([str(Path("prefix") / "irun"), "-f", args_file], env=simif.get_env()) self.assertEqual( read_file(args_file).splitlines(), [ @@ -114,8 +101,7 @@ def test_compile_project_vhdl_2002( "-v200x -extv200x", "-work work", '-cdslib "%s"' % str(Path(self.output_path) / "cds.lib"), - '-log "%s"' - % str(Path(self.output_path) / "irun_compile_vhdl_file_lib.log"), + '-log "%s"' % str(Path(self.output_path) / "irun_compile_vhdl_file_lib.log"), "-quiet", '-nclibdirname "."', "-makelib lib_path", @@ -127,23 +113,17 @@ def test_compile_project_vhdl_2002( @mock.patch("vunit.sim_if.incisive.IncisiveInterface.find_cds_root_virtuoso") 
@mock.patch("vunit.sim_if.incisive.IncisiveInterface.find_cds_root_irun") @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") - def test_compile_project_vhdl_93( - self, check_output, find_cds_root_irun, find_cds_root_virtuoso - ): + def test_compile_project_vhdl_93(self, check_output, find_cds_root_irun, find_cds_root_virtuoso): find_cds_root_irun.return_value = "cds_root_irun" find_cds_root_virtuoso.return_value = None simif = IncisiveInterface(prefix="prefix", output_path=self.output_path) project = Project() project.add_library("lib", "lib_path") write_file("file.vhd", "") - project.add_source_file( - "file.vhd", "lib", file_type="vhdl", vhdl_standard=VHDL.standard("93") - ) + project.add_source_file("file.vhd", "lib", file_type="vhdl", vhdl_standard=VHDL.standard("93")) simif.compile_project(project) args_file = str(Path(self.output_path) / "irun_compile_vhdl_file_lib.args") - check_output.assert_called_once_with( - [str(Path("prefix") / "irun"), "-f", args_file], env=simif.get_env() - ) + check_output.assert_called_once_with([str(Path("prefix") / "irun"), "-f", args_file], env=simif.get_env()) self.assertEqual( read_file(args_file).splitlines(), [ @@ -155,8 +135,7 @@ def test_compile_project_vhdl_93( "-v93", "-work work", '-cdslib "%s"' % str(Path(self.output_path) / "cds.lib"), - '-log "%s"' - % str(Path(self.output_path) / "irun_compile_vhdl_file_lib.log"), + '-log "%s"' % str(Path(self.output_path) / "irun_compile_vhdl_file_lib.log"), "-quiet", '-nclibdirname "."', "-makelib lib_path", @@ -168,9 +147,7 @@ def test_compile_project_vhdl_93( @mock.patch("vunit.sim_if.incisive.IncisiveInterface.find_cds_root_virtuoso") @mock.patch("vunit.sim_if.incisive.IncisiveInterface.find_cds_root_irun") @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") - def test_compile_project_vhdl_extra_flags( - self, check_output, find_cds_root_irun, find_cds_root_virtuoso - ): + def test_compile_project_vhdl_extra_flags(self, check_output, find_cds_root_irun, find_cds_root_virtuoso): find_cds_root_irun.return_value = "cds_root_irun" find_cds_root_virtuoso.return_value = None simif = IncisiveInterface(prefix="prefix", output_path=self.output_path) @@ -181,9 +158,7 @@ def test_compile_project_vhdl_extra_flags( source_file.set_compile_option("incisive.irun_vhdl_flags", ["custom", "flags"]) simif.compile_project(project) args_file = str(Path(self.output_path) / "irun_compile_vhdl_file_lib.args") - check_output.assert_called_once_with( - [str(Path("prefix") / "irun"), "-f", args_file], env=simif.get_env() - ) + check_output.assert_called_once_with([str(Path("prefix") / "irun"), "-f", args_file], env=simif.get_env()) self.assertEqual( read_file(args_file).splitlines(), [ @@ -195,8 +170,7 @@ def test_compile_project_vhdl_extra_flags( "-v200x -extv200x", "-work work", '-cdslib "%s"' % str(Path(self.output_path) / "cds.lib"), - '-log "%s"' - % str(Path(self.output_path) / "irun_compile_vhdl_file_lib.log"), + '-log "%s"' % str(Path(self.output_path) / "irun_compile_vhdl_file_lib.log"), "-quiet", "custom", "flags", @@ -210,23 +184,17 @@ def test_compile_project_vhdl_extra_flags( @mock.patch("vunit.sim_if.incisive.IncisiveInterface.find_cds_root_virtuoso") @mock.patch("vunit.sim_if.incisive.IncisiveInterface.find_cds_root_irun") @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") - def test_compile_project_vhdl_hdlvar( - self, check_output, find_cds_root_irun, find_cds_root_virtuoso - ): + def test_compile_project_vhdl_hdlvar(self, check_output, 
find_cds_root_irun, find_cds_root_virtuoso): find_cds_root_irun.return_value = "cds_root_irun" find_cds_root_virtuoso.return_value = None - simif = IncisiveInterface( - prefix="prefix", output_path=self.output_path, hdlvar="custom_hdlvar" - ) + simif = IncisiveInterface(prefix="prefix", output_path=self.output_path, hdlvar="custom_hdlvar") project = Project() project.add_library("lib", "lib_path") write_file("file.vhd", "") project.add_source_file("file.vhd", "lib", file_type="vhdl") simif.compile_project(project) args_file = str(Path(self.output_path) / "irun_compile_vhdl_file_lib.args") - check_output.assert_called_once_with( - [str(Path("prefix") / "irun"), "-f", args_file], env=simif.get_env() - ) + check_output.assert_called_once_with([str(Path("prefix") / "irun"), "-f", args_file], env=simif.get_env()) self.assertEqual( read_file(args_file).splitlines(), [ @@ -239,8 +207,7 @@ def test_compile_project_vhdl_hdlvar( "-work work", '-cdslib "%s"' % str(Path(self.output_path) / "cds.lib"), '-hdlvar "custom_hdlvar"', - '-log "%s"' - % str(Path(self.output_path) / "irun_compile_vhdl_file_lib.log"), + '-log "%s"' % str(Path(self.output_path) / "irun_compile_vhdl_file_lib.log"), "-quiet", '-nclibdirname "."', "-makelib lib_path", @@ -252,9 +219,7 @@ def test_compile_project_vhdl_hdlvar( @mock.patch("vunit.sim_if.incisive.IncisiveInterface.find_cds_root_virtuoso") @mock.patch("vunit.sim_if.incisive.IncisiveInterface.find_cds_root_irun") @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") - def test_compile_project_verilog( - self, check_output, find_cds_root_irun, find_cds_root_virtuoso - ): + def test_compile_project_verilog(self, check_output, find_cds_root_irun, find_cds_root_virtuoso): find_cds_root_irun.return_value = "cds_root_irun" find_cds_root_virtuoso.return_value = None simif = IncisiveInterface(prefix="prefix", output_path=self.output_path) @@ -264,9 +229,7 @@ def test_compile_project_verilog( project.add_source_file("file.v", "lib", file_type="verilog") simif.compile_project(project) args_file = str(Path(self.output_path) / "irun_compile_verilog_file_lib.args") - check_output.assert_called_once_with( - [str(Path("prefix") / "irun"), "-f", args_file], env=simif.get_env() - ) + check_output.assert_called_once_with([str(Path("prefix") / "irun"), "-f", args_file], env=simif.get_env()) self.assertEqual( read_file(args_file).splitlines(), [ @@ -278,8 +241,7 @@ def test_compile_project_verilog( "-nowarn DLCVAR", "-work work", '-cdslib "%s"' % str(Path(self.output_path) / "cds.lib"), - '-log "%s"' - % str(Path(self.output_path) / "irun_compile_verilog_file_lib.log"), + '-log "%s"' % str(Path(self.output_path) / "irun_compile_verilog_file_lib.log"), "-quiet", '-incdir "cds_root_irun/tools/spectre/etc/ahdl/"', '-nclibdirname "."', @@ -292,9 +254,7 @@ def test_compile_project_verilog( @mock.patch("vunit.sim_if.incisive.IncisiveInterface.find_cds_root_virtuoso") @mock.patch("vunit.sim_if.incisive.IncisiveInterface.find_cds_root_irun") @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") - def test_compile_project_system_verilog( - self, check_output, find_cds_root_irun, find_cds_root_virtuoso - ): + def test_compile_project_system_verilog(self, check_output, find_cds_root_irun, find_cds_root_virtuoso): find_cds_root_irun.return_value = "cds_root_irun" find_cds_root_virtuoso.return_value = None simif = IncisiveInterface(prefix="prefix", output_path=self.output_path) @@ -304,9 +264,7 @@ def test_compile_project_system_verilog( 
project.add_source_file("file.sv", "lib", file_type="systemverilog") simif.compile_project(project) args_file = str(Path(self.output_path) / "irun_compile_verilog_file_lib.args") - check_output.assert_called_once_with( - [str(Path("prefix") / "irun"), "-f", args_file], env=simif.get_env() - ) + check_output.assert_called_once_with([str(Path("prefix") / "irun"), "-f", args_file], env=simif.get_env()) self.assertEqual( read_file(args_file).splitlines(), [ @@ -318,8 +276,7 @@ def test_compile_project_system_verilog( "-nowarn DLCVAR", "-work work", '-cdslib "%s"' % str(Path(self.output_path) / "cds.lib"), - '-log "%s"' - % str(Path(self.output_path) / "irun_compile_verilog_file_lib.log"), + '-log "%s"' % str(Path(self.output_path) / "irun_compile_verilog_file_lib.log"), "-quiet", '-incdir "cds_root_irun/tools/spectre/etc/ahdl/"', '-nclibdirname "."', @@ -346,9 +303,7 @@ def test_compile_project_system_verilog( @mock.patch("vunit.sim_if.incisive.IncisiveInterface.find_cds_root_virtuoso") @mock.patch("vunit.sim_if.incisive.IncisiveInterface.find_cds_root_irun") @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") - def test_compile_project_verilog_extra_flags( - self, check_output, find_cds_root_irun, find_cds_root_virtuoso - ): + def test_compile_project_verilog_extra_flags(self, check_output, find_cds_root_irun, find_cds_root_virtuoso): find_cds_root_irun.return_value = "cds_root_irun" find_cds_root_virtuoso.return_value = None simif = IncisiveInterface(prefix="prefix", output_path=self.output_path) @@ -356,14 +311,10 @@ def test_compile_project_verilog_extra_flags( project.add_library("lib", "lib_path") write_file("file.v", "") source_file = project.add_source_file("file.v", "lib", file_type="verilog") - source_file.set_compile_option( - "incisive.irun_verilog_flags", ["custom", "flags"] - ) + source_file.set_compile_option("incisive.irun_verilog_flags", ["custom", "flags"]) simif.compile_project(project) args_file = str(Path(self.output_path) / "irun_compile_verilog_file_lib.args") - check_output.assert_called_once_with( - [str(Path("prefix") / "irun"), "-f", args_file], env=simif.get_env() - ) + check_output.assert_called_once_with([str(Path("prefix") / "irun"), "-f", args_file], env=simif.get_env()) self.assertEqual( read_file(args_file).splitlines(), [ @@ -377,8 +328,7 @@ def test_compile_project_verilog_extra_flags( "custom", "flags", '-cdslib "%s"' % str(Path(self.output_path) / "cds.lib"), - '-log "%s"' - % str(Path(self.output_path) / "irun_compile_verilog_file_lib.log"), + '-log "%s"' % str(Path(self.output_path) / "irun_compile_verilog_file_lib.log"), "-quiet", '-incdir "cds_root_irun/tools/spectre/etc/ahdl/"', '-nclibdirname "."', @@ -391,23 +341,17 @@ def test_compile_project_verilog_extra_flags( @mock.patch("vunit.sim_if.incisive.IncisiveInterface.find_cds_root_virtuoso") @mock.patch("vunit.sim_if.incisive.IncisiveInterface.find_cds_root_irun") @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") - def test_compile_project_verilog_include( - self, check_output, find_cds_root_irun, find_cds_root_virtuoso - ): + def test_compile_project_verilog_include(self, check_output, find_cds_root_irun, find_cds_root_virtuoso): find_cds_root_irun.return_value = "cds_root_irun" find_cds_root_virtuoso.return_value = None simif = IncisiveInterface(prefix="prefix", output_path=self.output_path) project = Project() project.add_library("lib", "lib_path") write_file("file.v", "") - project.add_source_file( - "file.v", "lib", file_type="verilog", 
include_dirs=["include"]
-        )
+        project.add_source_file("file.v", "lib", file_type="verilog", include_dirs=["include"])
         simif.compile_project(project)
         args_file = str(Path(self.output_path) / "irun_compile_verilog_file_lib.args")
-        check_output.assert_called_once_with(
-            [str(Path("prefix") / "irun"), "-f", args_file], env=simif.get_env()
-        )
+        check_output.assert_called_once_with([str(Path("prefix") / "irun"), "-f", args_file], env=simif.get_env())
         self.assertEqual(
             read_file(args_file).splitlines(),
             [
@@ -419,8 +363,7 @@ def test_compile_project_verilog_include(
                 "-nowarn DLCVAR",
                 "-work work",
                 '-cdslib "%s"' % str(Path(self.output_path) / "cds.lib"),
-                '-log "%s"'
-                % str(Path(self.output_path) / "irun_compile_verilog_file_lib.log"),
+                '-log "%s"' % str(Path(self.output_path) / "irun_compile_verilog_file_lib.log"),
                 "-quiet",
                 '-incdir "include"',
                 '-incdir "cds_root_irun/tools/spectre/etc/ahdl/"',
@@ -434,23 +377,17 @@ def test_compile_project_verilog_include(
     @mock.patch("vunit.sim_if.incisive.IncisiveInterface.find_cds_root_virtuoso")
     @mock.patch("vunit.sim_if.incisive.IncisiveInterface.find_cds_root_irun")
     @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="")
-    def test_compile_project_verilog_define(
-        self, check_output, find_cds_root_irun, find_cds_root_virtuoso
-    ):
+    def test_compile_project_verilog_define(self, check_output, find_cds_root_irun, find_cds_root_virtuoso):
         find_cds_root_irun.return_value = "cds_root_irun"
         find_cds_root_virtuoso.return_value = None
         simif = IncisiveInterface(prefix="prefix", output_path=self.output_path)
         project = Project()
         project.add_library("lib", "lib_path")
         write_file("file.v", "")
-        project.add_source_file(
-            "file.v", "lib", file_type="verilog", defines=dict(defname="defval")
-        )
+        project.add_source_file("file.v", "lib", file_type="verilog", defines=dict(defname="defval"))
         simif.compile_project(project)
         args_file = str(Path(self.output_path) / "irun_compile_verilog_file_lib.args")
-        check_output.assert_called_once_with(
-            [str(Path("prefix") / "irun"), "-f", args_file], env=simif.get_env()
-        )
+        check_output.assert_called_once_with([str(Path("prefix") / "irun"), "-f", args_file], env=simif.get_env())
         self.assertEqual(
             read_file(args_file).splitlines(),
             [
@@ -462,8 +399,7 @@ def test_compile_project_verilog_define(
                 "-nowarn DLCVAR",
                 "-work work",
                 '-cdslib "%s"' % str(Path(self.output_path) / "cds.lib"),
-                '-log "%s"'
-                % str(Path(self.output_path) / "irun_compile_verilog_file_lib.log"),
+                '-log "%s"' % str(Path(self.output_path) / "irun_compile_verilog_file_lib.log"),
                 "-quiet",
                 '-incdir "cds_root_irun/tools/spectre/etc/ahdl/"',
                 "-define defname=defval",
@@ -477,25 +413,17 @@ def test_compile_project_verilog_define(
     @mock.patch("vunit.sim_if.incisive.IncisiveInterface.find_cds_root_virtuoso")
     @mock.patch("vunit.sim_if.incisive.IncisiveInterface.find_cds_root_irun")
     @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="")
-    def test_compile_project_verilog_hdlvar(
-        self, check_output, find_cds_root_irun, find_cds_root_virtuoso
-    ):
+    def test_compile_project_verilog_hdlvar(self, check_output, find_cds_root_irun, find_cds_root_virtuoso):
         find_cds_root_irun.return_value = "cds_root_irun"
         find_cds_root_virtuoso.return_value = None
-        simif = IncisiveInterface(
-            prefix="prefix", output_path=self.output_path, hdlvar="custom_hdlvar"
-        )
+        simif = IncisiveInterface(prefix="prefix", output_path=self.output_path, hdlvar="custom_hdlvar")
         project = Project()
         project.add_library("lib", "lib_path")
         write_file("file.v", "")
-        project.add_source_file(
-            "file.v", "lib", file_type="verilog", defines=dict(defname="defval")
-        )
+        project.add_source_file("file.v", "lib", file_type="verilog", defines=dict(defname="defval"))
         simif.compile_project(project)
         args_file = str(Path(self.output_path) / "irun_compile_verilog_file_lib.args")
-        check_output.assert_called_once_with(
-            [str(Path("prefix") / "irun"), "-f", args_file], env=simif.get_env()
-        )
+        check_output.assert_called_once_with([str(Path("prefix") / "irun"), "-f", args_file], env=simif.get_env())
         self.assertEqual(
             read_file(args_file).splitlines(),
             [
@@ -508,8 +436,7 @@ def test_compile_project_verilog_hdlvar(
                 "-work work",
                 '-cdslib "%s"' % str(Path(self.output_path) / "cds.lib"),
                 '-hdlvar "custom_hdlvar"',
-                '-log "%s"'
-                % str(Path(self.output_path) / "irun_compile_verilog_file_lib.log"),
+                '-log "%s"' % str(Path(self.output_path) / "irun_compile_verilog_file_lib.log"),
                 "-quiet",
                 '-incdir "cds_root_irun/tools/spectre/etc/ahdl/"',
                 "-define defname=defval",
@@ -560,9 +487,7 @@ def test_create_cds_lib_virtuoso(self, find_cds_root_irun, find_cds_root_virtuos
     @mock.patch("vunit.sim_if.incisive.IncisiveInterface.find_cds_root_virtuoso")
     @mock.patch("vunit.sim_if.incisive.IncisiveInterface.find_cds_root_irun")
     @mock.patch("vunit.sim_if.incisive.run_command", autospec=True, return_value=True)
-    def test_simulate_vhdl(
-        self, run_command, find_cds_root_irun, find_cds_root_virtuoso
-    ):
+    def test_simulate_vhdl(self, run_command, find_cds_root_irun, find_cds_root_virtuoso):
         find_cds_root_irun.return_value = "cds_root_irun"
         find_cds_root_virtuoso.return_value = None
         simif = IncisiveInterface(prefix="prefix", output_path=self.output_path)
@@ -572,19 +497,13 @@ def test_simulate_vhdl(
         write_file("file.vhd", "")
         project.add_source_file("file.vhd", "lib", file_type="vhdl")
 
-        with mock.patch(
-            "vunit.sim_if.check_output", autospec=True, return_value=""
-        ) as dummy:
+        with mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") as dummy:
             simif.compile_project(project)
 
         config = make_config()
         self.assertTrue(simif.simulate("suite_output_path", "test_suite_name", config))
-        elaborate_args_file = str(
-            Path("suite_output_path") / simif.name / "irun_elaborate.args"
-        )
-        simulate_args_file = str(
-            Path("suite_output_path") / simif.name / "irun_simulate.args"
-        )
+        elaborate_args_file = str(Path("suite_output_path") / simif.name / "irun_elaborate.args")
+        simulate_args_file = str(Path("suite_output_path") / simif.name / "irun_simulate.args")
         run_command.assert_has_calls(
             [
                 mock.call(
@@ -620,8 +539,7 @@ def test_simulate_vhdl(
                 "-work work",
                 '-nclibdirname "%s"' % str(Path(self.output_path) / "libraries"),
                 '-cdslib "%s"' % str(Path(self.output_path) / "cds.lib"),
-                '-log "%s"'
-                % str(Path("suite_output_path") / simif.name / "irun_elaborate.log"),
+                '-log "%s"' % str(Path("suite_output_path") / simif.name / "irun_elaborate.log"),
                 "-quiet",
                 '-reflib "lib_path"',
                 "-access +r",
@@ -645,8 +563,7 @@ def test_simulate_vhdl(
                 "-work work",
                 '-nclibdirname "%s"' % str(Path(self.output_path) / "libraries"),
                 '-cdslib "%s"' % str(Path(self.output_path) / "cds.lib"),
-                '-log "%s"'
-                % str(Path("suite_output_path") / simif.name / "irun_simulate.log"),
+                '-log "%s"' % str(Path("suite_output_path") / simif.name / "irun_simulate.log"),
                 "-quiet",
                 '-reflib "lib_path"',
                 "-access +r",
@@ -658,9 +575,7 @@ def test_simulate_vhdl(
     @mock.patch("vunit.sim_if.incisive.IncisiveInterface.find_cds_root_virtuoso")
     @mock.patch("vunit.sim_if.incisive.IncisiveInterface.find_cds_root_irun")
     @mock.patch("vunit.sim_if.incisive.run_command", autospec=True, return_value=True)
-    def test_simulate_verilog(
-        self, run_command, find_cds_root_irun, find_cds_root_virtuoso
-    ):
+    def test_simulate_verilog(self, run_command, find_cds_root_irun, find_cds_root_virtuoso):
         find_cds_root_irun.return_value = "cds_root_irun"
         find_cds_root_virtuoso.return_value = None
         simif = IncisiveInterface(prefix="prefix", output_path=self.output_path)
@@ -670,19 +585,13 @@ def test_simulate_verilog(
         write_file("file.vhd", "")
         project.add_source_file("file.vhd", "lib", file_type="vhdl")
 
-        with mock.patch(
-            "vunit.sim_if.check_output", autospec=True, return_value=""
-        ) as dummy:
+        with mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") as dummy:
             simif.compile_project(project)
 
         config = make_config(verilog=True)
         self.assertTrue(simif.simulate("suite_output_path", "test_suite_name", config))
-        elaborate_args_file = str(
-            Path("suite_output_path") / simif.name / "irun_elaborate.args"
-        )
-        simulate_args_file = str(
-            Path("suite_output_path") / simif.name / "irun_simulate.args"
-        )
+        elaborate_args_file = str(Path("suite_output_path") / simif.name / "irun_elaborate.args")
+        simulate_args_file = str(Path("suite_output_path") / simif.name / "irun_simulate.args")
         run_command.assert_has_calls(
             [
                 mock.call(
@@ -718,8 +627,7 @@ def test_simulate_verilog(
                 "-work work",
                 '-nclibdirname "%s"' % str(Path(self.output_path) / "libraries"),
                 '-cdslib "%s"' % str(Path(self.output_path) / "cds.lib"),
-                '-log "%s"'
-                % str(Path("suite_output_path") / simif.name / "irun_elaborate.log"),
+                '-log "%s"' % str(Path("suite_output_path") / simif.name / "irun_elaborate.log"),
                 "-quiet",
                 '-reflib "lib_path"',
                 "-access +r",
@@ -743,8 +651,7 @@ def test_simulate_verilog(
                 "-work work",
                 '-nclibdirname "%s"' % str(Path(self.output_path) / "libraries"),
                 '-cdslib "%s"' % str(Path(self.output_path) / "cds.lib"),
-                '-log "%s"'
-                % str(Path("suite_output_path") / simif.name / "irun_simulate.log"),
+                '-log "%s"' % str(Path("suite_output_path") / simif.name / "irun_simulate.log"),
                 "-quiet",
                 '-reflib "lib_path"',
                 "-access +r",
@@ -756,22 +663,14 @@ def test_simulate_verilog(
     @mock.patch("vunit.sim_if.incisive.IncisiveInterface.find_cds_root_virtuoso")
     @mock.patch("vunit.sim_if.incisive.IncisiveInterface.find_cds_root_irun")
     @mock.patch("vunit.sim_if.incisive.run_command", autospec=True, return_value=True)
-    def test_simulate_extra_flags(
-        self, run_command, find_cds_root_irun, find_cds_root_virtuoso
-    ):
+    def test_simulate_extra_flags(self, run_command, find_cds_root_irun, find_cds_root_virtuoso):
         find_cds_root_irun.return_value = "cds_root_irun"
         find_cds_root_virtuoso.return_value = None
         simif = IncisiveInterface(prefix="prefix", output_path=self.output_path)
-        config = make_config(
-            sim_options={"incisive.irun_sim_flags": ["custom", "flags"]}
-        )
+        config = make_config(sim_options={"incisive.irun_sim_flags": ["custom", "flags"]})
         self.assertTrue(simif.simulate("suite_output_path", "test_suite_name", config))
-        elaborate_args_file = str(
-            Path("suite_output_path") / simif.name / "irun_elaborate.args"
-        )
-        simulate_args_file = str(
-            Path("suite_output_path") / simif.name / "irun_simulate.args"
-        )
+        elaborate_args_file = str(Path("suite_output_path") / simif.name / "irun_elaborate.args")
+        simulate_args_file = str(Path("suite_output_path") / simif.name / "irun_simulate.args")
         run_command.assert_has_calls(
             [
                 mock.call(
@@ -802,22 +701,14 @@ def test_simulate_extra_flags(
@mock.patch("vunit.sim_if.incisive.IncisiveInterface.find_cds_root_virtuoso") @mock.patch("vunit.sim_if.incisive.IncisiveInterface.find_cds_root_irun") @mock.patch("vunit.sim_if.incisive.run_command", autospec=True, return_value=True) - def test_simulate_generics_and_parameters( - self, run_command, find_cds_root_irun, find_cds_root_virtuoso - ): + def test_simulate_generics_and_parameters(self, run_command, find_cds_root_irun, find_cds_root_virtuoso): find_cds_root_irun.return_value = "cds_root_irun" find_cds_root_virtuoso.return_value = None simif = IncisiveInterface(prefix="prefix", output_path=self.output_path) - config = make_config( - verilog=True, generics={"genstr": "genval", "genint": 1, "genbool": True} - ) + config = make_config(verilog=True, generics={"genstr": "genval", "genint": 1, "genbool": True}) self.assertTrue(simif.simulate("suite_output_path", "test_suite_name", config)) - elaborate_args_file = str( - Path("suite_output_path") / simif.name / "irun_elaborate.args" - ) - simulate_args_file = str( - Path("suite_output_path") / simif.name / "irun_simulate.args" - ) + elaborate_args_file = str(Path("suite_output_path") / simif.name / "irun_elaborate.args") + simulate_args_file = str(Path("suite_output_path") / simif.name / "irun_simulate.args") run_command.assert_has_calls( [ mock.call( @@ -846,22 +737,14 @@ def test_simulate_generics_and_parameters( @mock.patch("vunit.sim_if.incisive.IncisiveInterface.find_cds_root_virtuoso") @mock.patch("vunit.sim_if.incisive.IncisiveInterface.find_cds_root_irun") @mock.patch("vunit.sim_if.incisive.run_command", autospec=True, return_value=True) - def test_simulate_hdlvar( - self, run_command, find_cds_root_irun, find_cds_root_virtuoso - ): + def test_simulate_hdlvar(self, run_command, find_cds_root_irun, find_cds_root_virtuoso): find_cds_root_irun.return_value = "cds_root_irun" find_cds_root_virtuoso.return_value = None - simif = IncisiveInterface( - prefix="prefix", output_path=self.output_path, hdlvar="custom_hdlvar" - ) + simif = IncisiveInterface(prefix="prefix", output_path=self.output_path, hdlvar="custom_hdlvar") config = make_config() self.assertTrue(simif.simulate("suite_output_path", "test_suite_name", config)) - elaborate_args_file = str( - Path("suite_output_path") / simif.name / "irun_elaborate.args" - ) - simulate_args_file = str( - Path("suite_output_path") / simif.name / "irun_simulate.args" - ) + elaborate_args_file = str(Path("suite_output_path") / simif.name / "irun_elaborate.args") + simulate_args_file = str(Path("suite_output_path") / simif.name / "irun_simulate.args") run_command.assert_has_calls( [ mock.call( @@ -893,14 +776,8 @@ def test_elaborate(self, run_command, find_cds_root_irun, find_cds_root_virtuoso find_cds_root_virtuoso.return_value = None simif = IncisiveInterface(prefix="prefix", output_path=self.output_path) config = make_config(verilog=True) - self.assertTrue( - simif.simulate( - "suite_output_path", "test_suite_name", config, elaborate_only=True - ) - ) - elaborate_args_file = str( - Path("suite_output_path") / simif.name / "irun_elaborate.args" - ) + self.assertTrue(simif.simulate("suite_output_path", "test_suite_name", config, elaborate_only=True)) + elaborate_args_file = str(Path("suite_output_path") / simif.name / "irun_elaborate.args") run_command.assert_has_calls( [ mock.call( @@ -931,8 +808,7 @@ def test_elaborate(self, run_command, find_cds_root_irun, find_cds_root_virtuoso "-work work", '-nclibdirname "%s"' % str(Path(self.output_path) / "libraries"), '-cdslib "%s"' % 
str(Path(self.output_path) / "cds.lib"), - '-log "%s"' - % str(Path("suite_output_path") / simif.name / "irun_elaborate.log"), + '-log "%s"' % str(Path("suite_output_path") / simif.name / "irun_elaborate.log"), "-quiet", "-access +r", '-input "@run"', @@ -943,17 +819,13 @@ def test_elaborate(self, run_command, find_cds_root_irun, find_cds_root_virtuoso @mock.patch("vunit.sim_if.incisive.IncisiveInterface.find_cds_root_virtuoso") @mock.patch("vunit.sim_if.incisive.IncisiveInterface.find_cds_root_irun") @mock.patch("vunit.sim_if.incisive.run_command", autospec=True, return_value=False) - def test_elaborate_fail( - self, run_command, find_cds_root_irun, find_cds_root_virtuoso - ): + def test_elaborate_fail(self, run_command, find_cds_root_irun, find_cds_root_virtuoso): find_cds_root_irun.return_value = "cds_root_irun" find_cds_root_virtuoso.return_value = None simif = IncisiveInterface(prefix="prefix", output_path=self.output_path) config = make_config() self.assertFalse(simif.simulate("suite_output_path", "test_suite_name", config)) - elaborate_args_file = str( - Path("suite_output_path") / simif.name / "irun_elaborate.args" - ) + elaborate_args_file = str(Path("suite_output_path") / simif.name / "irun_elaborate.args") run_command.assert_has_calls( [ mock.call( @@ -970,23 +842,15 @@ def test_elaborate_fail( @mock.patch("vunit.sim_if.incisive.IncisiveInterface.find_cds_root_virtuoso") @mock.patch("vunit.sim_if.incisive.IncisiveInterface.find_cds_root_irun") - @mock.patch( - "vunit.sim_if.incisive.run_command", autospec=True, side_effect=[True, False] - ) - def test_simulate_fail( - self, run_command, find_cds_root_irun, find_cds_root_virtuoso - ): + @mock.patch("vunit.sim_if.incisive.run_command", autospec=True, side_effect=[True, False]) + def test_simulate_fail(self, run_command, find_cds_root_irun, find_cds_root_virtuoso): find_cds_root_irun.return_value = "cds_root_irun" find_cds_root_virtuoso.return_value = None simif = IncisiveInterface(prefix="prefix", output_path=self.output_path) config = make_config() self.assertFalse(simif.simulate("suite_output_path", "test_suite_name", config)) - elaborate_args_file = str( - Path("suite_output_path") / simif.name / "irun_elaborate.args" - ) - simulate_args_file = str( - Path("suite_output_path") / simif.name / "irun_simulate.args" - ) + elaborate_args_file = str(Path("suite_output_path") / simif.name / "irun_elaborate.args") + simulate_args_file = str(Path("suite_output_path") / simif.name / "irun_simulate.args") run_command.assert_has_calls( [ mock.call( @@ -1009,9 +873,7 @@ def test_simulate_fail( @mock.patch("vunit.sim_if.incisive.IncisiveInterface.find_cds_root_virtuoso") @mock.patch("vunit.sim_if.incisive.IncisiveInterface.find_cds_root_irun") @mock.patch("vunit.sim_if.incisive.run_command", autospec=True, return_value=True) - def test_simulate_gui( - self, run_command, find_cds_root_irun, find_cds_root_virtuoso - ): + def test_simulate_gui(self, run_command, find_cds_root_irun, find_cds_root_virtuoso): find_cds_root_irun.return_value = "cds_root_irun" find_cds_root_virtuoso.return_value = None @@ -1020,21 +882,13 @@ def test_simulate_gui( write_file("file.vhd", "") project.add_source_file("file.vhd", "lib", file_type="vhdl") - simif = IncisiveInterface( - prefix="prefix", output_path=self.output_path, gui=True - ) - with mock.patch( - "vunit.sim_if.check_output", autospec=True, return_value="" - ) as dummy: + simif = IncisiveInterface(prefix="prefix", output_path=self.output_path, gui=True) + with mock.patch("vunit.sim_if.check_output", 
autospec=True, return_value="") as dummy: simif.compile_project(project) config = make_config() self.assertTrue(simif.simulate("suite_output_path", "test_suite_name", config)) - elaborate_args_file = str( - Path("suite_output_path") / simif.name / "irun_elaborate.args" - ) - simulate_args_file = str( - Path("suite_output_path") / simif.name / "irun_simulate.args" - ) + elaborate_args_file = str(Path("suite_output_path") / simif.name / "irun_elaborate.args") + simulate_args_file = str(Path("suite_output_path") / simif.name / "irun_simulate.args") run_command.assert_has_calls( [ mock.call( @@ -1069,8 +923,7 @@ def test_simulate_gui( "-work work", '-nclibdirname "%s"' % str(Path(self.output_path) / "libraries"), '-cdslib "%s"' % str(Path(self.output_path) / "cds.lib"), - '-log "%s"' - % str(Path("suite_output_path") / simif.name / "irun_elaborate.log"), + '-log "%s"' % str(Path("suite_output_path") / simif.name / "irun_elaborate.log"), "-quiet", '-reflib "lib_path"', "-access +rwc", @@ -1094,8 +947,7 @@ def test_simulate_gui( "-work work", '-nclibdirname "%s"' % str(Path(self.output_path) / "libraries"), '-cdslib "%s"' % str(Path(self.output_path) / "cds.lib"), - '-log "%s"' - % str(Path("suite_output_path") / simif.name / "irun_simulate.log"), + '-log "%s"' % str(Path("suite_output_path") / simif.name / "irun_simulate.log"), "-quiet", '-reflib "lib_path"', "-access +rwc", diff --git a/tests/unit/test_modelsim_interface.py b/tests/unit/test_modelsim_interface.py index 82840b68c..c66f034b6 100644 --- a/tests/unit/test_modelsim_interface.py +++ b/tests/unit/test_modelsim_interface.py @@ -29,15 +29,11 @@ class TestModelSimInterface(unittest.TestCase): @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") @mock.patch("vunit.sim_if.modelsim.Process", autospec=True) def test_compile_project_vhdl_2008(self, process, check_output): - simif = ModelSimInterface( - prefix=self.prefix_path, output_path=self.output_path, persistent=False - ) + simif = ModelSimInterface(prefix=self.prefix_path, output_path=self.output_path, persistent=False) project = Project() project.add_library("lib", "lib_path") write_file("file.vhd", "") - project.add_source_file( - "file.vhd", "lib", file_type="vhdl", vhdl_standard=VHDL.standard("2008") - ) + project.add_source_file("file.vhd", "lib", file_type="vhdl", vhdl_standard=VHDL.standard("2008")) simif.compile_project(project) process_args = [str(Path(self.prefix_path) / "vlib"), "-unix", "lib_path"] process.assert_called_once_with(process_args, env=simif.get_env()) @@ -56,15 +52,11 @@ def test_compile_project_vhdl_2008(self, process, check_output): @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") @mock.patch("vunit.sim_if.modelsim.Process", autospec=True) def test_compile_project_vhdl_2002(self, process, check_output): - simif = ModelSimInterface( - prefix=self.prefix_path, output_path=self.output_path, persistent=False - ) + simif = ModelSimInterface(prefix=self.prefix_path, output_path=self.output_path, persistent=False) project = Project() project.add_library("lib", "lib_path") write_file("file.vhd", "") - project.add_source_file( - "file.vhd", "lib", file_type="vhdl", vhdl_standard=VHDL.standard("2002") - ) + project.add_source_file("file.vhd", "lib", file_type="vhdl", vhdl_standard=VHDL.standard("2002")) simif.compile_project(project) process_args = [str(Path(self.prefix_path) / "vlib"), "-unix", "lib_path"] process.assert_called_once_with(process_args, env=simif.get_env()) @@ -83,15 +75,11 @@ def 
test_compile_project_vhdl_2002(self, process, check_output): @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") @mock.patch("vunit.sim_if.modelsim.Process", autospec=True) def test_compile_project_vhdl_93(self, process, check_output): - simif = ModelSimInterface( - prefix=self.prefix_path, output_path=self.output_path, persistent=False - ) + simif = ModelSimInterface(prefix=self.prefix_path, output_path=self.output_path, persistent=False) project = Project() project.add_library("lib", "lib_path") write_file("file.vhd", "") - project.add_source_file( - "file.vhd", "lib", file_type="vhdl", vhdl_standard=VHDL.standard("93") - ) + project.add_source_file("file.vhd", "lib", file_type="vhdl", vhdl_standard=VHDL.standard("93")) simif.compile_project(project) process_args = [str(Path(self.prefix_path) / "vlib"), "-unix", "lib_path"] process.assert_called_once_with(process_args, env=simif.get_env()) @@ -110,9 +98,7 @@ def test_compile_project_vhdl_93(self, process, check_output): @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") @mock.patch("vunit.sim_if.modelsim.Process", autospec=True) def test_compile_project_vhdl_extra_flags(self, process, check_output): - simif = ModelSimInterface( - prefix=self.prefix_path, output_path=self.output_path, persistent=False - ) + simif = ModelSimInterface(prefix=self.prefix_path, output_path=self.output_path, persistent=False) project = Project() project.add_library("lib", "lib_path") write_file("file.vhd", "") @@ -138,9 +124,7 @@ def test_compile_project_vhdl_extra_flags(self, process, check_output): @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") @mock.patch("vunit.sim_if.modelsim.Process", autospec=True) def test_compile_project_verilog(self, process, check_output): - simif = ModelSimInterface( - prefix=self.prefix_path, output_path=self.output_path, persistent=False - ) + simif = ModelSimInterface(prefix=self.prefix_path, output_path=self.output_path, persistent=False) project = Project() project.add_library("lib", "lib_path") write_file("file.v", "") @@ -164,9 +148,7 @@ def test_compile_project_verilog(self, process, check_output): @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") @mock.patch("vunit.sim_if.modelsim.Process", autospec=True) def test_compile_project_system_verilog(self, process, check_output): - simif = ModelSimInterface( - prefix=self.prefix_path, output_path=self.output_path, persistent=False - ) + simif = ModelSimInterface(prefix=self.prefix_path, output_path=self.output_path, persistent=False) project = Project() project.add_library("lib", "lib_path") write_file("file.sv", "") @@ -191,9 +173,7 @@ def test_compile_project_system_verilog(self, process, check_output): @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") @mock.patch("vunit.sim_if.modelsim.Process", autospec=True) def test_compile_project_verilog_extra_flags(self, process, check_output): - simif = ModelSimInterface( - prefix=self.prefix_path, output_path=self.output_path, persistent=False - ) + simif = ModelSimInterface(prefix=self.prefix_path, output_path=self.output_path, persistent=False) project = Project() project.add_library("lib", "lib_path") write_file("file.v", "") @@ -220,15 +200,11 @@ def test_compile_project_verilog_extra_flags(self, process, check_output): @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") @mock.patch("vunit.sim_if.modelsim.Process", autospec=True) def test_compile_project_verilog_include(self, process, 
check_output): - simif = ModelSimInterface( - prefix=self.prefix_path, output_path=self.output_path, persistent=False - ) + simif = ModelSimInterface(prefix=self.prefix_path, output_path=self.output_path, persistent=False) project = Project() project.add_library("lib", "lib_path") write_file("file.v", "") - project.add_source_file( - "file.v", "lib", file_type="verilog", include_dirs=["include"] - ) + project.add_source_file("file.v", "lib", file_type="verilog", include_dirs=["include"]) simif.compile_project(project) process_args = [str(Path(self.prefix_path) / "vlib"), "-unix", "lib_path"] process.assert_called_once_with(process_args, env=simif.get_env()) @@ -249,15 +225,11 @@ def test_compile_project_verilog_include(self, process, check_output): @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") @mock.patch("vunit.sim_if.modelsim.Process", autospec=True) def test_compile_project_verilog_define(self, process, check_output): - simif = ModelSimInterface( - prefix=self.prefix_path, output_path=self.output_path, persistent=False - ) + simif = ModelSimInterface(prefix=self.prefix_path, output_path=self.output_path, persistent=False) project = Project() project.add_library("lib", "lib_path") write_file("file.v", "") - project.add_source_file( - "file.v", "lib", file_type="verilog", defines={"defname": "defval"} - ) + project.add_source_file("file.v", "lib", file_type="verilog", defines={"defname": "defval"}) simif.compile_project(project) process_args = [str(Path(self.prefix_path) / "vlib"), "-unix", "lib_path"] process.assert_called_once_with(process_args, env=simif.get_env()) @@ -291,9 +263,7 @@ def test_copies_modelsim_ini_file_from_install(self): with open(user_modelsim_ini, "w") as fptr: fptr.write("user") - ModelSimInterface( - prefix=self.prefix_path, output_path=self.output_path, persistent=False - ) + ModelSimInterface(prefix=self.prefix_path, output_path=self.output_path, persistent=False) with open(modelsim_ini, "r") as fptr: self.assertEqual(fptr.read(), "installed") @@ -307,9 +277,7 @@ def test_copies_modelsim_ini_file_from_user(self): fptr.write("user") with set_env(VUNIT_MODELSIM_INI=user_modelsim_ini): - ModelSimInterface( - prefix=self.prefix_path, output_path=self.output_path, persistent=False - ) + ModelSimInterface(prefix=self.prefix_path, output_path=self.output_path, persistent=False) with open(modelsim_ini, "r") as fptr: self.assertEqual(fptr.read(), "user") @@ -326,9 +294,7 @@ def test_overwrites_modelsim_ini_file_from_install(self): with open(user_modelsim_ini, "w") as fptr: fptr.write("user") - ModelSimInterface( - prefix=self.prefix_path, output_path=self.output_path, persistent=False - ) + ModelSimInterface(prefix=self.prefix_path, output_path=self.output_path, persistent=False) with open(modelsim_ini, "r") as fptr: self.assertEqual(fptr.read(), "installed") @@ -345,9 +311,7 @@ def test_overwrites_modelsim_ini_file_from_user(self): fptr.write("user") with set_env(VUNIT_MODELSIM_INI=user_modelsim_ini): - ModelSimInterface( - prefix=self.prefix_path, output_path=self.output_path, persistent=False - ) + ModelSimInterface(prefix=self.prefix_path, output_path=self.output_path, persistent=False) with open(modelsim_ini, "r") as fptr: self.assertEqual(fptr.read(), "user") diff --git a/tests/unit/test_ostools.py b/tests/unit/test_ostools.py index 4be50c054..5861e592d 100644 --- a/tests/unit/test_ostools.py +++ b/tests/unit/test_ostools.py @@ -65,9 +65,7 @@ def test_run_error_subprocess(self): ) process = Process([sys.executable, python_script]) output = 
[] - self.assertRaises( - Process.NonZeroExitCode, process.consume_output, output.append - ) + self.assertRaises(Process.NonZeroExitCode, process.consume_output, output.append) self.assertEqual(output, ["error"]) def test_parses_stderr(self): diff --git a/tests/unit/test_project.py b/tests/unit/test_project.py index 08c7b50ae..11f054748 100644 --- a/tests/unit/test_project.py +++ b/tests/unit/test_project.py @@ -72,9 +72,7 @@ def test_parses_entity_architecture(self): """, ) - self.assert_has_entity( - file1, "foo", architecture_names=["arch", "arch2", "arch3"] - ) + self.assert_has_entity(file1, "foo", architecture_names=["arch", "arch2", "arch3"]) self.add_source_file( "lib", "file3.vhd", @@ -85,9 +83,7 @@ def test_parses_entity_architecture(self): """, ) - self.assert_has_entity( - file1, "foo", architecture_names=["arch", "arch2", "arch3", "arch4"] - ) + self.assert_has_entity(file1, "foo", architecture_names=["arch", "arch2", "arch3", "arch4"]) self.assert_has_architecture("file1.vhd", "arch", "foo") self.assert_has_architecture("file1.vhd", "arch2", "foo") self.assert_has_architecture("file2.vhd", "arch3", "foo") @@ -357,8 +353,7 @@ def test_error_on_case_insensitive_library_name_conflict(self): except RuntimeError as exception: self.assertEqual( str(exception), - "Library name 'lib' not case-insensitive unique. " - "Library name 'Lib' previously defined", + "Library name 'lib' not case-insensitive unique. " "Library name 'Lib' previously defined", ) else: raise AssertionError("RuntimeError not raised") @@ -855,9 +850,7 @@ def test_warning_on_duplicate_context(self): def test_error_on_adding_duplicate_library(self): self.project.add_library(logical_name="lib", directory="dir") - self.assertRaises( - ValueError, self.project.add_library, logical_name="lib", directory="dir" - ) + self.assertRaises(ValueError, self.project.add_library, logical_name="lib", directory="dir") def test_warning_on_duplicate_verilog_module(self): self.project.add_library("lib", "lib_path") @@ -935,18 +928,14 @@ def test_should_recompile_files_after_changing_vhdl_standard(self): self.project = Project() self.project.add_library("lib", "lib_path") - source_file = self.project.add_source_file( - "file_name.vhd", library_name="lib", vhdl_standard="2008" - ) + source_file = self.project.add_source_file("file_name.vhd", library_name="lib", vhdl_standard="2008") self.assert_should_recompile([source_file]) self.update(source_file) self.assert_should_recompile([]) self.project = Project() self.project.add_library("lib", "lib_path") - source_file = self.project.add_source_file( - "file_name.vhd", library_name="lib", vhdl_standard="2002" - ) + source_file = self.project.add_source_file("file_name.vhd", library_name="lib", vhdl_standard="2002") self.assert_should_recompile([source_file]) def test_add_compile_option(self): @@ -981,12 +970,8 @@ def test_compile_option_validation(self): self.project.add_library("lib", "lib_path") source_file = self.add_source_file("lib", "file.vhd", "") self.assertRaises(ValueError, source_file.set_compile_option, "foo", None) - self.assertRaises( - ValueError, source_file.set_compile_option, "ghdl.flags", None - ) - self.assertRaises( - ValueError, source_file.add_compile_option, "ghdl.flags", None - ) + self.assertRaises(ValueError, source_file.set_compile_option, "ghdl.flags", None) + self.assertRaises(ValueError, source_file.add_compile_option, "ghdl.flags", None) self.assertRaises(ValueError, source_file.get_compile_option, "foo") def 
test_should_recompile_files_affected_by_change_with_later_timestamp(self): @@ -1077,9 +1062,7 @@ def test_finds_component_instantiation_dependencies(self): self.assert_has_component_instantiation("top.vhd", "foo") self.assert_has_component_instantiation("top.vhd", "foo2") - dependencies = self.project.get_dependencies_in_compile_order( - [top], implementation_dependencies=True - ) + dependencies = self.project.get_dependencies_in_compile_order([top], implementation_dependencies=True) self.assertIn(comp1, dependencies) self.assertIn(comp1_arch, dependencies) self.assertIn(comp2, dependencies) @@ -1179,9 +1162,7 @@ def test_compiles_same_file_into_different_libraries(self): ) ) - self.assertNotEqual( - self.hash_file_name_of(pkgs[0]), self.hash_file_name_of(pkgs[1]) - ) + self.assertNotEqual(self.hash_file_name_of(pkgs[0]), self.hash_file_name_of(pkgs[1])) self.assertEqual(len(self.project.get_files_in_compile_order()), 5) self.assert_compiles(other_pkg, before=pkgs[0]) self.assert_compiles(other_pkg, before=pkgs[1]) @@ -1472,9 +1453,7 @@ def test_recompile_when_updating_defines(self): self.project = Project() self.project.add_library("lib", "lib_path") - mod1 = self.add_source_file( - "lib", "module1.v", contents1, defines={"foo": "bar"} - ) + mod1 = self.add_source_file("lib", "module1.v", contents1, defines={"foo": "bar"}) mod2 = self.add_source_file("lib", "module2.v", contents2) self.assert_should_recompile([mod1]) self.update(mod1) @@ -1483,9 +1462,7 @@ def test_recompile_when_updating_defines(self): self.project = Project() self.project.add_library("lib", "lib_path") - mod1 = self.add_source_file( - "lib", "module1.v", contents1, defines={"foo": "other_bar"} - ) + mod1 = self.add_source_file("lib", "module1.v", contents1, defines={"foo": "other_bar"}) mod2 = self.add_source_file("lib", "module2.v", contents2) self.assert_should_recompile([mod1]) self.update(mod1) @@ -1631,9 +1608,7 @@ def test_circular_dependencies_through_libraries(self): end architecture; """, ) - self.project.get_dependencies_in_compile_order( - [file3], implementation_dependencies=True - ) + self.project.get_dependencies_in_compile_order([file3], implementation_dependencies=True) def test_dependencies_on_multiple_libraries(self): """ @@ -1667,9 +1642,7 @@ def test_dependencies_on_multiple_libraries(self): begin my_buffer_i : buffer1 port map (D => D,Q => Q);end architecture; """, ) - dep_files = self.project.get_dependencies_in_compile_order( - [file3], implementation_dependencies=True - ) + dep_files = self.project.get_dependencies_in_compile_order([file3], implementation_dependencies=True) self.assertNotIn(lib2_file1_vhd, dep_files) def test_dependencies_on_separated_architecture(self): @@ -1725,9 +1698,7 @@ def test_dependencies_on_separated_architecture(self): end architecture; """, ) - dep_files = self.project.get_dependencies_in_compile_order( - [file3], implementation_dependencies=True - ) + dep_files = self.project.get_dependencies_in_compile_order([file3], implementation_dependencies=True) self.assertIn(file1_arch_vhd, dep_files) def test_dependencies_on_verilog_component(self): @@ -1757,9 +1728,7 @@ def test_dependencies_on_verilog_component(self): begin my_buffer_i : buffer1 port map (D => D,Q => Q);end architecture; """, ) - dep_files = self.project.get_dependencies_in_compile_order( - [file3], implementation_dependencies=True - ) + dep_files = self.project.get_dependencies_in_compile_order([file3], implementation_dependencies=True) self.assertIn(file1_v, dep_files) def 
create_dummy_three_file_project(self, update_file1=False): @@ -1832,9 +1801,7 @@ def test_add_source_file_has_vhdl_standard(self): for std in ("93", "2002", "2008", "2019"): project = Project() project.add_library("lib", "lib_path") - source_file = project.add_source_file( - "file.vhd", library_name="lib", file_type="vhdl", vhdl_standard=std - ) + source_file = project.add_source_file("file.vhd", library_name="lib", file_type="vhdl", vhdl_standard=std) self.assertEqual(source_file.get_vhdl_standard(), std) def test_add_source_file_has_no_parse_vhdl(self): @@ -1909,9 +1876,7 @@ def test_add_external_library_must_be_a_directory(self): try: self.project.add_library("lib3", "lib_path3", is_external=True) except ValueError as err: - self.assertEqual( - str(err), "External library must be a directory. Got 'lib_path3'" - ) + self.assertEqual(str(err), "External library must be a directory. Got 'lib_path3'") else: assert False, "ValueError not raised" @@ -1929,9 +1894,7 @@ def hash_file_name_of(self, source_file): """ Get the hash file name of a source_file """ - return self.project._hash_file_name_of( # pylint: disable=protected-access - source_file - ) + return self.project._hash_file_name_of(source_file) # pylint: disable=protected-access def update(self, source_file): """ @@ -1968,9 +1931,7 @@ def assert_has_package_body(self, source_file_name, package_name): """ Assert that there is a package body with package_name withing source_file_name """ - unit = self._find_design_unit( - source_file_name, "package body", package_name, False, package_name - ) + unit = self._find_design_unit(source_file_name, "package body", package_name, False, package_name) self.assertIsNotNone(unit) def assert_has_package(self, source_file_name, name): @@ -1980,9 +1941,7 @@ def assert_has_package(self, source_file_name, name): unit = self._find_design_unit(source_file_name, "package", name) self.assertIsNotNone(unit) - def assert_has_entity( - self, source_file, name, generic_names=None, architecture_names=None - ): + def assert_has_entity(self, source_file, name, generic_names=None, architecture_names=None): """ Assert that there is an entity with name withing source_file that has architectures with architecture_names. 
@@ -2002,9 +1961,7 @@ def assert_has_architecture(self, source_file_name, name, entity_name): """ Assert that there is an architecture with name of entity_name within source_file_name """ - unit = self._find_design_unit( - source_file_name, "architecture", name, False, entity_name - ) + unit = self._find_design_unit(source_file_name, "architecture", name, False, entity_name) self.assertIsNotNone(unit) def assert_has_component_instantiation(self, source_file_name, component_name): @@ -2044,9 +2001,7 @@ def _find_design_unit( # pylint: disable=too-many-arguments self.assertEqual(design_unit.is_primary, is_primary) self.assertEqual(source_file.name, source_file_name) if not is_primary: - self.assertEqual( - design_unit.primary_design_unit, primary_design_unit_name - ) + self.assertEqual(design_unit.primary_design_unit, primary_design_unit_name) return design_unit return None diff --git a/tests/unit/test_rivierapro_interface.py b/tests/unit/test_rivierapro_interface.py index a2d36e54b..2020c3d36 100644 --- a/tests/unit/test_rivierapro_interface.py +++ b/tests/unit/test_rivierapro_interface.py @@ -27,17 +27,13 @@ class TestRivieraProInterface(unittest.TestCase): @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") @mock.patch("vunit.sim_if.rivierapro.Process", autospec=True) - @mock.patch( - "vunit.sim_if.rivierapro.RivieraProInterface.find_prefix", return_value="prefix" - ) + @mock.patch("vunit.sim_if.rivierapro.RivieraProInterface.find_prefix", return_value="prefix") def test_compile_project_vhdl_2019(self, _find_prefix, process, check_output): simif = RivieraProInterface(prefix="prefix", output_path=self.output_path) project = Project() project.add_library("lib", "lib_path") write_file("file.vhd", "") - project.add_source_file( - "file.vhd", "lib", file_type="vhdl", vhdl_standard=VHDL.standard("2019") - ) + project.add_source_file("file.vhd", "lib", file_type="vhdl", vhdl_standard=VHDL.standard("2019")) simif.compile_project(project) process.assert_any_call( [str(Path("prefix") / "vlib"), "lib", "lib_path"], @@ -65,17 +61,13 @@ def test_compile_project_vhdl_2019(self, _find_prefix, process, check_output): @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") @mock.patch("vunit.sim_if.rivierapro.Process", autospec=True) - @mock.patch( - "vunit.sim_if.rivierapro.RivieraProInterface.find_prefix", return_value="prefix" - ) + @mock.patch("vunit.sim_if.rivierapro.RivieraProInterface.find_prefix", return_value="prefix") def test_compile_project_vhdl_2008(self, _find_prefix, process, check_output): simif = RivieraProInterface(prefix="prefix", output_path=self.output_path) project = Project() project.add_library("lib", "lib_path") write_file("file.vhd", "") - project.add_source_file( - "file.vhd", "lib", file_type="vhdl", vhdl_standard=VHDL.standard("2008") - ) + project.add_source_file("file.vhd", "lib", file_type="vhdl", vhdl_standard=VHDL.standard("2008")) simif.compile_project(project) process.assert_any_call( [str(Path("prefix") / "vlib"), "lib", "lib_path"], @@ -103,17 +95,13 @@ def test_compile_project_vhdl_2008(self, _find_prefix, process, check_output): @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") @mock.patch("vunit.sim_if.rivierapro.Process", autospec=True) - @mock.patch( - "vunit.sim_if.rivierapro.RivieraProInterface.find_prefix", return_value="prefix" - ) + @mock.patch("vunit.sim_if.rivierapro.RivieraProInterface.find_prefix", return_value="prefix") def test_compile_project_vhdl_2002(self, _find_prefix, process, 
check_output): simif = RivieraProInterface(prefix="prefix", output_path=self.output_path) project = Project() project.add_library("lib", "lib_path") write_file("file.vhd", "") - project.add_source_file( - "file.vhd", "lib", file_type="vhdl", vhdl_standard=VHDL.standard("2002") - ) + project.add_source_file("file.vhd", "lib", file_type="vhdl", vhdl_standard=VHDL.standard("2002")) simif.compile_project(project) process.assert_any_call( [str(Path("prefix") / "vlib"), "lib", "lib_path"], @@ -141,17 +129,13 @@ def test_compile_project_vhdl_2002(self, _find_prefix, process, check_output): @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") @mock.patch("vunit.sim_if.rivierapro.Process", autospec=True) - @mock.patch( - "vunit.sim_if.rivierapro.RivieraProInterface.find_prefix", return_value="prefix" - ) + @mock.patch("vunit.sim_if.rivierapro.RivieraProInterface.find_prefix", return_value="prefix") def test_compile_project_vhdl_93(self, _find_prefix, process, check_output): simif = RivieraProInterface(prefix="prefix", output_path=self.output_path) project = Project() project.add_library("lib", "lib_path") write_file("file.vhd", "") - project.add_source_file( - "file.vhd", "lib", file_type="vhdl", vhdl_standard=VHDL.standard("93") - ) + project.add_source_file("file.vhd", "lib", file_type="vhdl", vhdl_standard=VHDL.standard("93")) simif.compile_project(project) process.assert_any_call( [str(Path("prefix") / "vlib"), "lib", "lib_path"], @@ -179,12 +163,8 @@ def test_compile_project_vhdl_93(self, _find_prefix, process, check_output): @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") @mock.patch("vunit.sim_if.rivierapro.Process", autospec=True) - @mock.patch( - "vunit.sim_if.rivierapro.RivieraProInterface.find_prefix", return_value="prefix" - ) - def test_compile_project_vhdl_extra_flags( - self, _find_prefix, process, check_output - ): + @mock.patch("vunit.sim_if.rivierapro.RivieraProInterface.find_prefix", return_value="prefix") + def test_compile_project_vhdl_extra_flags(self, _find_prefix, process, check_output): simif = RivieraProInterface(prefix="prefix", output_path=self.output_path) project = Project() project.add_library("lib", "lib_path") @@ -220,9 +200,7 @@ def test_compile_project_vhdl_extra_flags( @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") @mock.patch("vunit.sim_if.rivierapro.Process", autospec=True) - @mock.patch( - "vunit.sim_if.rivierapro.RivieraProInterface.find_prefix", return_value="prefix" - ) + @mock.patch("vunit.sim_if.rivierapro.RivieraProInterface.find_prefix", return_value="prefix") def test_compile_project_verilog(self, _find_prefix, process, check_output): library_cfg = str(Path(self.output_path) / "library.cfg") simif = RivieraProInterface(prefix="prefix", output_path=self.output_path) @@ -258,9 +236,7 @@ def test_compile_project_verilog(self, _find_prefix, process, check_output): @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") @mock.patch("vunit.sim_if.rivierapro.Process", autospec=True) - @mock.patch( - "vunit.sim_if.rivierapro.RivieraProInterface.find_prefix", return_value="prefix" - ) + @mock.patch("vunit.sim_if.rivierapro.RivieraProInterface.find_prefix", return_value="prefix") def test_compile_project_system_verilog(self, _find_prefix, process, check_output): library_cfg = str(Path(self.output_path) / "library.cfg") simif = RivieraProInterface(prefix="prefix", output_path=self.output_path) @@ -297,12 +273,8 @@ def test_compile_project_system_verilog(self, _find_prefix, 
process, check_outpu @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") @mock.patch("vunit.sim_if.rivierapro.Process", autospec=True) - @mock.patch( - "vunit.sim_if.rivierapro.RivieraProInterface.find_prefix", return_value="prefix" - ) - def test_compile_project_verilog_extra_flags( - self, _find_prefix, process, check_output - ): + @mock.patch("vunit.sim_if.rivierapro.RivieraProInterface.find_prefix", return_value="prefix") + def test_compile_project_verilog_extra_flags(self, _find_prefix, process, check_output): library_cfg = str(Path(self.output_path) / "library.cfg") simif = RivieraProInterface(prefix="prefix", output_path=self.output_path) project = Project() @@ -340,18 +312,14 @@ def test_compile_project_verilog_extra_flags( @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") @mock.patch("vunit.sim_if.rivierapro.Process", autospec=True) - @mock.patch( - "vunit.sim_if.rivierapro.RivieraProInterface.find_prefix", return_value="prefix" - ) + @mock.patch("vunit.sim_if.rivierapro.RivieraProInterface.find_prefix", return_value="prefix") def test_compile_project_verilog_include(self, _find_prefix, process, check_output): library_cfg = str(Path(self.output_path) / "library.cfg") simif = RivieraProInterface(prefix="prefix", output_path=self.output_path) project = Project() project.add_library("lib", "lib_path") write_file("file.v", "") - project.add_source_file( - "file.v", "lib", file_type="verilog", include_dirs=["include"] - ) + project.add_source_file("file.v", "lib", file_type="verilog", include_dirs=["include"]) simif.compile_project(project) process.assert_any_call( [str(Path("prefix") / "vlib"), "lib", "lib_path"], @@ -381,18 +349,14 @@ def test_compile_project_verilog_include(self, _find_prefix, process, check_outp @mock.patch("vunit.sim_if.check_output", autospec=True, return_value="") @mock.patch("vunit.sim_if.rivierapro.Process", autospec=True) - @mock.patch( - "vunit.sim_if.rivierapro.RivieraProInterface.find_prefix", return_value="prefix" - ) + @mock.patch("vunit.sim_if.rivierapro.RivieraProInterface.find_prefix", return_value="prefix") def test_compile_project_verilog_define(self, _find_prefix, process, check_output): library_cfg = str(Path(self.output_path) / "library.cfg") simif = RivieraProInterface(prefix="prefix", output_path=self.output_path) project = Project() project.add_library("lib", "lib_path") write_file("file.v", "") - project.add_source_file( - "file.v", "lib", file_type="verilog", defines={"defname": "defval"} - ) + project.add_source_file("file.v", "lib", file_type="verilog", defines={"defname": "defval"}) simif.compile_project(project) process.assert_any_call( [str(Path("prefix") / "vlib"), "lib", "lib_path"], diff --git a/tests/unit/test_simulator_interface.py b/tests/unit/test_simulator_interface.py index 553895182..d03db7141 100644 --- a/tests/unit/test_simulator_interface.py +++ b/tests/unit/test_simulator_interface.py @@ -33,9 +33,7 @@ class TestSimulatorInterface(unittest.TestCase): def test_compile_source_files(self): simif = create_simulator_interface() - simif.compile_source_file_command.side_effect = iter( - [["command1"], ["command2"]] - ) + simif.compile_source_file_command.side_effect = iter([["command1"], ["command2"]]) project = Project() project.add_library("lib", "lib_path") write_file("file1.vhd", "") @@ -108,13 +106,9 @@ def compile_source_file_command(source_file): raise AssertionError - def check_output_side_effect( - command, env=None - ): # pylint: disable=missing-docstring, unused-argument + 
def check_output_side_effect(command, env=None): # pylint: disable=missing-docstring, unused-argument if command == ["command1"]: - raise subprocess.CalledProcessError( - returncode=-1, cmd=command, output="bad stuff" - ) + raise subprocess.CalledProcessError(returncode=-1, cmd=command, output="bad stuff") return "" @@ -152,9 +146,7 @@ def check_output_side_effect( ], any_order=True, ) - self.assertEqual( - project.get_files_in_compile_order(incremental=True), [file1, file2] - ) + self.assertEqual(project.get_files_in_compile_order(incremental=True), [file1, file2]) def test_compile_source_files_check_output_error(self): simif = create_simulator_interface() @@ -166,18 +158,12 @@ def test_compile_source_files_check_output_error(self): with mock.patch("vunit.sim_if.check_output", autospec=True) as check_output: - def check_output_side_effect( - command, env=None - ): # pylint: disable=missing-docstring, unused-argument - raise subprocess.CalledProcessError( - returncode=-1, cmd=command, output="bad stuff" - ) + def check_output_side_effect(command, env=None): # pylint: disable=missing-docstring, unused-argument + raise subprocess.CalledProcessError(returncode=-1, cmd=command, output="bad stuff") check_output.side_effect = check_output_side_effect printer = MockPrinter() - self.assertRaises( - CompileError, simif.compile_source_files, project, printer=printer - ) + self.assertRaises(CompileError, simif.compile_source_files, project, printer=printer) self.assertEqual( printer.output, """\ @@ -191,9 +177,7 @@ def check_output_side_effect( """, ) check_output.assert_called_once_with(["command"], env=simif.get_env()) - self.assertEqual( - project.get_files_in_compile_order(incremental=True), [source_file] - ) + self.assertEqual(project.get_files_in_compile_order(incremental=True), [source_file]) def test_compile_source_files_create_command_error(self): simif = create_simulator_interface() @@ -210,15 +194,11 @@ def raise_compile_error(source_file): # pylint: disable=unused-argument simif.compile_source_file_command.side_effect = raise_compile_error self.assertRaises(CompileError, simif.compile_source_files, project) - self.assertEqual( - project.get_files_in_compile_order(incremental=True), [source_file] - ) + self.assertEqual(project.get_files_in_compile_order(incremental=True), [source_file]) @mock.patch("os.environ", autospec=True) def test_find_prefix(self, environ): - class MySimulatorInterface( - SimulatorInterface - ): # pylint: disable=abstract-method + class MySimulatorInterface(SimulatorInterface): # pylint: disable=abstract-method """ Dummy simulator interface for testing """ @@ -276,25 +256,19 @@ def test_string_option(self): self._test_ok(option, "hello") self._test_ok(option, u"hello") self._test_not_ok(option, False, "Option 'optname' must be a string. Got False") - self._test_not_ok( - option, ["foo"], "Option 'optname' must be a string. Got ['foo']" - ) + self._test_not_ok(option, ["foo"], "Option 'optname' must be a string. Got ['foo']") def test_list_of_string_option(self): option = ListOfStringOption("optname") self._test_ok(option, ["hello", "foo"]) self._test_ok(option, [u"hello"]) - self._test_not_ok( - option, [True], "Option 'optname' must be a list of strings. " "Got [True]" - ) + self._test_not_ok(option, [True], "Option 'optname' must be a list of strings. " "Got [True]") self._test_not_ok( option, [["foo"]], "Option 'optname' must be a list of strings. " "Got [['foo']]", ) - self._test_not_ok( - option, "foo", "Option 'optname' must be a list of strings. 
" "Got 'foo'" - ) + self._test_not_ok(option, "foo", "Option 'optname' must be a list of strings. " "Got 'foo'") def test_vhdl_assert_level(self): option = VHDLAssertLevelOption() @@ -305,8 +279,7 @@ def test_vhdl_assert_level(self): self._test_not_ok( option, "foo", - "Option 'vhdl_assert_stop_level' must be one of " - "('warning', 'error', 'failure'). Got 'foo'", + "Option 'vhdl_assert_stop_level' must be one of " "('warning', 'error', 'failure'). Got 'foo'", ) @staticmethod @@ -330,9 +303,7 @@ def create_simulator_interface(): Create a simulator interface with fake method """ simif = SimulatorInterface(output_path="output_path", gui=False) - simif.compile_source_file_command = mock.create_autospec( - simif.compile_source_file_command - ) + simif.compile_source_file_command = mock.create_autospec(simif.compile_source_file_command) return simif @@ -344,7 +315,5 @@ class MockPrinter(object): def __init__(self): self.output = "" - def write( - self, text, output_file=None, fg=None, bg=None - ): # pylint: disable=unused-argument + def write(self, text, output_file=None, fg=None, bg=None): # pylint: disable=unused-argument self.output += text diff --git a/tests/unit/test_test_bench.py b/tests/unit/test_test_bench.py index bc0bcbadb..61efc69ef 100644 --- a/tests/unit/test_test_bench.py +++ b/tests/unit/test_test_bench.py @@ -47,16 +47,12 @@ def test_that_single_vhdl_test_is_created(self, tempdir): @staticmethod @with_tempdir def test_no_architecture_at_creation(tempdir): - design_unit = Entity( - "tb_entity", file_name=str(Path(tempdir) / "file.vhd"), no_arch=True - ) + design_unit = Entity("tb_entity", file_name=str(Path(tempdir) / "file.vhd"), no_arch=True) TestBench(design_unit) @with_tempdir def test_no_architecture_gives_runtime_error(self, tempdir): - design_unit = Entity( - "tb_entity", file_name=str(Path(tempdir) / "file.vhd"), no_arch=True - ) + design_unit = Entity("tb_entity", file_name=str(Path(tempdir) / "file.vhd"), no_arch=True) test_bench = TestBench(design_unit) try: self.create_tests(test_bench) @@ -84,9 +80,7 @@ def test_create_default_test(self, tempdir): def test_multiple_architectures_are_not_allowed_for_test_bench(self, tempdir): design_unit = Entity("tb_entity", file_name=str(Path(tempdir) / "file.vhd")) design_unit.generic_names = ["runner_cfg"] - design_unit.add_architecture( - "arch2", file_name=str(Path(tempdir) / "arch2.vhd") - ) + design_unit.add_architecture("arch2", file_name=str(Path(tempdir) / "arch2.vhd")) try: TestBench(design_unit) except RuntimeError as exc: @@ -179,9 +173,7 @@ def test_keyerror_on_non_existent_test(self, tempdir): @with_tempdir def test_creates_tests_when_adding_architecture_late(self, tempdir): - design_unit = Entity( - "tb_entity", file_name=str(Path(tempdir) / "file.vhd"), no_arch=True - ) + design_unit = Entity("tb_entity", file_name=str(Path(tempdir) / "file.vhd"), no_arch=True) design_unit.generic_names = ["runner_cfg"] test_bench = TestBench(design_unit) @@ -232,15 +224,11 @@ def _test_scan_tests_from_file_location(self, tempdir, code): @with_tempdir def test_scan_tests_from_file_location_unix(self, tempdir): - self._test_scan_tests_from_file_location( - tempdir, 'foo \n bar \n if run("Test_1")' - ) + self._test_scan_tests_from_file_location(tempdir, 'foo \n bar \n if run("Test_1")') @with_tempdir def test_scan_tests_from_file_location_dos(self, tempdir): - self._test_scan_tests_from_file_location( - tempdir, 'foo \r\n bar \r\n if run("Test_1")' - ) + self._test_scan_tests_from_file_location(tempdir, 'foo \r\n bar \r\n if 
run("Test_1")') @with_tempdir def test_scan_tests_from_missing_file(self, tempdir): @@ -283,9 +271,7 @@ def test_that_run_in_same_simulation_attribute_works(self, tempdir): design_unit.generic_names = ["runner_cfg"] test_bench = TestBench(design_unit) tests = self.create_tests(test_bench) - self.assert_has_tests( - tests, [("lib.tb_entity", ("lib.tb_entity.Test_1", "lib.tb_entity.Test_2"))] - ) + self.assert_has_tests(tests, [("lib.tb_entity", ("lib.tb_entity.Test_1", "lib.tb_entity.Test_2"))]) @with_tempdir def test_add_config(self, tempdir): @@ -295,12 +281,8 @@ def test_add_config(self, tempdir): test_bench.set_generic("global_value", "global value") - test_bench.add_config( - name="value=1", generics=dict(value=1, global_value="local value") - ) - test_bench.add_config( - name="value=2", generics=dict(value=2), attributes={".foo": "bar"} - ) + test_bench.add_config(name="value=1", generics=dict(value=1, global_value="local value")) + test_bench.add_config(name="value=2", generics=dict(value=2), attributes={".foo": "bar"}) self.assertRaises( AttributeException, @@ -321,9 +303,7 @@ def test_add_config(self, tempdir): get_config_of(tests, "lib.tb_entity.value=2").generics, {"value": 2, "global_value": "global value"}, ) - self.assertEqual( - get_config_of(tests, "lib.tb_entity.value=2").attributes, {".foo": "bar"} - ) + self.assertEqual(get_config_of(tests, "lib.tb_entity.value=2").attributes, {".foo": "bar"}) @with_tempdir def test_test_case_add_config(self, tempdir): @@ -341,9 +321,7 @@ def test_test_case_add_config(self, tempdir): test_bench.set_sim_option("disable_ieee_warnings", True) test_case = test_bench.get_test_case("test 2") - test_case.add_config( - name="c1", generics=dict(value=1, global_value="local value") - ) + test_case.add_config(name="c1", generics=dict(value=1, global_value="local value")) test_case.add_config( name="c2", generics=dict(value=2), @@ -373,18 +351,12 @@ def test_test_case_add_config(self, tempdir): config_c2_test2 = get_config_of(tests, "lib.tb_entity.c2.test 2") self.assertEqual(config_test1.generics, {"global_value": "global value"}) self.assertEqual(config_c1_test2.attributes, {}) - self.assertEqual( - config_c1_test2.generics, {"value": 1, "global_value": "local value"} - ) + self.assertEqual(config_c1_test2.generics, {"value": 1, "global_value": "local value"}) self.assertEqual(config_c2_test2.attributes, {".foo": "bar"}) - self.assertEqual( - config_c2_test2.generics, {"value": 2, "global_value": "global value"} - ) + self.assertEqual(config_c2_test2.generics, {"value": 2, "global_value": "global value"}) @with_tempdir - def test_runtime_error_on_configuration_of_individual_test_with_same_sim( - self, tempdir - ): + def test_runtime_error_on_configuration_of_individual_test_with_same_sim(self, tempdir): design_unit = Entity( "tb_entity", file_name=str(Path(tempdir) / "file.vhd"), @@ -426,9 +398,7 @@ def test_run_all_in_same_sim_can_be_configured(self, tempdir): ) ], ) - self.assertEqual( - get_config_of(tests, "lib.tb_entity.cfg").generics, {"name": "value"} - ) + self.assertEqual(get_config_of(tests, "lib.tb_entity.cfg").generics, {"name": "value"}) @with_tempdir def test_global_user_attributes_not_supported_yet(self, tempdir): @@ -448,8 +418,7 @@ def test_global_user_attributes_not_supported_yet(self, tempdir): except RuntimeError as exc: self.assertEqual( str(exc), - "File global attributes are not yet supported: .attr0 in %s line 1" - % str(Path(tempdir) / "file.vhd"), + "File global attributes are not yet supported: .attr0 in %s line 1" % 
str(Path(tempdir) / "file.vhd"), ) else: assert False, "RuntimeError not raised" @@ -483,9 +452,7 @@ def test_test_information(self, tempdir): file_name = str(Path(tempdir) / "file.vhd") for same_sim in [True, False]: - contents = get_vhdl_test_bench( - "tb_entity", tests=["Test 1", "Test 2"], same_sim=same_sim - ) + contents = get_vhdl_test_bench("tb_entity", tests=["Test 1", "Test 2"], same_sim=same_sim) design_unit = Entity("tb_entity", file_name=file_name, contents=contents) design_unit.generic_names = ["runner_cfg", "name"] @@ -498,11 +465,7 @@ def test_test_information(self, tempdir): self.assertEqual(len(test_suites), 2) self.assertEqual( - set( - item - for test_suite in test_suites - for item in test_suite.test_information.items() - ), + set(item for test_suite in test_suites for item in test_suite.test_information.items()), set( [ ( @@ -526,9 +489,7 @@ def test_fail_on_unknown_sim_option(self, tempdir): def test_remove_verilog_comments(self): self.assertEqual(_remove_verilog_comments("a\n// foo \nb"), "a\n \nb") - self.assertEqual( - _remove_verilog_comments("a\n/* foo\n \n */ \nb"), "a\n \n \n \nb" - ) + self.assertEqual(_remove_verilog_comments("a\n/* foo\n \n */ \nb"), "a\n \n \n \nb") def test_get_line_offsets(self): self.assertEqual(_get_line_offsets(""), []) @@ -684,22 +645,19 @@ def test_duplicate_tests_cause_error(self, mock_logger): msg = error_calls[0][0][0] % error_calls[0][0][1:] self.assertEqual( msg, - 'Duplicate test "Test_3" in %s line 5 previously defined on line 3' - % file_name, + 'Duplicate test "Test_3" in %s line 5 previously defined on line 3' % file_name, ) msg = error_calls[1][0][0] % error_calls[1][0][1:] self.assertEqual( msg, - 'Duplicate test "Test_3" in %s line 6 previously defined on line 3' - % file_name, + 'Duplicate test "Test_3" in %s line 6 previously defined on line 3' % file_name, ) msg = error_calls[2][0][0] % error_calls[2][0][1:] self.assertEqual( msg, - 'Duplicate test "Test_2" in %s line 7 previously defined on line 4' - % file_name, + 'Duplicate test "Test_2" in %s line 7 previously defined on line 4' % file_name, ) def test_find_attributes(self): @@ -736,9 +694,7 @@ def test_find_user_attributes(self): attributes, [ Attribute(".foo", None, _code_file_location(code, ".foo", "file.vhd")), - Attribute( - ".foo-bar", None, _code_file_location(code, ".foo-bar", "file.vhd") - ), + Attribute(".foo-bar", None, _code_file_location(code, ".foo-bar", "file.vhd")), ], ) diff --git a/tests/unit/test_test_bench_list.py b/tests/unit/test_test_bench_list.py index 1cb4fb942..90773b224 100644 --- a/tests/unit/test_test_bench_list.py +++ b/tests/unit/test_test_bench_list.py @@ -53,19 +53,13 @@ def test_tb_filter_match_prefix_and_suffix_only(self, tempdir): Issue #263 """ with mock.patch("vunit.test.bench_list.LOGGER", autospec=True) as logger: - design_unit = Entity( - "mul_tbl_scale", file_name=str(Path(tempdir) / "file.vhd") - ) + design_unit = Entity("mul_tbl_scale", file_name=str(Path(tempdir) / "file.vhd")) self.assertFalse(tb_filter(design_unit)) self.assertFalse(logger.warning.called) @with_tempdir - def test_tb_filter_warning_on_missing_runner_cfg_when_matching_tb_pattern( - self, tempdir - ): - design_unit = Module( - "tb_module_not_ok", file_name=str(Path(tempdir) / "file.vhd") - ) + def test_tb_filter_warning_on_missing_runner_cfg_when_matching_tb_pattern(self, tempdir): + design_unit = Module("tb_module_not_ok", file_name=str(Path(tempdir) / "file.vhd")) design_unit.generic_names = [] with mock.patch("vunit.test.bench_list.LOGGER", 
autospec=True) as logger: @@ -87,9 +81,7 @@ def test_tb_filter_warning_on_missing_runner_cfg_when_matching_tb_pattern( @with_tempdir def test_tb_filter_warning_on_runner_cfg_but_not_matching_tb_pattern(self, tempdir): - design_unit = Entity( - "entity_ok_but_warning", file_name=str(Path(tempdir) / "file.vhd") - ) + design_unit = Entity("entity_ok_but_warning", file_name=str(Path(tempdir) / "file.vhd")) design_unit.generic_names = ["runner_cfg"] with mock.patch("vunit.test.bench_list.LOGGER", autospec=True) as logger: diff --git a/tests/unit/test_test_report.py b/tests/unit/test_test_report.py index 68ad4b736..4831b3507 100644 --- a/tests/unit/test_test_report.py +++ b/tests/unit/test_test_report.py @@ -200,12 +200,8 @@ def test_junit_report_with__with_missing_output_file(self): root = ElementTree.fromstring(report.to_junit_xml_str()) self.assertEqual(root.tag, "testsuite") self.assertEqual(len(root.findall("*")), 2) - self.assert_has_test( - root, "passed_test0", time="1.0", status="passed", output=fail_output - ) - self.assert_has_test( - root, "passed_test1", time="2.0", status="passed", output=fail_output - ) + self.assert_has_test(root, "passed_test0", time="1.0", status="passed", output=fail_output) + self.assert_has_test(root, "passed_test1", time="2.0", status="passed", output=fail_output) def test_junit_report_with_some_failed_tests(self): report = self._report_with_some_failed_tests() @@ -218,20 +214,12 @@ def test_junit_report_with_some_failed_tests(self): def test_junit_report_with_some_failed_tests_bamboo_fmt(self): report = self._report_with_some_failed_tests() - root = ElementTree.fromstring( - report.to_junit_xml_str(xunit_xml_format="bamboo") - ) + root = ElementTree.fromstring(report.to_junit_xml_str(xunit_xml_format="bamboo")) self.assertEqual(root.tag, "testsuite") self.assertEqual(len(root.findall("*")), 3) - self.assert_has_test( - root, "failed_test0", time="11.1", status="failed", fmt="bamboo" - ) - self.assert_has_test( - root, "passed_test", time="2.0", status="passed", fmt="bamboo" - ) - self.assert_has_test( - root, "failed_test1", time="3.0", status="failed", fmt="bamboo" - ) + self.assert_has_test(root, "failed_test0", time="11.1", status="failed", fmt="bamboo") + self.assert_has_test(root, "passed_test", time="2.0", status="passed", fmt="bamboo") + self.assert_has_test(root, "failed_test1", time="3.0", status="failed", fmt="bamboo") def test_junit_report_with_some_skipped_tests(self): report = self._report_with_some_skipped_tests() @@ -244,15 +232,9 @@ def test_junit_report_with_some_skipped_tests(self): def test_junit_report_with_testcase_classname(self): report = self._new_report() - report.add_result( - "test", PASSED, time=1.0, output_file_name=self.output_file_name - ) - report.add_result( - "lib.entity", PASSED, time=1.0, output_file_name=self.output_file_name - ) - report.add_result( - "lib.entity.test", PASSED, time=1.0, output_file_name=self.output_file_name - ) + report.add_result("test", PASSED, time=1.0, output_file_name=self.output_file_name) + report.add_result("lib.entity", PASSED, time=1.0, output_file_name=self.output_file_name) + report.add_result("lib.entity.test", PASSED, time=1.0, output_file_name=self.output_file_name) report.add_result( "lib.entity.config.test", PASSED, @@ -261,8 +243,7 @@ def test_junit_report_with_testcase_classname(self): ) root = ElementTree.fromstring(report.to_junit_xml_str()) names = set( - (elem.attrib.get("classname", None), elem.attrib.get("name", None)) - for elem in root.findall("testcase") + 
(elem.attrib.get("classname", None), elem.attrib.get("name", None)) for elem in root.findall("testcase") ) self.assertEqual( names, @@ -280,9 +261,7 @@ def test_dict_report_with_all_passed_tests(self): opath = Path(self.output_file_name).parent.parent test_path = opath / TEST_OUTPUT_PATH / "unit" output_file_name = test_path / Path(self.output_file_name).name - results = Results( - opath, None, self._report_with_all_passed_tests(output_file_name) - ) + results = Results(opath, None, self._report_with_all_passed_tests(output_file_name)) report = results.get_report() for _, test in report.tests.items(): self.assertEqual(test.path.name, test.relpath) @@ -312,54 +291,34 @@ def _report_with_all_passed_tests(self, output_file_name=None): if not output_file_name: output_file_name = self.output_file_name report = self._new_report() - report.add_result( - "passed_test0", PASSED, time=1.0, output_file_name=output_file_name - ) - report.add_result( - "passed_test1", PASSED, time=2.0, output_file_name=output_file_name - ) + report.add_result("passed_test0", PASSED, time=1.0, output_file_name=output_file_name) + report.add_result("passed_test1", PASSED, time=2.0, output_file_name=output_file_name) report.set_expected_num_tests(2) return report def _report_with_missing_tests(self): "@returns A report with all passed tests" report = self._new_report() - report.add_result( - "passed_test0", PASSED, time=1.0, output_file_name=self.output_file_name - ) - report.add_result( - "passed_test1", PASSED, time=2.0, output_file_name=self.output_file_name - ) + report.add_result("passed_test0", PASSED, time=1.0, output_file_name=self.output_file_name) + report.add_result("passed_test1", PASSED, time=2.0, output_file_name=self.output_file_name) report.set_expected_num_tests(3) return report def _report_with_some_failed_tests(self): "@returns A report with some failed tests" report = self._new_report() - report.add_result( - "failed_test0", FAILED, time=11.12, output_file_name=self.output_file_name - ) - report.add_result( - "passed_test", PASSED, time=2.0, output_file_name=self.output_file_name - ) - report.add_result( - "failed_test1", FAILED, time=3.0, output_file_name=self.output_file_name - ) + report.add_result("failed_test0", FAILED, time=11.12, output_file_name=self.output_file_name) + report.add_result("passed_test", PASSED, time=2.0, output_file_name=self.output_file_name) + report.add_result("failed_test1", FAILED, time=3.0, output_file_name=self.output_file_name) report.set_expected_num_tests(3) return report def _report_with_some_skipped_tests(self): "@returns A report with some skipped tests" report = self._new_report() - report.add_result( - "passed_test", PASSED, time=1.0, output_file_name=self.output_file_name - ) - report.add_result( - "skipped_test", SKIPPED, time=0.0, output_file_name=self.output_file_name - ) - report.add_result( - "failed_test", FAILED, time=3.0, output_file_name=self.output_file_name - ) + report.add_result("passed_test", PASSED, time=1.0, output_file_name=self.output_file_name) + report.add_result("skipped_test", SKIPPED, time=0.0, output_file_name=self.output_file_name) + report.add_result("failed_test", FAILED, time=3.0, output_file_name=self.output_file_name) report.set_expected_num_tests(3) return report diff --git a/tests/unit/test_test_runner.py b/tests/unit/test_test_runner.py index 9d68d498c..92b79e1de 100644 --- a/tests/unit/test_test_runner.py +++ b/tests/unit/test_test_runner.py @@ -146,10 +146,7 @@ def test_get_output_path_on_linux(self): test_output = 
runner._get_output_path(test_name) self.assertEqual( test_output, - str( - Path(output_path).resolve() - / (test_name + "_" + hash_string(test_name)) - ), + str(Path(output_path).resolve() / (test_name + "_" + hash_string(test_name))), ) output_path = "output_path" @@ -157,10 +154,7 @@ def test_get_output_path_on_linux(self): test_output = runner._get_output_path(test_name) self.assertEqual( test_output, - str( - Path(output_path).resolve() - / (test_name + "_" + hash_string(test_name)) - ), + str(Path(output_path).resolve() / (test_name + "_" + hash_string(test_name))), ) output_path = "output_path" @@ -169,10 +163,7 @@ def test_get_output_path_on_linux(self): test_output = runner._get_output_path(test_name) self.assertEqual( test_output, - str( - Path(output_path).resolve() - / (safe_name + "_" + hash_string(test_name)) - ), + str(Path(output_path).resolve() / (safe_name + "_" + hash_string(test_name))), ) def test_get_output_path_on_windows(self): @@ -186,18 +177,13 @@ def test_get_output_path_on_windows(self): test_output = runner._get_output_path(test_name) self.assertEqual(len(test_output), 260 - 100 + 1) - with mock.patch( - "os.environ", new={"VUNIT_TEST_OUTPUT_PATH_MARGIN": "-1000"} - ): + with mock.patch("os.environ", new={"VUNIT_TEST_OUTPUT_PATH_MARGIN": "-1000"}): output_path = "output_path" test_name = "_" * 400 test_output = runner._get_output_path(test_name) self.assertEqual( test_output, - str( - Path(output_path).resolve() - / (test_name + "_" + hash_string(test_name)) - ), + str(Path(output_path).resolve() / (test_name + "_" + hash_string(test_name))), ) with mock.patch("os.environ", new={"VUNIT_SHORT_TEST_OUTPUT_PATHS": ""}): diff --git a/tests/unit/test_test_suites.py b/tests/unit/test_test_suites.py index e19e4d939..da925e83a 100644 --- a/tests/unit/test_test_suites.py +++ b/tests/unit/test_test_suites.py @@ -106,9 +106,7 @@ def _read_test_results(self, expected, contents): test_suite_name=None, test_cases=expected, ) - results = run._read_test_results( # pylint: disable=protected-access - file_name=file_name - ) + results = run._read_test_results(file_name=file_name) # pylint: disable=protected-access self.assertEqual(results, expected) return results @@ -183,9 +181,7 @@ def func(): test_cases=expected, ) - results = run._read_test_results( # pylint: disable=protected-access - file_name=file_name - ) + results = run._read_test_results(file_name=file_name) # pylint: disable=protected-access self.assertEqual( run._check_results(results, sim_ok), # pylint: disable=protected-access (waschecked, expected), diff --git a/tests/unit/test_tokenizer.py b/tests/unit/test_tokenizer.py index 0d849a39d..603dec0c8 100644 --- a/tests/unit/test_tokenizer.py +++ b/tests/unit/test_tokenizer.py @@ -114,9 +114,7 @@ def test_describe_missing_location(self): ) def test_describe_none_filename_location(self): - self.assertEqual( - describe_location(((None, (0, 0)), None)), "Unknown Python string" - ) + self.assertEqual(describe_location(((None, (0, 0)), None)), "Unknown Python string") def _describe_location(*codes): @@ -137,12 +135,8 @@ def _describe_location(*codes): location = ((filename, (start, end)), location) - with mock.patch( - "vunit.parsing.tokenizer.read_file", autospec=True - ) as mock_read_file: - with mock.patch( - "vunit.parsing.tokenizer.file_exists", autospec=True - ) as mock_file_exists: + with mock.patch("vunit.parsing.tokenizer.read_file", autospec=True) as mock_read_file: + with mock.patch("vunit.parsing.tokenizer.file_exists", autospec=True) as mock_file_exists: def 
file_exists_side_effect(filename): return filename in contents diff --git a/tests/unit/test_ui.py b/tests/unit/test_ui.py index 00a7381a3..85d778884 100644 --- a/tests/unit/test_ui.py +++ b/tests/unit/test_ui.py @@ -150,12 +150,8 @@ def test_locally_specified_preprocessors_should_be_used_instead_of_any_globally_ end architecture; """ ) - self.assertFalse( - (Path(self._preprocessed_path) / "lib" / Path(file_name1).name).exists() - ) - with ( - Path(self._preprocessed_path) / "lib" / Path(file_name2).name - ).open() as fread: + self.assertFalse((Path(self._preprocessed_path) / "lib" / Path(file_name1).name).exists()) + with (Path(self._preprocessed_path) / "lib" / Path(file_name2).name).open() as fread: expectd = pp_source.substitute( entity="ent2", report='log("Here I am!"); -- VUnitfier preprocessor: Report turned off, keeping original code.', @@ -190,12 +186,8 @@ def test_recovers_from_preprocessing_error(self, logger): self.create_file(str(file_name), contents) ui.add_source_file(file_name, "lib") - logger.assert_called_once_with( - "Failed to preprocess %s", str(Path(file_name).resolve()) - ) - self.assertFalse( - (Path(self._preprocessed_path) / "lib" / file_name.name).exists() - ) + logger.assert_called_once_with("Failed to preprocess %s", str(Path(file_name).resolve())) + self.assertFalse((Path(self._preprocessed_path) / "lib" / file_name.name).exists()) def test_supported_source_file_suffixes(self): """Test adding a supported filetype, of any case, is accepted.""" @@ -205,9 +197,7 @@ def test_supported_source_file_suffixes(self): allowable_extensions = list(accepted_extensions) allowable_extensions.extend([ext.upper() for ext in accepted_extensions]) allowable_extensions.append( - VHDL_EXTENSIONS[0][0] - + VHDL_EXTENSIONS[0][1].upper() - + VHDL_EXTENSIONS[0][2:] + VHDL_EXTENSIONS[0][0] + VHDL_EXTENSIONS[0][1].upper() + VHDL_EXTENSIONS[0][2:] ) # mixed case for idx, ext in enumerate(allowable_extensions): file_name = self.create_entity_file(idx, ext) @@ -241,9 +231,7 @@ def test_exception_on_adding_zero_files(self): def test_no_exception_on_adding_zero_files_when_allowed(self): ui = self._create_ui() lib = ui.add_library("lib") - lib.add_source_files( - str(Path(__file__).parent / "missing.vhd"), allow_empty=True - ) + lib.add_source_files(str(Path(__file__).parent / "missing.vhd"), allow_empty=True) def test_get_test_benchs_and_test(self): ui = self._create_ui() @@ -292,9 +280,7 @@ def test_get_test_benchs_and_test(self): [test_bench.name for test_bench in lib.get_test_benches()], ["tb_ent", "tb_ent2"], ) - self.assertEqual( - [test_bench.name for test_bench in lib.get_test_benches("*2")], ["tb_ent2"] - ) + self.assertEqual([test_bench.name for test_bench in lib.get_test_benches("*2")], ["tb_ent2"]) self.assertEqual(lib.test_bench("tb_ent").test("test1").name, "test1") self.assertEqual(lib.test_bench("tb_ent").test("test2").name, "test2") @@ -303,12 +289,8 @@ def test_get_test_benchs_and_test(self): [test.name for test in lib.test_bench("tb_ent").get_tests()], ["test1", "test2"], ) - self.assertEqual( - [test.name for test in lib.test_bench("tb_ent").get_tests("*1")], ["test1"] - ) - self.assertEqual( - [test.name for test in lib.test_bench("tb_ent2").get_tests()], [] - ) + self.assertEqual([test.name for test in lib.test_bench("tb_ent").get_tests("*1")], ["test1"]) + self.assertEqual([test.name for test in lib.test_bench("tb_ent2").get_tests()], []) def test_get_entities_case_insensitive(self): ui = self._create_ui() @@ -400,9 +382,7 @@ def 
test_add_source_files_from_csv_return(self): ui = self._create_ui() source_files = ui.add_source_files_from_csv("test_returns.csv") - self.assertEqual( - [source_file.name for source_file in source_files], list_of_files - ) + self.assertEqual([source_file.name for source_file in source_files], list_of_files) def test_add_source_files_errors(self): ui = self._create_ui() @@ -414,9 +394,7 @@ def test_add_source_files_errors(self): lib.add_source_files, ["missing.vhd", "file.vhd"], ) - self.assertRaisesRegex( - ValueError, r"missing\.vhd", lib.add_source_files, "missing.vhd" - ) + self.assertRaisesRegex(ValueError, r"missing\.vhd", lib.add_source_files, "missing.vhd") def test_get_source_files(self): ui = self._create_ui() @@ -526,35 +504,25 @@ def check_stdout(ui, expected): ui = self._create_ui("--list", "--with-attribute=.attr0") setup(ui) - check_stdout( - ui, "lib.tb_filter.Test 1\n" "lib.tb_filter.Test 2\n" "Listed 2 tests" - ) + check_stdout(ui, "lib.tb_filter.Test 1\n" "lib.tb_filter.Test 2\n" "Listed 2 tests") ui = self._create_ui("--list", "--with-attribute=.attr2") setup(ui) check_stdout(ui, "lib.tb_filter.Test 4\n" "Listed 1 tests") - ui = self._create_ui( - "--list", "--with-attributes", ".attr0", "--with-attributes", ".attr1" - ) + ui = self._create_ui("--list", "--with-attributes", ".attr0", "--with-attributes", ".attr1") setup(ui) check_stdout(ui, "lib.tb_filter.Test 2\n" "Listed 1 tests") ui = self._create_ui("--list", "--without-attributes", ".attr0") setup(ui) - check_stdout( - ui, "lib.tb_filter.Test 3\n" "lib.tb_filter.Test 4\n" "Listed 2 tests" - ) + check_stdout(ui, "lib.tb_filter.Test 3\n" "lib.tb_filter.Test 4\n" "Listed 2 tests") - ui = self._create_ui( - "--list", "--without-attributes", ".attr0", "--without-attributes", ".attr1" - ) + ui = self._create_ui("--list", "--without-attributes", ".attr0", "--without-attributes", ".attr1") setup(ui) check_stdout(ui, "lib.tb_filter.Test 4\n" "Listed 1 tests") - ui = self._create_ui( - "--list", "--with-attributes", ".attr0", "--without-attributes", ".attr1" - ) + ui = self._create_ui("--list", "--with-attributes", ".attr0", "--without-attributes", ".attr1") setup(ui) check_stdout(ui, "lib.tb_filter.Test 1\n" "Listed 1 tests") @@ -587,17 +555,11 @@ def test_export_json(self, tempdir): data = json.load(fptr) # Check known keys - self.assertEqual( - set(data.keys()), set(["export_format_version", "files", "tests"]) - ) + self.assertEqual(set(data.keys()), set(["export_format_version", "files", "tests"])) # Check that export format is semantic version with integer values - self.assertEqual( - set(data["export_format_version"].keys()), set(("major", "minor", "patch")) - ) - assert all( - isinstance(value, int) for value in data["export_format_version"].values() - ) + self.assertEqual(set(data["export_format_version"].keys()), set(("major", "minor", "patch"))) + assert all(isinstance(value, int) for value in data["export_format_version"].values()) # Check the contents of the files section self.assertEqual( @@ -612,10 +574,7 @@ def test_export_json(self, tempdir): # Check the contents of the tests section self.assertEqual( - { - item["name"]: (item["location"], item["attributes"]) - for item in data["tests"] - }, + {item["name"]: (item["location"], item["attributes"]) for item in data["tests"]}, { "lib1.tb_foo.all": ( {"file_name": file_name1, "offset": 180, "length": 18}, @@ -660,9 +619,7 @@ def test_get_source_files_errors(self): ui.get_source_files, non_existant_name, ) - self.assertEqual( - 
len(ui.get_source_files(non_existant_name, allow_empty=True)), 0 - ) + self.assertEqual(len(ui.get_source_files(non_existant_name, allow_empty=True)), 0) self.assertRaisesRegex( ValueError, @@ -733,9 +690,7 @@ def test_add_fileset_manual_dependencies(self): foo_files.add_dependency_on(bar_file) for foo_file in foo_files: - self.assertEqual( - names(ui.get_compile_order([foo_file])), names([bar_file, foo_file]) - ) + self.assertEqual(names(ui.get_compile_order([foo_file])), names([bar_file, foo_file])) def _create_ui_with_mocked_project_add_source_file(self): """ @@ -773,16 +728,8 @@ def check(action): no_parse=False, ) - check( - lambda ui, _: ui.add_source_files( - file_name, "lib", include_dirs=include_dirs - ) - ) - check( - lambda ui, _: ui.add_source_file( - file_name, "lib", include_dirs=include_dirs - ) - ) + check(lambda ui, _: ui.add_source_files(file_name, "lib", include_dirs=include_dirs)) + check(lambda ui, _: ui.add_source_file(file_name, "lib", include_dirs=include_dirs)) check(lambda _, lib: lib.add_source_files(file_name, include_dirs=include_dirs)) check(lambda _, lib: lib.add_source_file(file_name, include_dirs=include_dirs)) @@ -915,9 +862,7 @@ def test_add_source_file_vhdl_standard_is_used(self): elif method == 2: source_file = ui.add_source_file(file_name, "lib", vhdl_standard="2008") elif method == 3: - source_file = ui.add_source_files( - file_name, "lib", vhdl_standard="2008" - )[0] + source_file = ui.add_source_files(file_name, "lib", vhdl_standard="2008")[0] self.assertEqual(source_file.vhdl_standard, "2008") @@ -1077,10 +1022,7 @@ def test_scan_tests_from_other_file(self): test_bench.scan_tests_from_file(tests_file_name) self.assertEqual( - [ - test.name - for test in ui.library("lib").test_bench("tb_top").get_tests() - ], + [test.name for test in ui.library("lib").test_bench("tb_top").get_tests()], ["test1", "test2"], ) @@ -1101,9 +1043,7 @@ def test_scan_tests_from_other_file_missing(self): """, ) lib.add_source_file(tb_file_name) - self.assertRaises( - ValueError, lib.test_bench("tb_top").scan_tests_from_file, "missing.sv" - ) + self.assertRaises(ValueError, lib.test_bench("tb_top").scan_tests_from_file, "missing.sv") def test_can_list_tests_without_simulator(self): with set_env(): @@ -1121,9 +1061,7 @@ def test_compile_without_simulator_fails(self, logger): ui = self._create_ui_real_sim("--compile") self._run_main(ui, 1) self.assertEqual(len(logger.error.mock_calls), 1) - self.assertTrue( - "No available simulator detected" in str(logger.error.mock_calls) - ) + self.assertTrue("No available simulator detected" in str(logger.error.mock_calls)) @mock.patch("vunit.ui.LOGGER", autospec=True) def test_simulate_without_simulator_fails(self, logger): @@ -1131,9 +1069,7 @@ def test_simulate_without_simulator_fails(self, logger): ui = self._create_ui_real_sim() self._run_main(ui, 1) self.assertEqual(len(logger.error.mock_calls), 1) - self.assertTrue( - "No available simulator detected" in str(logger.error.mock_calls) - ) + self.assertTrue("No available simulator detected" in str(logger.error.mock_calls)) def test_set_sim_option_before_adding_file(self): """ @@ -1193,9 +1129,7 @@ def test_get_testbench_files(self): lib.add_source_file("tb_ent.vhd") lib.add_source_file("tb_ent2.vhd") simulator_if = ui._create_simulator_if() # pylint: disable=protected-access - target_files = ui._get_testbench_files( # pylint: disable=protected-access - simulator_if - ) + target_files = ui._get_testbench_files(simulator_if) # pylint: disable=protected-access expected = [ 
lib.get_source_file(fname)._source_file # pylint: disable=protected-access for fname in ["tb_ent2.vhd", "tb_ent.vhd"] @@ -1300,9 +1234,7 @@ class VUnitfier(object): """ def __init__(self): - self._report_pattern = re.compile( - r'^(?P\s*)report\s*(?P"[^"]*")\s*;', MULTILINE - ) + self._report_pattern = re.compile(r'^(?P\s*)report\s*(?P"[^"]*")\s*;', MULTILINE) def run(self, code, file_name): # pylint: disable=unused-argument return self._report_pattern.sub( @@ -1341,9 +1273,7 @@ def compile_source_file_command(source_file): # pylint: disable=unused-argument return True @staticmethod - def simulate( # pylint: disable=unused-argument - output_path, test_suite_name, config, elaborate_only - ): + def simulate(output_path, test_suite_name, config, elaborate_only): # pylint: disable=unused-argument return True diff --git a/tests/unit/test_verilog_preprocessor.py b/tests/unit/test_verilog_preprocessor.py index ae0501e42..9f437c1b5 100644 --- a/tests/unit/test_verilog_preprocessor.py +++ b/tests/unit/test_verilog_preprocessor.py @@ -64,23 +64,17 @@ def test_preprocess_define_with_lpar_value(self): def test_preprocess_define_with_one_arg(self): result = self.preprocess("`define foo(arg)arg 123") result.assert_has_tokens("") - result.assert_has_defines( - {"foo": Macro("foo", tokenize("arg 123"), args=("arg",))} - ) + result.assert_has_defines({"foo": Macro("foo", tokenize("arg 123"), args=("arg",))}) def test_preprocess_define_with_one_arg_ignores_initial_space(self): result = self.preprocess("`define foo(arg) arg 123") result.assert_has_tokens("") - result.assert_has_defines( - {"foo": Macro("foo", tokenize("arg 123"), args=("arg",))} - ) + result.assert_has_defines({"foo": Macro("foo", tokenize("arg 123"), args=("arg",))}) def test_preprocess_define_with_multiple_args(self): result = self.preprocess("`define foo( arg1, arg2)arg1 arg2") result.assert_has_tokens("") - result.assert_has_defines( - {"foo": Macro("foo", tokenize("arg1 arg2"), args=("arg1", "arg2"))} - ) + result.assert_has_defines({"foo": Macro("foo", tokenize("arg1 arg2"), args=("arg1", "arg2"))}) def test_preprocess_define_with_default_values(self): result = self.preprocess("`define foo(arg1, arg2=default)arg1 arg2") @@ -146,18 +140,14 @@ def test_preprocess_substitute_define_with_default_values(self): def test_preprocess_include_directive(self): self.write_file("include.svh", "hello hey") - result = self.preprocess( - '`include "include.svh"', include_paths=[self.output_path] - ) + result = self.preprocess('`include "include.svh"', include_paths=[self.output_path]) result.assert_has_tokens("hello hey") result.assert_included_files([str(Path(self.output_path) / "include.svh")]) def test_detects_circular_includes(self): self.write_file("include1.svh", '`include "include2.svh"') self.write_file("include2.svh", '`include "include1.svh"') - result = self.preprocess( - '`include "include1.svh"', include_paths=[self.output_path] - ) + result = self.preprocess('`include "include1.svh"', include_paths=[self.output_path]) result.logger.error.assert_called_once_with( "Circular `include of include2.svh detected\n%s", "from fn.v line 1:\n" @@ -176,9 +166,7 @@ def test_detects_circular_includes(self): def test_detects_circular_include_of_self(self): self.write_file("include.svh", '`include "include.svh"') - result = self.preprocess( - '`include "include.svh"', include_paths=[self.output_path] - ) + result = self.preprocess('`include "include.svh"', include_paths=[self.output_path]) result.logger.error.assert_called_once_with( "Circular `include 
of include.svh detected\n%s", "from fn.v line 1:\n" @@ -194,9 +182,7 @@ def test_detects_circular_include_of_self(self): def test_does_not_detect_non_circular_includes(self): self.write_file("include3.svh", "keep") - self.write_file( - "include1.svh", '`include "include3.svh"\n`include "include2.svh"' - ) + self.write_file("include1.svh", '`include "include3.svh"\n`include "include2.svh"') self.write_file("include2.svh", '`include "include3.svh"') result = self.preprocess( '`include "include1.svh"\n`include "include2.svh"', @@ -413,9 +399,7 @@ def test_nested_ifdef(self): `endif keep""" ) - result.assert_has_tokens( - "outer_before\n" "inner_else\n" "inner_elsif\n" "outer_after\n" "keep" - ) + result.assert_has_tokens("outer_before\n" "inner_else\n" "inner_elsif\n" "outer_after\n" "keep") def test_preprocess_broken_define(self): result = self.preprocess("`define") @@ -587,14 +571,10 @@ def test_substitute_undefined(self): result = self.preprocess("`foo") result.assert_has_tokens("") # Debug since there are many custon `names in tools - result.logger.debug.assert_called_once_with( - "Verilog undefined name\n%s", "at fn.v line 1:\n" "`foo\n" "~~~~" - ) + result.logger.debug.assert_called_once_with("Verilog undefined name\n%s", "at fn.v line 1:\n" "`foo\n" "~~~~") def test_preprocess_include_directive_missing_file(self): - result = self.preprocess( - '`include "missing.svh"', include_paths=[self.output_path] - ) + result = self.preprocess('`include "missing.svh"', include_paths=[self.output_path]) result.assert_has_tokens("") result.assert_included_files([]) # Is debug message since there are so many builtin includes in tools @@ -614,9 +594,7 @@ def test_preprocess_include_directive_missing_argument(self): def test_preprocess_include_directive_bad_argument(self): self.write_file("include.svh", "hello hey") - result = self.preprocess( - '`include foo "include.svh"', include_paths=[self.output_path] - ) + result = self.preprocess('`include foo "include.svh"', include_paths=[self.output_path]) result.assert_has_tokens(' "include.svh"') result.assert_included_files([]) result.logger.warning.assert_called_once_with( @@ -670,9 +648,7 @@ def test_preprocess_include_directive_from_define_not_defined(self): def test_preprocess_error_in_include_file(self): self.write_file("include.svh", "`include foo") - result = self.preprocess( - '\n\n`include "include.svh"', include_paths=[self.output_path] - ) + result = self.preprocess('\n\n`include "include.svh"', include_paths=[self.output_path]) result.assert_has_tokens("\n\n") result.assert_included_files([str(Path(self.output_path) / "include.svh")]) result.logger.warning.assert_called_once_with( @@ -845,12 +821,8 @@ def preprocess(self, code, file_name="fn.v", include_paths=None): tokens = tokenizer.tokenize(code, file_name=file_name) defines = {} included_files = [] - with mock.patch( - "vunit.parsing.verilog.preprocess.LOGGER", autospec=True - ) as logger: - tokens = preprocessor.preprocess( - tokens, defines, include_paths, included_files - ) + with mock.patch("vunit.parsing.verilog.preprocess.LOGGER", autospec=True) as logger: + tokens = preprocessor.preprocess(tokens, defines, include_paths, included_files) return PreprocessResult( self, tokens, @@ -929,9 +901,7 @@ def macro_strip_loc(define): define.defaults[key] = strip_loc(value) for key in self.defines: - self.test.assertEqual( - macro_strip_loc(self.defines[key]), macro_strip_loc(defines[key]) - ) + self.test.assertEqual(macro_strip_loc(self.defines[key]), macro_strip_loc(defines[key])) def 
assert_no_log(self): """ diff --git a/tests/unit/test_verilog_tokenizer.py b/tests/unit/test_verilog_tokenizer.py index 9d713828d..27eb4aaa7 100644 --- a/tests/unit/test_verilog_tokenizer.py +++ b/tests/unit/test_verilog_tokenizer.py @@ -162,6 +162,4 @@ def preprocess(tokens): # pylint: disable=missing-docstring return tokens - self.assertEqual( - preprocess(list(self.tokenizer.tokenize(code, "fn.v"))), tokens - ) + self.assertEqual(preprocess(list(self.tokenizer.tokenize(code, "fn.v"))), tokens) diff --git a/tests/unit/test_vhdl_parser.py b/tests/unit/test_vhdl_parser.py index 7a6f0d8bd..a7bbedf00 100644 --- a/tests/unit/test_vhdl_parser.py +++ b/tests/unit/test_vhdl_parser.py @@ -233,9 +233,7 @@ def test_parsing_entity_with_generics(self): self.assertEqual(generics[0].identifier, "max_value") self.assertEqual(generics[0].init_value, "(2-19)*4") self.assertEqual(generics[0].mode, None) - self.assertEqual( - generics[0].subtype_indication.code, "integer range 2-2 to 2**10" - ) + self.assertEqual(generics[0].subtype_indication.code, "integer range 2-2 to 2**10") self.assertEqual(generics[0].subtype_indication.type_mark, "integer") # @TODO does not work # self.assertEqual(generics[0].subtypeIndication.constraint, "range 2-2 to 2**10") @@ -321,9 +319,7 @@ def test_parsing_entity_with_ports(self): self.assertEqual(ports[1].identifier, "data") self.assertEqual(ports[1].init_value, None) self.assertEqual(ports[1].mode, "out") - self.assertEqual( - ports[1].subtype_indication.code, "std_logic_vector(11-1 downto 0)" - ) + self.assertEqual(ports[1].subtype_indication.code, "std_logic_vector(11-1 downto 0)") self.assertEqual(ports[1].subtype_indication.type_mark, "std_logic_vector") self.assertEqual(ports[1].subtype_indication.constraint, "(11-1 downto 0)") @@ -480,10 +476,7 @@ def test_that_array_type_declarations_are_found(self): type constrained_badgers_array_t is array ( -1 downto 0 ) of badger_t; type unconstrained_natural_array_t is array ( integer range <> ) of natural; """ - arrays = { - e.identifier: e.subtype_indication.type_mark - for e in VHDLArrayType.find(code) - } + arrays = {e.identifier: e.subtype_indication.type_mark for e in VHDLArrayType.find(code)} expect = { "constrained_integer_array_t": "integer", "unconstrained_fish_array_t": "fish_t", @@ -513,13 +506,9 @@ def test_that_record_type_declarations_are_found(self): self.assertIn("space_time_t", records) self.assertEqual(records["space_time_t"][0].identifier_list, ["x", "y", "z"]) - self.assertEqual( - records["space_time_t"][0].subtype_indication.type_mark, "real" - ) + self.assertEqual(records["space_time_t"][0].subtype_indication.type_mark, "real") self.assertEqual(records["space_time_t"][1].identifier_list, ["t"]) - self.assertEqual( - records["space_time_t"][1].subtype_indication.type_mark, "time" - ) + self.assertEqual(records["space_time_t"][1].subtype_indication.type_mark, "time") self.assertIn("complex_t", records) self.assertEqual(records["complex_t"][0].identifier_list, ["im", "re"]) @@ -527,12 +516,8 @@ def test_that_record_type_declarations_are_found(self): self.assertIn("foo", records) self.assertEqual(records["foo"][0].identifier_list, ["bar"]) - self.assertEqual( - records["foo"][0].subtype_indication.type_mark, "std_logic_vector" - ) - self.assertEqual( - records["foo"][0].subtype_indication.constraint, "(7 downto 0)" - ) + self.assertEqual(records["foo"][0].subtype_indication.type_mark, "std_logic_vector") + self.assertEqual(records["foo"][0].subtype_indication.constraint, "(7 downto 0)") 
self.assertTrue(records["foo"][0].subtype_indication.array_type) def test_remove_comments(self): @@ -579,13 +564,9 @@ def _create_entity(): """ Helper function to create a VHDLEntity """ - data_width = VHDLInterfaceElement( - "data_width", VHDLSubtypeIndication.parse("natural := 16") - ) + data_width = VHDLInterfaceElement("data_width", VHDLSubtypeIndication.parse("natural := 16")) - clk = VHDLInterfaceElement( - "clk", VHDLSubtypeIndication.parse("std_logic"), "in" - ) + clk = VHDLInterfaceElement("clk", VHDLSubtypeIndication.parse("std_logic"), "in") data = VHDLInterfaceElement( "data", VHDLSubtypeIndication.parse("std_logic_vector(data_width-1 downto 0)"), diff --git a/tools/create_release_notes.py b/tools/create_release_notes.py index c80997a3e..90e3c5499 100644 --- a/tools/create_release_notes.py +++ b/tools/create_release_notes.py @@ -22,9 +22,7 @@ def get_releases(source_path: Path): """ release_notes = source_path / "release_notes" releases = [] - for idx, file_name in enumerate( - sorted(glob(str(release_notes / "*.rst")), reverse=True) - ): + for idx, file_name in enumerate(sorted(glob(str(release_notes / "*.rst")), reverse=True)): releases.append(Release(file_name, is_latest=idx == 0)) return releases @@ -72,10 +70,7 @@ def create_release_notes(): fptr.write(title + "\n") fptr.write("-" * len(title) + "\n\n") - fptr.write( - "\n`Download from PyPI `__" - % release.name - ) + fptr.write("\n`Download from PyPI `__" % release.name) if not is_last: fptr.write( diff --git a/tools/docs_utils.py b/tools/docs_utils.py index 102e17c64..4e8386e20 100644 --- a/tools/docs_utils.py +++ b/tools/docs_utils.py @@ -33,8 +33,7 @@ def examples(): if loc.is_dir(): _data = _get_eg_doc( loc, - "https://github.com/VUnit/vunit/tree/master/examples/%s/%s" - % (subdir, item), + "https://github.com/VUnit/vunit/tree/master/examples/%s/%s" % (subdir, item), ) if _data: egs_fptr.write(_data) @@ -47,11 +46,7 @@ def _get_eg_doc(location: Path, ref): nstr = str(location.name) if not (location / "run.py").is_file(): - print( - "WARNING: Example subdir '" - + nstr - + "' does not contain a 'run.py' file. Skipping..." - ) + print("WARNING: Example subdir '" + nstr + "' does not contain a 'run.py' file. Skipping...") return None print("Generating '_main.py' from 'run.py' in '" + nstr + "'...") @@ -70,11 +65,7 @@ def _get_eg_doc(location: Path, ref): remove(str(location / "_main.py")) if not eg_doc: - print( - "WARNING: 'run.py' file in example subdir '" - + nstr - + "' does not contain a docstring. Skipping..." - ) + print("WARNING: 'run.py' file in example subdir '" + nstr + "' does not contain a docstring. 
Skipping...") return "" title = "`%s <%s/>`_" % (eg_doc.split("---", 1)[0][0:-1], ref) diff --git a/tools/release.py b/tools/release.py index ec1fb6c38..277c9184b 100755 --- a/tools/release.py +++ b/tools/release.py @@ -88,31 +88,20 @@ def validate_new_release(version, pre_tag): release_note = release_note_file_name(version) if not release_note.exists(): - print( - "Not releasing version %s since release note %s does not exist" - % (version, str(release_note)) - ) + print("Not releasing version %s since release note %s does not exist" % (version, str(release_note))) sys.exit(1) with release_note.open("r") as fptr: if not fptr.read(): - print( - "Not releasing version %s since release note %s is empty" - % (version, str(release_note)) - ) + print("Not releasing version %s since release note %s is empty" % (version, str(release_note))) sys.exit(1) if pre_tag and check_tag(version): - print( - "Not creating new release %s since tag v%s already exist" - % (version, version) - ) + print("Not creating new release %s since tag v%s already exist" % (version, version)) sys.exit(1) if not pre_tag and not check_tag(version): - print( - "Not releasing version %s since tag v%s does not exist" % (version, version) - ) + print("Not releasing version %s since tag v%s does not exist" % (version, version)) sys.exit(1) with urlopen("https://pypi.python.org/pypi/vunit_hdl/json") as fptr: @@ -139,9 +128,7 @@ def set_version(version): content = fptr.read() print("Set local version to %s" % version) - content = content.replace( - 'VERSION = "%s"' % get_local_version(), 'VERSION = "%s"' % version - ) + content = content.replace('VERSION = "%s"' % get_local_version(), 'VERSION = "%s"' % version) with ABOUT_PY.open("w") as fptr: fptr.write(content) @@ -158,21 +145,13 @@ def get_local_version(): Return the local python package version and check if corresponding release notes exist """ - version = ( - subprocess.check_output( - [sys.executable, str(REPO_ROOT / "setup.py"), "--version"] - ) - .decode() - .strip() - ) + version = subprocess.check_output([sys.executable, str(REPO_ROOT / "setup.py"), "--version"]).decode().strip() return version def check_tag(version): - return "v" + version in set( - subprocess.check_output([which("git"), "tag", "--list"]).decode().splitlines() - ) + return "v" + version in set(subprocess.check_output([which("git"), "tag", "--list"]).decode().splitlines()) def run(cmd): diff --git a/vunit/builtins.py b/vunit/builtins.py index f66daad01..9561909a8 100644 --- a/vunit/builtins.py +++ b/vunit/builtins.py @@ -47,10 +47,7 @@ def _add_files(self, pattern=None, allow_empty=True): """ Add files with naming convention to indicate which standard is supported """ - supports_context = ( - self._simulator_class.supports_vhdl_contexts() - and self._vhdl_standard.supports_context - ) + supports_context = self._simulator_class.supports_vhdl_contexts() and self._vhdl_standard.supports_context for file_name in get_checked_file_names_from_globs(pattern, allow_empty): base_file_name = Path(file_name).name @@ -87,17 +84,8 @@ def _add_data_types(self, external=None): for key in ["string", "integer_vector"]: self._add_files( - pattern=str( - VHDL_PATH - / "data_types" - / "src" - / "api" - / ("external_%s_pkg.vhd" % key) - ) - if external is None - or key not in external - or not external[key] - or external[key] is True + pattern=str(VHDL_PATH / "data_types" / "src" / "api" / ("external_%s_pkg.vhd" % key)) + if external is None or key not in external or not external[key] or external[key] is True else 
external[key],
                 allow_empty=False,
             )
@@ -110,8 +98,7 @@ def _add_array_util(self):
             raise RuntimeError("Array util only supports vhdl 2008 and later")
 
         arr_deprecation_note = (
-            "'array_t' is deprecated and it will removed in future releases;"
-            "use 'integer_array_t' instead"
+            "'array_t' is deprecated and it will removed in future releases; use 'integer_array_t' instead"
         )
         warn(arr_deprecation_note, Warning)
 
@@ -131,9 +118,7 @@ def _add_com(self):
         Add com library
         """
         if not self._vhdl_standard >= VHDL.STD_2008:
-            raise RuntimeError(
-                "Communication package only supports vhdl 2008 and later"
-            )
+            raise RuntimeError("Communication package only supports vhdl 2008 and later")
 
         self._add_files(VHDL_PATH / "com" / "src" / "*.vhd")
 
@@ -142,9 +127,7 @@ def _add_verification_components(self):
         Add verification component library
         """
         if not self._vhdl_standard >= VHDL.STD_2008:
-            raise RuntimeError(
-                "Verification component library only supports vhdl 2008 and later"
-            )
+            raise RuntimeError("Verification component library only supports vhdl 2008 and later")
         self._add_files(VHDL_PATH / "verification_components" / "src" / "*.vhd")
 
     def _add_osvvm(self):
@@ -159,9 +142,7 @@ def _add_osvvm(self):
             library = self._vunit_obj.add_library(library_name)
 
         simulator_coverage_api = self._simulator_class.get_osvvm_coverage_api()
-        supports_vhdl_package_generics = (
-            self._simulator_class.supports_vhdl_package_generics()
-        )
+        supports_vhdl_package_generics = self._simulator_class.supports_vhdl_package_generics()
 
         if not osvvm_is_installed():
             raise RuntimeError(
@@ -179,12 +160,8 @@ def _add_osvvm(self):
             if (bname == "AlertLogPkg_body_BVUL.vhd") or ("2019" in bname):
                 continue
 
-            if (
-                (simulator_coverage_api != "rivierapro")
-                and (bname == "VendorCovApiPkg_Aldec.vhd")
-            ) or (
-                (simulator_coverage_api == "rivierapro")
-                and (bname == "VendorCovApiPkg.vhd")
+            if ((simulator_coverage_api != "rivierapro") and (bname == "VendorCovApiPkg_Aldec.vhd")) or (
+                (simulator_coverage_api == "rivierapro") and (bname == "VendorCovApiPkg.vhd")
             ):
                 continue
 
diff --git a/vunit/cached.py b/vunit/cached.py
index b550883d0..6bf5a899f 100644
--- a/vunit/cached.py
+++ b/vunit/cached.py
@@ -24,9 +24,7 @@ def cached(key, function, file_name, encoding, database=None, newline=None):
         return function(content)
 
     function_key = ("%s(%s, newline=%s)" % (key, file_name, newline)).encode()
-    content, content_hash = _file_content_hash(
-        file_name, encoding, database, newline=newline
-    )
+    content, content_hash = _file_content_hash(file_name, encoding, database, newline=newline)
 
     if function_key not in database:
         # We do not have a cached version of this computation
diff --git a/vunit/check_preprocessor.py b/vunit/check_preprocessor.py
index 155ea8acf..d57747260 100644
--- a/vunit/check_preprocessor.py
+++ b/vunit/check_preprocessor.py
@@ -17,9 +17,7 @@ class CheckPreprocessor(object):
     """
 
    def __init__(self):
-        self._find_operators = re.compile(
-            r"\?/=|\?<=|\?>=|\?<|\?>|\?=|/=|<=|>=|<|>|=", re.MULTILINE
-        )
+        self._find_operators = re.compile(r"\?/=|\?<=|\?>=|\?<|\?>|\?=|/=|<=|>=|<|>|=", re.MULTILINE)
         self._find_quotes = re.compile(r'"|' + r"'", re.MULTILINE)
         self._find_comments = re.compile(r"--|/\*|\*/", re.MULTILINE)
         self._actual_formal = re.compile(r"=>(?P<actual>.*)", re.MULTILINE)
@@ -30,9 +28,7 @@ def run(self, code, file_name):  # pylint: disable=unused-argument
         """
         Preprocess code and return result also given the file_name of the original file
         """
-        check_relation_pattern = re.compile(
-            r"[^a-zA-Z0-9_](?P<call>check_relation)\s*(?P<parameters>\()", re.MULTILINE
-        )
+        check_relation_pattern = re.compile(r"[^a-zA-Z0-9_](?P<call>check_relation)\s*(?P<parameters>\()", re.MULTILINE)
 
         check_relation_calls = list(check_relation_pattern.finditer(code))
         check_relation_calls.reverse()
@@ -43,19 +39,11 @@ def run(self, code, file_name):  # pylint: disable=unused-argument
                 offset_to_point_before_closing_paranthesis,
             ) = self._extract_relation(code, match)
             if relation:
-                context_msg_parameter = (
-                    ", context_msg => %s" % relation.make_context_msg()
-                )
+                context_msg_parameter = ", context_msg => %s" % relation.make_context_msg()
                 code = (
-                    code[
-                        : match.end("parameters")
-                        + offset_to_point_before_closing_paranthesis
-                    ]
+                    code[: match.end("parameters") + offset_to_point_before_closing_paranthesis]
                     + context_msg_parameter
-                    + code[
-                        match.end("parameters")
-                        + offset_to_point_before_closing_paranthesis :
-                    ]
+                    + code[match.end("parameters") + offset_to_point_before_closing_paranthesis :]
                 )
 
         return code
@@ -93,8 +81,7 @@ def end_of_parameter(token):
 
         if not relation:
             raise SyntaxError(
-                "Failed to find relation in %s"
-                % code[check.start("call") : check.end("parameters") + index]
+                "Failed to find relation in %s" % code[check.start("call") : check.end("parameters") + index]
             )
 
         return relation, index - 1
@@ -162,19 +149,14 @@ def _get_relation_from_parameter(self, tokens):
         def find_top_level_match(matches, tokens, top_level=1):
             if matches:
                 for match in matches:
-                    if (
-                        not tokens[match.start()].is_quote
-                        and tokens[match.start()].level == top_level
-                    ):
+                    if not tokens[match.start()].is_quote and tokens[match.start()].level == top_level:
                         return match
 
             return None
 
         relation = None
         token_string = "".join([token.value for token in tokens]).strip()
-        actual_formal = find_top_level_match(
-            self._actual_formal.finditer(token_string), tokens
-        )
+        actual_formal = find_top_level_match(self._actual_formal.finditer(token_string), tokens)
         if actual_formal:
             expr = actual_formal.group("actual")
             start = actual_formal.start("actual")
@@ -195,24 +177,14 @@ def find_top_level_match(matches, tokens, top_level=1):
                 )
                 + 1
             )
-        top_level_match = find_top_level_match(
-            self._find_operators.finditer(expr), tokens[start:], top_level
-        )
+        top_level_match = find_top_level_match(self._find_operators.finditer(expr), tokens[start:], top_level)
         if top_level_match:
             if top_level == 1:
                 left = expr[: top_level_match.start()].strip()
                 right = expr[top_level_match.end() :].strip()
             else:
-                left = (
-                    expr[: top_level_match.start()]
-                    .replace("(", "", top_level - 1)
-                    .strip()
-                )
-                right = (
-                    expr[: top_level_match.end() : -1]
-                    .replace(")", "", top_level - 1)
-                    .strip()[::-1]
-                )
+                left = expr[: top_level_match.start()].replace("(", "", top_level - 1).strip()
+                right = expr[: top_level_match.end() : -1].replace(")", "", top_level - 1).strip()[::-1]
 
             relation = Relation(left, top_level_match.group(), right)
 
diff --git a/vunit/color_printer.py b/vunit/color_printer.py
index 94c8d3c17..2c844ed13 100644
--- a/vunit/color_printer.py
+++ b/vunit/color_printer.py
@@ -30,9 +30,7 @@ def __init__(self):
         pass
 
     @staticmethod
-    def write(
-        text, output_file=None, fg=None, bg=None
-    ):  # pylint: disable=unused-argument
+    def write(text, output_file=None, fg=None, bg=None):  # pylint: disable=unused-argument
         """
         Print the text in color to the output_file
         uses stdout if output_file is None
@@ -48,9 +46,7 @@ def __init__(self):
         ColorPrinter.__init__(self)
 
     @staticmethod
-    def write(
-        text, output_file=None, fg=None, bg=None
-    ):  # pylint: disable=unused-argument
+    def write(text, output_file=None, fg=None, bg=None):  # pylint:
disable=unused-argument """ Print the text in color to the output_file uses stdout if output_file is None @@ -115,9 +111,7 @@ def _ansi_wrap(self, text, fg, bg): if bg is not None and "i" in bg: codes.append(4) # Underscore - return ( - "\033[" + ";".join([str(code) for code in codes]) + "m" + text + "\033[0m" - ) + return "\033[" + ";".join([str(code) for code in codes]) + "m" + text + "\033[0m" class Coord(Structure): diff --git a/vunit/com/codec_vhdl_array_type.py b/vunit/com/codec_vhdl_array_type.py index 6babe7ade..e20833288 100644 --- a/vunit/com/codec_vhdl_array_type.py +++ b/vunit/com/codec_vhdl_array_type.py @@ -28,31 +28,17 @@ def generate_codecs_and_support_functions(self): and self.range2.attribute is None and self.range2.range_type is None ) - is_constrained = ( - self.range1.range_type is None and self.range2.range_type is None - ) + is_constrained = self.range1.range_type is None and self.range2.range_type is None declarations += template.codec_declarations.substitute(type=self.identifier) declarations += template.to_string_declarations.substitute(type=self.identifier) if is_constrained: if has_one_dimension: - definitions += template.constrained_1d_array_definition.substitute( - type=self.identifier - ) - definitions += ( - template.constrained_1d_array_to_string_definition.substitute( - type=self.identifier - ) - ) + definitions += template.constrained_1d_array_definition.substitute(type=self.identifier) + definitions += template.constrained_1d_array_to_string_definition.substitute(type=self.identifier) else: - definitions += template.constrained_2d_array_definition.substitute( - type=self.identifier - ) - definitions += ( - template.constrained_2d_array_to_string_definition.substitute( - type=self.identifier - ) - ) + definitions += template.constrained_2d_array_definition.substitute(type=self.identifier) + definitions += template.constrained_2d_array_to_string_definition.substitute(type=self.identifier) else: if has_one_dimension: init_value = "" @@ -61,10 +47,8 @@ def generate_codecs_and_support_functions(self): init_value=init_value, range_type=self.range1.range_type, ) - definitions += ( - template.unconstrained_1d_array_to_string_definition.substitute( - array_type=self.identifier, range_type=self.range1.range_type - ) + definitions += template.unconstrained_1d_array_to_string_definition.substitute( + array_type=self.identifier, range_type=self.range1.range_type ) else: definitions += template.unconstrained_2d_array_definition.substitute( @@ -72,12 +56,10 @@ def generate_codecs_and_support_functions(self): range_type1=self.range1.range_type, range_type2=self.range2.range_type, ) - definitions += ( - template.unconstrained_2d_array_to_string_definition.substitute( - array_type=self.identifier, - range_type1=self.range1.range_type, - range_type2=self.range2.range_type, - ) + definitions += template.unconstrained_2d_array_to_string_definition.substitute( + array_type=self.identifier, + range_type1=self.range1.range_type, + range_type2=self.range2.range_type, ) return declarations, definitions diff --git a/vunit/com/codec_vhdl_enumeration_type.py b/vunit/com/codec_vhdl_enumeration_type.py index 347161044..cacdd9987 100644 --- a/vunit/com/codec_vhdl_enumeration_type.py +++ b/vunit/com/codec_vhdl_enumeration_type.py @@ -24,18 +24,12 @@ def generate_codecs_and_support_functions(self, offset=0): definitions = "" if len(self.literals) > 256: - raise NotImplementedError( - "Support for enums with more than 256 values are yet to be implemented" - ) + raise 
NotImplementedError("Support for enums with more than 256 values are yet to be implemented") declarations += template.codec_declarations.substitute(type=self.identifier) - definitions += template.enumeration_codec_definitions.substitute( - type=self.identifier, offset=offset - ) + definitions += template.enumeration_codec_definitions.substitute(type=self.identifier, offset=offset) declarations += template.to_string_declarations.substitute(type=self.identifier) - definitions += template.enumeration_to_string_definitions.substitute( - type=self.identifier - ) + definitions += template.enumeration_to_string_definitions.substitute(type=self.identifier) return declarations, definitions diff --git a/vunit/com/codec_vhdl_package.py b/vunit/com/codec_vhdl_package.py index 4b1d9a1ef..0c2226b1f 100644 --- a/vunit/com/codec_vhdl_package.py +++ b/vunit/com/codec_vhdl_package.py @@ -76,11 +76,9 @@ def generate_codecs_and_support_functions(self): msg_type_enumeration_types, ) = self._create_enumeration_of_all_msg_types() if all_msg_types_enumeration_type is not None: - declarations += ( - self._template.all_msg_types_enumeration_type_declaration.substitute( - identifier=all_msg_types_enumeration_type.identifier, - literals=", ".join(all_msg_types_enumeration_type.literals), - ) + declarations += self._template.all_msg_types_enumeration_type_declaration.substitute( + identifier=all_msg_types_enumeration_type.identifier, + literals=", ".join(all_msg_types_enumeration_type.literals), ) if all_msg_types_enumeration_type is not None: @@ -91,10 +89,7 @@ def generate_codecs_and_support_functions(self): type=all_msg_types_enumeration_type.identifier ) - ( - new_declarations, - new_definitions, - ) = self._generate_enumeration_codec_and_to_string_functions( + (new_declarations, new_definitions,) = self._generate_enumeration_codec_and_to_string_functions( all_msg_types_enumeration_type, msg_type_enumeration_types ) declarations += new_declarations @@ -151,18 +146,14 @@ def _create_enumeration_of_all_msg_types(self): msg_type_enumeration_types = [] for record in self.record_types: if record.elements[0].identifier_list[0] == "msg_type": - msg_type_enumeration_types.append( - record.elements[0].subtype_indication.code - ) + msg_type_enumeration_types.append(record.elements[0].subtype_indication.code) msg_type_enumeration_literals = [] for enum in self.enumeration_types: if enum.identifier in msg_type_enumeration_types: for literal in enum.literals: if literal in msg_type_enumeration_literals: - raise RuntimeError( - "Different msg_type enumerations may not have the same literals" - ) + raise RuntimeError("Different msg_type enumerations may not have the same literals") msg_type_enumeration_literals.append(literal) @@ -184,9 +175,7 @@ def _generate_enumeration_codec_and_to_string_functions( definitions = "" enumeration_offset = 0 for enum in self.enumeration_types + ( - [all_msg_types_enumeration_type] - if all_msg_types_enumeration_type is not None - else [] + [all_msg_types_enumeration_type] if all_msg_types_enumeration_type is not None else [] ): if enum.identifier in msg_type_enumeration_types: @@ -221,9 +210,7 @@ def _generate_msg_type_encoders(self): # pylint: disable=too-many-locals msg_type_record_types = self._get_records_with_an_initial_msg_type_element() for record in msg_type_record_types: - msg_type_values = enumeration_types.get( - record.elements[0].subtype_indication.type_mark - ) + msg_type_values = enumeration_types.get(record.elements[0].subtype_indication.type_mark) if msg_type_values is None: 
continue @@ -236,48 +223,33 @@ def _generate_msg_type_encoders(self): # pylint: disable=too-many-locals for identifier in element.identifier_list: if identifier != "msg_type": parameter_list.append( - " constant %s : %s" - % (identifier, element.subtype_indication.code) - ) - parameter_type_list.append( - element.subtype_indication.type_mark + " constant %s : %s" % (identifier, element.subtype_indication.code) ) + parameter_type_list.append(element.subtype_indication.type_mark) encoding_list.append("encode(%s)" % identifier) else: - encoding_list.append( - "encode(%s'(%s))" - % (element.subtype_indication.code, value) - ) + encoding_list.append("encode(%s'(%s))" % (element.subtype_indication.code, value)) if parameter_list == []: parameter_part = "" alias_signature = value + "[return string];" else: parameter_part = " (\n" + ";\n".join(parameter_list) + ")" - alias_signature = ( - value - + "[" - + ", ".join(parameter_type_list) - + " return string];" - ) + alias_signature = value + "[" + ", ".join(parameter_type_list) + " return string];" encodings = " & ".join(encoding_list) - declarations += ( - self._template.msg_type_record_codec_declaration.substitute( - name=value, - parameter_part=parameter_part, - alias_signature=alias_signature, - alias_name=value + "_msg", - ) + declarations += self._template.msg_type_record_codec_declaration.substitute( + name=value, + parameter_part=parameter_part, + alias_signature=alias_signature, + alias_name=value + "_msg", ) - definitions += ( - self._template.msg_type_record_codec_definition.substitute( - name=value, - parameter_part=parameter_part, - num_of_encodings=len(encoding_list), - encodings=encodings, - ) + definitions += self._template.msg_type_record_codec_definition.substitute( + name=value, + parameter_part=parameter_part, + num_of_encodings=len(encoding_list), + encodings=encodings, ) return declarations, definitions @@ -294,16 +266,8 @@ def _generate_get_functions(self): msg_type_type = record.elements[0].subtype_indication.code if msg_type_type not in msg_type_types: msg_type_types.append(msg_type_type) - declarations += ( - self._template.get_specific_msg_type_declaration.substitute( - type=msg_type_type - ) - ) - definitions += ( - self._template.get_specific_msg_type_definition.substitute( - type=msg_type_type - ) - ) + declarations += self._template.get_specific_msg_type_declaration.substitute(type=msg_type_type) + definitions += self._template.get_specific_msg_type_definition.substitute(type=msg_type_type) return declarations, definitions diff --git a/vunit/configuration.py b/vunit/configuration.py index fe795da26..985ae7f09 100644 --- a/vunit/configuration.py +++ b/vunit/configuration.py @@ -145,9 +145,7 @@ def call_pre_config(self, output_path, simulator_output_path): if self.pre_config is None: return True - args = inspect.getargspec( # pylint: disable=deprecated-method - self.pre_config - ).args + args = inspect.getargspec(self.pre_config).args # pylint: disable=deprecated-method kwargs = { "output_path": output_path, @@ -167,9 +165,7 @@ def call_post_check(self, output_path, read_output): if self.post_check is None: return True - args = inspect.getargspec( # pylint: disable=deprecated-method - self.post_check - ).args + args = inspect.getargspec(self.post_check).args # pylint: disable=deprecated-method kwargs = {"output_path": lambda: output_path, "output": read_output} @@ -222,9 +218,7 @@ def set_sim_option(self, name, value, overwrite=True): for configs in self.get_configuration_dicts(): for config in configs.values(): if not 
overwrite: - config.set_sim_option( - name, config.sim_options.get(name, []) + value - ) + config.set_sim_option(name, config.sim_options.get(name, []) + value) continue config.set_sim_option(name, value) @@ -261,9 +255,7 @@ def add_config( # pylint: disable=too-many-arguments self._check_enabled() if name in (DEFAULT_NAME, ""): - raise ValueError( - "Illegal configuration name %r. Must be non-empty string" % name - ) + raise ValueError("Illegal configuration name %r. Must be non-empty string" % name) for configs in self.get_configuration_dicts(): if name in configs: diff --git a/vunit/csv_logs.py b/vunit/csv_logs.py index 4ede50b09..1014cce2a 100644 --- a/vunit/csv_logs.py +++ b/vunit/csv_logs.py @@ -42,17 +42,13 @@ def add(self, pattern): fread.seek(0) if sample: dialect = Sniffer().sniff(sample) - self._entries += DictReader( - fread, fieldnames=self._field_names, dialect=dialect - ) + self._entries += DictReader(fread, fieldnames=self._field_names, dialect=dialect) self._entries.sort(key=lambda dictionary: int(dictionary["#"])) def write(self, output_file): # pylint: disable=missing-docstring with Path(output_file).open("w", encoding=self._encoding) as fwrite: - csv_writer = DictWriter( - fwrite, delimiter=",", fieldnames=self._field_names, lineterminator="\n" - ) + csv_writer = DictWriter(fwrite, delimiter=",", fieldnames=self._field_names, lineterminator="\n") csv_writer.writerow({name: name for name in self._field_names}) csv_writer.writerows(self._entries) diff --git a/vunit/library.py b/vunit/library.py index eebb2a34d..1d7913901 100644 --- a/vunit/library.py +++ b/vunit/library.py @@ -21,9 +21,7 @@ class Library(object): # pylint: disable=too-many-instance-attributes Represents a VHDL library """ - def __init__( - self, name: str, directory: str, vhdl_standard: VHDLStandard, is_external=False - ): + def __init__(self, name: str, directory: str, vhdl_standard: VHDLStandard, is_external=False): self.name = name self.directory = directory @@ -57,9 +55,7 @@ def add_source_file(self, source_file): if source_file.name in self._source_files: old_source_file = self._source_files[source_file.name] if old_source_file.content_hash != source_file.content_hash: - raise RuntimeError( - "%s already added to library %s" % (source_file.name, self.name) - ) + raise RuntimeError("%s already added to library %s" % (source_file.name, self.name)) LOGGER.info( "Ignoring duplicate file %s added to library %s due to identical contents", @@ -106,9 +102,7 @@ def _check_duplication(self, dictionary, design_unit): and give warning """ if design_unit.name in dictionary: - self._warning_on_duplication( - design_unit, dictionary[design_unit.name].source_file.name - ) + self._warning_on_duplication(design_unit, dictionary[design_unit.name].source_file.name) def add_vhdl_design_units(self, design_units): """ @@ -132,33 +126,22 @@ def add_vhdl_design_units(self, design_units): if design_unit.primary_design_unit not in self._architectures: self._architectures[design_unit.primary_design_unit] = {} - if ( - design_unit.name - in self._architectures[design_unit.primary_design_unit] - ): + if design_unit.name in self._architectures[design_unit.primary_design_unit]: self._warning_on_duplication( design_unit, - self._architectures[design_unit.primary_design_unit][ - design_unit.name - ].source_file.name, + self._architectures[design_unit.primary_design_unit][design_unit.name].source_file.name, ) - self._architectures[design_unit.primary_design_unit][ - design_unit.name - ] = design_unit + 
self._architectures[design_unit.primary_design_unit][design_unit.name] = design_unit
 
                 if design_unit.primary_design_unit in self._entities:
-                    self._entities[
-                        design_unit.primary_design_unit
-                    ].add_architecture(design_unit)
+                    self._entities[design_unit.primary_design_unit].add_architecture(design_unit)
 
             if design_unit.unit_type == "package body":
                 if design_unit.primary_design_unit in self._package_bodies:
                     self._warning_on_duplication(
                         design_unit,
-                        self._package_bodies[
-                            design_unit.primary_design_unit
-                        ].source_file.name,
+                        self._package_bodies[design_unit.primary_design_unit].source_file.name,
                     )
                 self._package_bodies[design_unit.primary_design_unit] = design_unit
 
@@ -169,9 +152,7 @@ def add_verilog_design_units(self, design_units):
         for design_unit in design_units:
             if design_unit.unit_type == "module":
                 if design_unit.name in self.modules:
-                    self._warning_on_duplication(
-                        design_unit, self.modules[design_unit.name].source_file.name
-                    )
+                    self._warning_on_duplication(design_unit, self.modules[design_unit.name].source_file.name)
                 self.modules[design_unit.name] = design_unit
             elif design_unit.unit_type == "package":
                 if design_unit.name in self.verilog_packages:
diff --git a/vunit/location_preprocessor.py b/vunit/location_preprocessor.py
index b35729131..047f250d6 100644
--- a/vunit/location_preprocessor.py
+++ b/vunit/location_preprocessor.py
@@ -83,11 +83,7 @@ def remove_subprogram(self, subprogram):
         """
         Remove a subprogram name from the list of known names to preprocess
         """
-        if (
-            subprogram
-            not in self._subprograms_without_arguments
-            + self._subprograms_with_arguments
-        ):
+        if subprogram not in self._subprograms_without_arguments + self._subprograms_with_arguments:
             raise RuntimeError("Unable to remove unknown subprogram %s" % subprogram)
 
         if subprogram in self._subprograms_without_arguments:
@@ -113,9 +109,7 @@ def _find_closing_parenthesis(args):
 
     _already_fixed_file_name_pattern = re.compile(r"file_name\s*=>", re.MULTILINE)
     _already_fixed_line_num_pattern = re.compile(r"line_num\s*=>", re.MULTILINE)
-    _subprogram_declaration_start_backwards_pattern = re.compile(
-        r"\s+(erudecorp|noitcnuf)"
-    )
+    _subprogram_declaration_start_backwards_pattern = re.compile(r"\s+(erudecorp|noitcnuf)")
     _assignment_pattern = re.compile(r"\s*(:=|<=)", re.MULTILINE)
 
     def run(self, code, file_name):
@@ -123,61 +117,36 @@ def run(self, code, file_name):
         Return preprocessed code given file_name of original file
         """
         potential_subprogram_call_with_arguments_pattern = re.compile(
-            r"[^a-zA-Z0-9_](?P<subprogram>"
-            + "|".join(self._subprograms_with_arguments)
-            + r")\s*(?P<args>\()",
+            r"[^a-zA-Z0-9_](?P<subprogram>" + "|".join(self._subprograms_with_arguments) + r")\s*(?P<args>\()",
             re.MULTILINE,
         )
         potential_subprogram_call_without_arguments_pattern = re.compile(
-            r"[^a-zA-Z0-9_](?P<subprogram>"
-            + "|".join(self._subprograms_without_arguments)
-            + r")\s*;",
+            r"[^a-zA-Z0-9_](?P<subprogram>" + "|".join(self._subprograms_without_arguments) + r")\s*;",
             re.MULTILINE,
         )
 
         matches = list(potential_subprogram_call_with_arguments_pattern.finditer(code))
         if self._subprograms_without_arguments:
-            matches += list(
-                potential_subprogram_call_without_arguments_pattern.finditer(code)
-            )
+            matches += list(potential_subprogram_call_without_arguments_pattern.finditer(code))
 
         matches.sort(key=lambda match: match.start("subprogram"), reverse=True)
 
         for match in matches:
-            if self._subprogram_declaration_start_backwards_pattern.match(
-                code[match.start() : 0 : -1]
-            ):
+            if self._subprogram_declaration_start_backwards_pattern.match(code[match.start() : 0 : -1]):
                 continue
file_name_association = ', file_name => "' + file_name + '"' - line_num_association = ", line_num => " + str( - 1 + code[: match.start("subprogram")].count("\n") - ) + line_num_association = ", line_num => " + str(1 + code[: match.start("subprogram")].count("\n")) if "args" in match.groupdict(): - closing_paranthesis_start = self._find_closing_parenthesis( - code[match.start("args") :] - ) + closing_paranthesis_start = self._find_closing_parenthesis(code[match.start("args") :]) - if self._assignment_pattern.match( - code[match.start("args") + closing_paranthesis_start + 1 :] - ): + if self._assignment_pattern.match(code[match.start("args") + closing_paranthesis_start + 1 :]): continue - args = code[ - match.start("args") : match.start("args") - + closing_paranthesis_start - ] - already_fixed_file_name = ( - self._already_fixed_file_name_pattern.search(args) is not None - ) - already_fixed_line_num = ( - self._already_fixed_line_num_pattern.search(args) is not None - ) - file_name_association = ( - file_name_association if not already_fixed_file_name else "" - ) - line_num_association = ( - line_num_association if not already_fixed_line_num else "" - ) + args = code[match.start("args") : match.start("args") + closing_paranthesis_start] + already_fixed_file_name = self._already_fixed_file_name_pattern.search(args) is not None + already_fixed_line_num = self._already_fixed_line_num_pattern.search(args) is not None + file_name_association = file_name_association if not already_fixed_file_name else "" + line_num_association = line_num_association if not already_fixed_line_num else "" code = ( code[: match.start("args") + closing_paranthesis_start] diff --git a/vunit/ostools.py b/vunit/ostools.py index 42d4cd105..a3976627d 100644 --- a/vunit/ostools.py +++ b/vunit/ostools.py @@ -127,9 +127,7 @@ def __init__(self, args, cwd=None, env=None): preexec_fn=os.setpgrp, # pylint: disable=no-member ) - LOGGER.debug( - "Started process with pid=%i: '%s'", self._process.pid, (" ".join(args)) - ) + LOGGER.debug("Started process with pid=%i: '%s'", self._process.pid, (" ".join(args))) self._queue = InterruptableQueue() self._reader = AsynchronousFileReader(self._process.stdout, self._queue) @@ -277,9 +275,7 @@ def eof(self): def read_file(file_name, encoding="utf-8", newline=None): """To stub during testing""" try: - with io.open( - file_name, "r", encoding=encoding, newline=newline - ) as file_to_read: + with io.open(file_name, "r", encoding=encoding, newline=newline) as file_to_read: data = file_to_read.read() except UnicodeDecodeError: LOGGER.warning( @@ -287,9 +283,7 @@ def read_file(file_name, encoding="utf-8", newline=None): file_name, encoding, ) - with io.open( - file_name, "r", encoding=encoding, errors="ignore", newline=newline - ) as file_to_read: + with io.open(file_name, "r", encoding=encoding, errors="ignore", newline=newline) as file_to_read: data = file_to_read.read() return data diff --git a/vunit/parsing/tokenizer.py b/vunit/parsing/tokenizer.py index f402287d4..cd120750e 100644 --- a/vunit/parsing/tokenizer.py +++ b/vunit/parsing/tokenizer.py @@ -14,9 +14,7 @@ def Token(kind, value="", location=None): # pylint: disable=invalid-name - return collections.namedtuple("Token", ["kind", "value", "location"])( - kind, value, location - ) + return collections.namedtuple("Token", ["kind", "value", "location"])(kind, value, location) class TokenKind: @@ -63,9 +61,7 @@ def finalize(self): re.VERBOSE | re.MULTILINE, ) - def tokenize( - self, code, file_name=None, previous_location=None, 
create_locations=False - ): + def tokenize(self, code, file_name=None, previous_location=None, create_locations=False): """ Tokenize the code """ @@ -164,9 +160,7 @@ def expect(self, *kinds): expected = str(kinds[0]) else: expected = "any of [%s]" % ", ".join(str(kind) for kind in kinds) - raise LocationException.error( - "Expected %s got %s" % (expected, token.kind), token.location - ) + raise LocationException.error("Expected %s got %s" % (expected, token.kind), token.location) return token def slice(self, start, end): @@ -212,9 +206,7 @@ def describe_location(location, first=True): lineno + 1, ) retval += line + "\n" - retval += (" " * (start - lstart)) + ( - "~" * (min(lend - 1, end) - start + 1) - ) + retval += (" " * (start - lstart)) + ("~" * (min(lend - 1, end) - start + 1)) return retval count = lend + 1 diff --git a/vunit/parsing/verilog/parser.py b/vunit/parsing/verilog/parser.py index 7d8828068..689da9108 100644 --- a/vunit/parsing/verilog/parser.py +++ b/vunit/parsing/verilog/parser.py @@ -69,10 +69,7 @@ def parse(self, file_name, include_paths=None, defines=None): if cached is not None: return cached - initial_defines = dict( - (key, Macro(key, self._tokenizer.tokenize(value))) - for key, value in defines.items() - ) + initial_defines = dict((key, Macro(key, self._tokenizer.tokenize(value))) for key, value in defines.items()) code = read_file(file_name, encoding=HDL_FILE_ENCODING) tokens = self._tokenizer.tokenize(code, file_name=file_name) included_files = [] @@ -83,9 +80,7 @@ def parse(self, file_name, include_paths=None, defines=None): included_files=included_files, ) - included_files_for_design_file = [ - name for _, name in included_files if name is not None - ] + included_files_for_design_file = [name for _, name in included_files if name is not None] result = VerilogDesignFile.parse(pp_tokens, included_files_for_design_file) if self._database is None: @@ -107,9 +102,7 @@ def _store_result(self, file_name, result, included_files, defines): """ new_included_files = [] for short_name, full_name in included_files: - new_included_files.append( - (short_name, full_name, self._content_hash(full_name)) - ) + new_included_files.append((short_name, full_name, self._content_hash(full_name))) key = self._key(file_name) self._database[key] = ( @@ -145,9 +138,7 @@ def _lookup_parse_cache(self, file_name, include_paths, defines): if key not in self._database: return None - old_content_hash, old_included_files, old_defines, old_result = self._database[ - key - ] + old_content_hash, old_included_files, old_defines, old_result = self._database[key] if old_defines != defines: return None @@ -183,9 +174,7 @@ def __init__( # pylint: disable=too-many-arguments self.modules = [] if modules is None else modules self.packages = [] if packages is None else packages self.imports = [] if imports is None else imports - self.package_references = ( - [] if package_references is None else package_references - ) + self.package_references = [] if package_references is None else package_references self.instances = [] if instances is None else instances self.included_files = [] if included_files is None else included_files @@ -194,11 +183,7 @@ def parse(cls, tokens, included_files): """ Parse verilog file """ - tokens = [ - token - for token in tokens - if token.kind not in (WHITESPACE, COMMENT, NEWLINE, MULTI_COMMENT) - ] + tokens = [token for token in tokens if token.kind not in (WHITESPACE, COMMENT, NEWLINE, MULTI_COMMENT)] return cls( modules=VerilogModule.find(tokens), 
packages=VerilogPackage.find(tokens), @@ -226,13 +211,9 @@ def find_imports(tokens): if token.kind == IDENTIFIER: results.append(token.value) else: - LocationException.warning( - "import bad argument", token.location - ).log(LOGGER) + LocationException.warning("import bad argument", token.location).log(LOGGER) except EOFException: - LocationException.warning( - "EOF reached when parsing import", location=import_token.location - ).log(LOGGER) + LocationException.warning("EOF reached when parsing import", location=import_token.location).log(LOGGER) return results @staticmethod diff --git a/vunit/parsing/verilog/preprocess.py b/vunit/parsing/verilog/preprocess.py index 0f6111873..57eb6c58f 100644 --- a/vunit/parsing/verilog/preprocess.py +++ b/vunit/parsing/verilog/preprocess.py @@ -58,9 +58,7 @@ def preprocess(self, tokens, defines=None, include_paths=None, included_files=No self._macro_trace = set() return self._preprocess(tokens, defines, include_paths, included_files) - def _preprocess( - self, tokens, defines=None, include_paths=None, included_files=None - ): + def _preprocess(self, tokens, defines=None, include_paths=None, included_files=None): """ Pre-process tokens while filling in defines """ @@ -77,9 +75,7 @@ def _preprocess( continue try: - result += self.preprocessor( - token, stream, defines, include_paths, included_files - ) + result += self.preprocessor(token, stream, defines, include_paths, included_files) except LocationException as exc: exc.log(LOGGER) @@ -115,9 +111,7 @@ def preprocessor( # pylint: disable=too-many-arguments,too-many-branches included_files=included_files, ) except EOFException as exe: - raise LocationException.warning( - "EOF reached when parsing `%s" % token.value, token.location - ) from exe + raise LocationException.warning("EOF reached when parsing `%s" % token.value, token.location) from exe elif token.value in ("celldefine", "endcelldefine", "nounconnected_drive"): # Ignored @@ -139,9 +133,7 @@ def preprocessor( # pylint: disable=too-many-arguments,too-many-branches self._skip_protected_region(stream) elif token.value in defines: - return self.expand_macro( - token, stream, defines, include_paths, included_files - ) + return self.expand_macro(token, stream, defines, include_paths, included_files) else: raise LocationException.debug("Verilog undefined name", token.location) @@ -188,9 +180,7 @@ def expand_macro( # pylint: disable=too-many-arguments ) self._macro_trace.add(macro_point) tokens = self._preprocess( - macro.expand_from_stream( - macro_token, stream, previous=macro_token.location - ), + macro.expand_from_stream(macro_token, stream, previous=macro_token.location), defines=defines, include_paths=include_paths, included_files=included_files, @@ -209,9 +199,7 @@ def check_arg(if_token, arg): Check the define argument of an if statement """ if arg.kind != IDENTIFIER: - raise LocationException.warning( - "Bad argument to `%s" % if_token.value, arg.location - ) + raise LocationException.warning("Bad argument to `%s" % if_token.value, arg.location) stream.skip_while(NEWLINE) def determine_if_taken(if_token, arg): @@ -266,9 +254,7 @@ def determine_if_taken(if_token, arg): stream.skip_while(NEWLINE) return result - def include( # pylint: disable=too-many-arguments - self, token, stream, include_paths, included_files, defines - ): + def include(self, token, stream, include_paths, included_files, defines): # pylint: disable=too-many-arguments """ Handle `include directive """ @@ -276,21 +262,15 @@ def include( # pylint: disable=too-many-arguments try: 
tok = stream.pop() except EOFException as exe: - raise LocationException.warning( - "EOF reached when parsing `include argument", token.location - ) from exe + raise LocationException.warning("EOF reached when parsing `include argument", token.location) from exe if tok.kind == PREPROCESSOR: if tok.value in defines: macro = defines[tok.value] else: - raise LocationException.warning( - "Verilog `include argument not defined", tok.location - ) + raise LocationException.warning("Verilog `include argument not defined", tok.location) - expanded_tokens = self.expand_macro( - tok, stream, defines, include_paths, included_files - ) + expanded_tokens = self.expand_macro(tok, stream, defines, include_paths, included_files) # pylint crashes when trying to fix the warning below if len(expanded_tokens) == 0: # pylint: disable=len-as-condition @@ -300,18 +280,14 @@ def include( # pylint: disable=too-many-arguments ) if expanded_tokens[0].kind != STRING: - raise LocationException.warning( - "Verilog `include has bad argument", expanded_tokens[0].location - ) + raise LocationException.warning("Verilog `include has bad argument", expanded_tokens[0].location) file_name_tok = expanded_tokens[0] elif tok.kind == STRING: file_name_tok = tok else: - raise LocationException.warning( - "Verilog `include bad argument", tok.location - ) + raise LocationException.warning("Verilog `include bad argument", tok.location) included_file = find_included_file(include_paths, file_name_tok.value) included_files.append((file_name_tok.value, included_file)) @@ -338,9 +314,7 @@ def include( # pylint: disable=too-many-arguments file_name=included_file, previous_location=token.location, ) - included_tokens = self._preprocess( - included_tokens, defines, include_paths, included_files - ) + included_tokens = self._preprocess(included_tokens, defines, include_paths, included_files) self._include_trace.remove(include_point) return included_tokens @@ -364,17 +338,13 @@ def undef(undef_token, stream, defines): try: name_token = stream.pop() except EOFException as exe: - raise LocationException.warning( - "EOF reached when parsing `undef", undef_token.location - ) from exe + raise LocationException.warning("EOF reached when parsing `undef", undef_token.location) from exe if name_token.kind != IDENTIFIER: raise LocationException.warning("Bad argument to `undef", name_token.location) if name_token.value not in defines: - raise LocationException.warning( - "`undef argument was not previously defined", name_token.location - ) + raise LocationException.warning("`undef argument was not previously defined", name_token.location) del defines[name_token.value] @@ -387,14 +357,10 @@ def define(define_token, stream): try: name_token = stream.pop() except EOFException as exe: - raise LocationException.warning( - "Verilog `define without argument", define_token.location - ) from exe + raise LocationException.warning("Verilog `define without argument", define_token.location) from exe if name_token.kind != IDENTIFIER: - raise LocationException.warning( - "Verilog `define invalid name", name_token.location - ) + raise LocationException.warning("Verilog `define invalid name", name_token.location) name = name_token.value @@ -477,10 +443,7 @@ def expand(self, values, previous): tokens += value else: tokens.append(token) - return [ - Token(tok.kind, tok.value, add_previous(tok.location, previous)) - for tok in tokens - ] + return [Token(tok.kind, tok.value, add_previous(tok.location, previous)) for tok in tokens] def __eq__(self, other): return ( @@ -512,14 
+475,11 @@ def expand_from_stream(self, token, stream, previous=None): if name in self.defaults: values.append(self.defaults[name]) else: - raise LocationException.warning( - "Missing value for argument %s" % name, token.location - ) + raise LocationException.warning("Missing value for argument %s" % name, token.location) elif len(values) > len(self.args): raise LocationException.warning( - "Too many arguments got %i expected %i" - % (len(values), len(self.args)), + "Too many arguments got %i expected %i" % (len(values), len(self.args)), token.location, ) @@ -536,9 +496,7 @@ def _parse_macro_actuals(define_token, stream): token = stream.pop() if token.kind != LPAR: - raise LocationException.warning( - "Bad `define argument list", define_token.location - ) + raise LocationException.warning("Bad `define argument list", define_token.location) token = stream.pop() value = [] values = [] @@ -561,12 +519,7 @@ def _parse_macro_actuals(define_token, stream): elif token.kind is RPAR: par_count += -1 - value_ok = ( - token.kind == COMMA - and bracket_count == 0 - and brace_count == 0 - and par_count == 0 - ) + value_ok = token.kind == COMMA and bracket_count == 0 and brace_count == 0 and par_count == 0 if value_ok: values.append(value) diff --git a/vunit/project.py b/vunit/project.py index 86ecaae48..89708a4ec 100644 --- a/vunit/project.py +++ b/vunit/project.py @@ -138,9 +138,7 @@ def add_source_file( # pylint: disable=too-many-arguments library, vhdl_parser=self._vhdl_parser, database=self._database, - vhdl_standard=library.vhdl_standard - if vhdl_standard is None - else vhdl_standard, + vhdl_standard=library.vhdl_standard if vhdl_standard is None else vhdl_standard, no_parse=no_parse, ) elif file_type in VERILOG_FILE_TYPES: @@ -212,9 +210,7 @@ def _find_other_vhdl_design_unit_dependencies( # pylint: disable=too-many-branc except KeyError: if ref.library not in self._builtin_libraries: - LOGGER.warning( - "%s: failed to find library '%s'", source_file.name, ref.library - ) + LOGGER.warning("%s: failed to find library '%s'", source_file.name, ref.library) continue if ref.is_entity_reference() and ref.design_unit in library.modules: @@ -348,9 +344,7 @@ def add_dependency(start, end): is_new = dependency_graph.add_dependency(start, end) if is_new: - LOGGER.debug( - "Adding dependency: %s depends on %s", end.name, start.name - ) + LOGGER.debug("Adding dependency: %s depends on %s", end.name, start.name) def add_dependencies(dependency_function, files): """ @@ -365,29 +359,19 @@ def add_dependencies(dependency_function, files): for source_file in self._source_files_in_order: dependency_graph.add_node(source_file) - vhdl_files = [ - source_file - for source_file in self._source_files_in_order - if source_file.file_type == "vhdl" - ] + vhdl_files = [source_file for source_file in self._source_files_in_order if source_file.file_type == "vhdl"] - depend_on_package_bodies = ( - self._depend_on_package_body or implementation_dependencies - ) + depend_on_package_bodies = self._depend_on_package_body or implementation_dependencies add_dependencies( lambda source_file: self._find_other_vhdl_design_unit_dependencies( source_file, depend_on_package_bodies, implementation_dependencies ), vhdl_files, ) - add_dependencies( - self._find_primary_secondary_design_unit_dependencies, vhdl_files - ) + add_dependencies(self._find_primary_secondary_design_unit_dependencies, vhdl_files) verilog_files = [ - source_file - for source_file in self._source_files_in_order - if source_file.file_type in VERILOG_FILE_TYPES + 
source_file for source_file in self._source_files_in_order if source_file.file_type in VERILOG_FILE_TYPES ] add_dependencies(self._find_verilog_package_dependencies, verilog_files) @@ -426,9 +410,7 @@ def _get_compile_timestamps(self, files): timestamps[source_file] = ostools.get_modification_time(hash_file_name) return timestamps - def get_files_in_compile_order( - self, incremental=True, dependency_graph=None, files=None - ): + def get_files_in_compile_order(self, incremental=True, dependency_graph=None, files=None): """ Get a list of all files in compile order param: incremental: Only return files that need recompile if True @@ -440,9 +422,7 @@ def get_files_in_compile_order( files_to_recompile = self._get_files_to_recompile( files or self.get_source_files_in_order(), dependency_graph, incremental ) - return self._get_affected_files_in_compile_order( - files_to_recompile, dependency_graph.get_dependent - ) + return self._get_affected_files_in_compile_order(files_to_recompile, dependency_graph.get_dependent) def _get_files_to_recompile(self, files, dependency_graph, incremental): """ @@ -454,15 +434,11 @@ def _get_files_to_recompile(self, files, dependency_graph, incremental): timestamps = self._get_compile_timestamps(files) result_list = [] for source_file in files: - if (not incremental) or self._needs_recompile( - dependency_graph, source_file, timestamps - ): + if (not incremental) or self._needs_recompile(dependency_graph, source_file, timestamps): result_list.append(source_file) return result_list - def get_dependencies_in_compile_order( - self, target_files=None, implementation_dependencies=False - ): + def get_dependencies_in_compile_order(self, target_files=None, implementation_dependencies=False): """ Get a list of dependencies of target files including the target files. @@ -473,9 +449,7 @@ def get_dependencies_in_compile_order( target_files = self._source_files_in_order dependency_graph = self.create_dependency_graph(implementation_dependencies) - return self._get_affected_files_in_compile_order( - set(target_files), dependency_graph.get_dependencies - ) + return self._get_affected_files_in_compile_order(set(target_files), dependency_graph.get_dependencies) def _get_affected_files_in_compile_order(self, target_files, get_depend_func): """ @@ -502,16 +476,12 @@ def get_minimal_file_set_in_compile_order(self, target_files=None): ### # Now the file set is known, but it has to be evaluated which files # realy have to be compiled according to their timestamp. - max_file_set_to_be_compiled = self.get_files_in_compile_order( - incremental=True, files=dependency_files - ) + max_file_set_to_be_compiled = self.get_files_in_compile_order(incremental=True, files=dependency_files) # get_files_in_compile_order returns more files than actually are in the # list of dependent files. 
So the list is filtered for only the files # that are required - min_file_set_to_be_compiled = [ - f for f in max_file_set_to_be_compiled if f in dependency_files - ] + min_file_set_to_be_compiled = [f for f in max_file_set_to_be_compiled if f in dependency_files] return min_file_set_to_be_compiled def _get_affected_files(self, target_files, get_depend_func): @@ -597,9 +567,7 @@ def _needs_recompile(self, dependency_graph, source_file, timestamps): ) return True - LOGGER.debug( - "%s has same hash file and must not be recompiled", source_file.name - ) + LOGGER.debug("%s has same hash file and must not be recompiled", source_file.name) return False @@ -609,12 +577,7 @@ def _hash_file_name_of(self, source_file): """ library = self.get_library(source_file.library.name) prefix = hash_string(str(Path(source_file.name).parent)) - return str( - Path(library.directory) - / prefix - / Path(source_file.name).name - / ".vunit_hash" - ) + return str(Path(library.directory) / prefix / Path(source_file.name).name / ".vunit_hash") def update(self, source_file): """ diff --git a/vunit/sim_if/__init__.py b/vunit/sim_if/__init__.py index 582442d93..e651ca984 100644 --- a/vunit/sim_if/__init__.py +++ b/vunit/sim_if/__init__.py @@ -134,18 +134,12 @@ def find_toolchain(cls, executables, constraints=None): return None all_paths = [ - [ - str(Path(executables).parent.resolve()) - for executables in cls.find_executable(name) - ] + [str(Path(executables).parent.resolve()) for executables in cls.find_executable(name)] for name in executables ] for path0 in all_paths[0]: - if all( - [path0 in paths for paths in all_paths] - + [constraint(path0) for constraint in constraints] - ): + if all([path0 in paths for paths in all_paths] + [constraint(path0) for constraint in constraints]): return path0 return None @@ -183,9 +177,7 @@ def supports_coverage(): """ return False - def merge_coverage( # pylint: disable=unused-argument, no-self-use - self, file_name, args - ): + def merge_coverage(self, file_name, args): # pylint: disable=unused-argument, no-self-use """ Hook for simulator interface to creating coverage reports """ @@ -209,9 +201,7 @@ def compile_project( """ self.add_simulator_specific(project) self.setup_library_mapping(project) - self.compile_source_files( - project, printer, continue_on_error, target_files=target_files - ) + self.compile_source_files(project, printer, continue_on_error, target_files=target_files) def simulate(self, output_path, test_suite_name, config, elaborate_only): """ @@ -246,9 +236,7 @@ def _compile_source_file(self, source_file, printer): except subprocess.CalledProcessError as err: printer.write("failed", fg="ri") printer.write("\n") - printer.write( - "=== Command used: ===\n%s\n" % (subprocess.list2cmdline(command)) - ) + printer.write("=== Command used: ===\n%s\n" % (subprocess.list2cmdline(command))) printer.write("\n") printer.write("=== Command output: ===\n%s\n" % err.output) @@ -271,9 +259,7 @@ def compile_source_files( failures = [] if target_files is None: - source_files = project.get_files_in_compile_order( - dependency_graph=dependency_graph - ) + source_files = project.get_files_in_compile_order(dependency_graph=dependency_graph) else: source_files = project.get_minimal_file_set_in_compile_order(target_files) @@ -282,12 +268,8 @@ def compile_source_files( max_library_name = 0 max_source_file_name = 0 if source_files: - max_library_name = max( - len(source_file.library.name) for source_file in source_files - ) - max_source_file_name = max( - 
len(simplify_path(source_file.name)) for source_file in source_files - ) + max_library_name = max(len(source_file.library.name) for source_file in source_files) + max_source_file_name = max(len(simplify_path(source_file.name)) for source_file in source_files) for source_file in source_files: printer.write( @@ -307,9 +289,7 @@ def compile_source_files( if self._compile_source_file(source_file, printer): project.update(source_file) else: - source_files_to_skip.update( - dependency_graph.get_dependent([source_file]) - ) + source_files_to_skip.update(dependency_graph.get_dependent([source_file])) failures.append(source_file) if not continue_on_error: @@ -324,9 +304,7 @@ def compile_source_files( else: printer.write("Re-compile not needed\n") - def compile_source_file_command( # pylint: disable=unused-argument - self, source_file - ): + def compile_source_file_command(self, source_file): # pylint: disable=unused-argument raise NotImplementedError @staticmethod @@ -401,9 +379,7 @@ class ListOfStringOption(Option): def validate(self, value): def fail(): - raise ValueError( - "Option %r must be a list of strings. Got %r" % (self.name, value) - ) + raise ValueError("Option %r must be a list of strings. Got %r" % (self.name, value)) if is_string_not_iterable(value): fail() @@ -428,10 +404,7 @@ def __init__(self): def validate(self, value): if value not in self._legal_values: - raise ValueError( - "Option %r must be one of %s. Got %r" - % (self.name, self._legal_values, value) - ) + raise ValueError("Option %r must be one of %s. Got %r" % (self.name, self._legal_values, value)) def is_string_not_iterable(value): diff --git a/vunit/sim_if/activehdl.py b/vunit/sim_if/activehdl.py index e1b8db31f..9e973d8ae 100644 --- a/vunit/sim_if/activehdl.py +++ b/vunit/sim_if/activehdl.py @@ -58,9 +58,7 @@ def supports_vhdl_package_generics(cls): """ Returns True when this simulator supports VHDL package generics """ - proc = Process( - [str(Path(cls.find_prefix()) / "vcom"), "-version"], env=cls.get_env() - ) + proc = Process([str(Path(cls.find_prefix()) / "vcom"), "-version"], env=cls.get_env()) consumer = VersionConsumer() proc.consume_output(consumer) if consumer.version is not None: @@ -188,10 +186,7 @@ def _create_library_cfg(self): return with Path(self._library_cfg).open("w", encoding="utf-8") as ofile: - ofile.write( - '$INCLUDE = "%s"\n' - % str(Path(self._prefix).parent / "vlib" / "library.cfg") - ) + ofile.write('$INCLUDE = "%s"\n' % str(Path(self._prefix).parent / "vlib" / "library.cfg")) _library_re = re.compile(r'([a-zA-Z_]+)\s=\s"(.*)"') @@ -209,9 +204,7 @@ def _get_mapped_libraries(self): continue key = match.group(1) value = match.group(2) - libraries[key] = str( - (Path(self._library_cfg).parent / Path(value).parent).resolve() - ) + libraries[key] = str((Path(self._library_cfg).parent / Path(value).parent).resolve()) return libraries def _vsim_extra_args(self, config): @@ -219,14 +212,10 @@ def _vsim_extra_args(self, config): Determine vsim_extra_args """ vsim_extra_args = [] - vsim_extra_args = config.sim_options.get( - "activehdl.vsim_flags", vsim_extra_args - ) + vsim_extra_args = config.sim_options.get("activehdl.vsim_flags", vsim_extra_args) if self._gui: - vsim_extra_args = config.sim_options.get( - "activehdl.vsim_flags.gui", vsim_extra_args - ) + vsim_extra_args = config.sim_options.get("activehdl.vsim_flags.gui", vsim_extra_args) return " ".join(vsim_extra_args) @@ -235,20 +224,12 @@ def _create_load_function(self, config, output_path): Create the vunit_load TCL function that runs the 
vsim command and loads the design """ set_generic_str = "\n ".join( - ( - "set vunit_generic_%s {%s}" % (name, value) - for name, value in config.generics.items() - ) + ("set vunit_generic_%s {%s}" % (name, value) for name, value in config.generics.items()) ) set_generic_name_str = " ".join( - ( - "-g/%s/%s=${vunit_generic_%s}" % (config.entity_name, name, name) - for name in config.generics - ) - ) - pli_str = " ".join( - '-pli "%s"' % fix_path(name) for name in config.sim_options.get("pli", []) + ("-g/%s/%s=${vunit_generic_%s}" % (config.entity_name, name, name) for name in config.generics) ) + pli_str = " ".join('-pli "%s"' % fix_path(name) for name in config.sim_options.get("pli", [])) vsim_flags = [ pli_str, @@ -401,9 +382,7 @@ def _create_gui_script(self, common_file_name, config): if init_file is not None: tcl += 'source "%s"\n' % fix_path(str(Path(init_file).resolve())) - tcl += ( - 'puts "VUnit help: Design already loaded. Use run -all to run the test."\n' - ) + tcl += 'puts "VUnit help: Design already loaded. Use run -all to run the test."\n' return tcl @@ -442,9 +421,7 @@ def simulate(self, output_path, test_suite_name, config, elaborate_only): gui_file_name = script_path / "gui.tcl" write_file(common_file_name, self._create_common_script(config, output_path)) - write_file( - gui_file_name, self._create_gui_script(str(common_file_name), config) - ) + write_file(gui_file_name, self._create_gui_script(str(common_file_name), config)) write_file( str(batch_file_name), self._create_batch_script(str(common_file_name), elaborate_only), @@ -455,9 +432,7 @@ def simulate(self, output_path, test_suite_name, config, elaborate_only): renew_path(gui_path) return self._run_batch_file(str(gui_file_name), gui=True, cwd=gui_path) - return self._run_batch_file( - str(batch_file_name), gui=False, cwd=str(Path(self._library_cfg).parent) - ) + return self._run_batch_file(str(batch_file_name), gui=False, cwd=str(Path(self._library_cfg).parent)) @total_ordering @@ -507,9 +482,7 @@ class VersionConsumer(object): def __init__(self): self.version = None - _version_re = re.compile( - r"(?P\d+)\.(?P\d+)(?P[a-zA-Z]?)\.\d+\.\d+" - ) + _version_re = re.compile(r"(?P\d+)\.(?P\d+)(?P[a-zA-Z]?)\.\d+\.\d+") def __call__(self, line): match = self._version_re.search(line) diff --git a/vunit/sim_if/cds_file.py b/vunit/sim_if/cds_file.py index 51c68c679..c1f8f57ca 100644 --- a/vunit/sim_if/cds_file.py +++ b/vunit/sim_if/cds_file.py @@ -19,9 +19,7 @@ class CDSFile(dict): Only cares about 'define' but other lines are kept intact """ - _re_define = re.compile( - r'\s*define\s+([a-zA-Z0-9_]+)\s+"?(.*?)"?(#|$)', re.IGNORECASE - ) + _re_define = re.compile(r'\s*define\s+([a-zA-Z0-9_]+)\s+"?(.*?)"?(#|$)', re.IGNORECASE) @classmethod def parse(cls, file_name): @@ -51,11 +49,5 @@ def write(self, file_name): """ Write cds file to file named 'file_name' """ - contents = ( - "\n".join( - self._other_lines - + ['define %s "%s"' % item for item in sorted(self.items())] - ) - + "\n" - ) + contents = "\n".join(self._other_lines + ['define %s "%s"' % item for item in sorted(self.items())]) + "\n" write_file(file_name, contents) diff --git a/vunit/sim_if/factory.py b/vunit/sim_if/factory.py index 9a2f4f827..523ef79fa 100644 --- a/vunit/sim_if/factory.py +++ b/vunit/sim_if/factory.py @@ -80,9 +80,7 @@ def check_sim_option(self, name, value): known_options = sorted(list(self._sim_options.keys())) if name not in self._sim_options: - raise ValueError( - "Unknown sim_option %r, expected one of %r" % (name, known_options) - ) + raise 
ValueError("Unknown sim_option %r, expected one of %r" % (name, known_options)) self._sim_options[name].validate(value) @@ -92,9 +90,7 @@ def check_compile_option_name(self, name): """ known_options = sorted(list(self._compile_options.keys())) if name not in known_options: - raise ValueError( - "Unknown compile_option %r, expected one of %r" % (name, known_options) - ) + raise ValueError("Unknown compile_option %r, expected one of %r" % (name, known_options)) def check_compile_option(self, name, value): """ @@ -109,10 +105,7 @@ def select_simulator(self): or the first available """ available_simulators = self._detect_available_simulators() - name_mapping = { - simulator_class.name: simulator_class - for simulator_class in self.supported_simulators() - } + name_mapping = {simulator_class.name: simulator_class for simulator_class in self.supported_simulators()} if not available_simulators: return None @@ -122,9 +115,7 @@ def select_simulator(self): if simulator_name not in name_mapping: raise RuntimeError( ( - "Simulator from " - + environ_name - + " environment variable %r is not supported. " + "Simulator from " + environ_name + " environment variable %r is not supported. " "Supported simulators are %r" ) % (simulator_name, name_mapping.keys()) @@ -159,11 +150,7 @@ def _detect_available_simulators(self): """ Detect available simulators and return a list """ - return [ - simulator_class - for simulator_class in self.supported_simulators() - if simulator_class.is_available() - ] + return [simulator_class for simulator_class in self.supported_simulators() if simulator_class.is_available()] @property def has_simulator(self): diff --git a/vunit/sim_if/ghdl.py b/vunit/sim_if/ghdl.py index 47916bf23..6fdcc1baa 100644 --- a/vunit/sim_if/ghdl.py +++ b/vunit/sim_if/ghdl.py @@ -60,9 +60,7 @@ def add_arguments(parser): default=None, help="Save .vcd or .ghw to open in gtkwave", ) - group.add_argument( - "--gtkwave-args", default="", help="Arguments to pass to gtkwave" - ) + group.add_argument("--gtkwave-args", default="", help="Arguments to pass to gtkwave") @classmethod def from_args(cls, args, output_path, **kwargs): @@ -100,9 +98,7 @@ def __init__( # pylint: disable=too-many-arguments self._project = None if gui and (not self.find_executable("gtkwave")): - raise RuntimeError( - "Cannot find the gtkwave executable in the PATH environment variable. GUI not possible" - ) + raise RuntimeError("Cannot find the gtkwave executable in the PATH environment variable. 
GUI not possible") self._gui = gui self._gtkwave_fmt = "ghw" if gui and gtkwave_fmt is None else gtkwave_fmt @@ -122,9 +118,7 @@ def _get_version_output(cls, prefix): """ Get the output of 'ghdl --version' """ - return subprocess.check_output( - [str(Path(prefix) / cls.executable), "--version"] - ).decode() + return subprocess.check_output([str(Path(prefix) / cls.executable), "--version"]).decode() @classmethod def determine_backend(cls, prefix): @@ -147,9 +141,7 @@ def determine_backend(cls, prefix): print("== Output of 'ghdl --version'" + ("=" * 60)) print(output) print("=============================" + ("=" * 60)) - raise AssertionError( - "No known GHDL back-end could be detected from running 'ghdl --version'" - ) + raise AssertionError("No known GHDL back-end could be detected from running 'ghdl --version'") @classmethod def determine_version(cls, prefix): @@ -203,10 +195,7 @@ def setup_library_mapping(self, project): if not vhdl_standards: self._vhdl_standard = VHDL.STD_2008 elif len(vhdl_standards) != 1: - raise RuntimeError( - "GHDL cannot handle mixed VHDL standards, found %r" - % list(vhdl_standards) - ) + raise RuntimeError("GHDL cannot handle mixed VHDL standards, found %r" % list(vhdl_standards)) else: self._vhdl_standard = list(vhdl_standards)[0] @@ -254,10 +243,7 @@ def compile_vhdl_file_command(self, source_file): flags = source_file.compile_options.get("ghdl.flags", []) if flags != []: warn( - ( - "'ghdl.flags' is deprecated and it will be removed in future releases; " - "use 'ghdl.a_flags' instead" - ), + ("'ghdl.flags' is deprecated and it will be removed in future releases; " "use 'ghdl.a_flags' instead"), Warning, ) a_flags += flags @@ -272,9 +258,7 @@ def compile_vhdl_file_command(self, source_file): cmd += [source_file.name] return cmd - def _get_command( # pylint: disable=too-many-branches - self, config, output_path, elaborate_only, ghdl_e, wave_file - ): + def _get_command(self, config, output_path, elaborate_only, ghdl_e, wave_file): # pylint: disable=too-many-branches """ Return GHDL simulation command """ @@ -287,15 +271,10 @@ def _get_command( # pylint: disable=too-many-branches cmd += ["--std=%s" % self._std_str(self._vhdl_standard)] cmd += ["--work=%s" % config.library_name] - cmd += [ - "--workdir=%s" % self._project.get_library(config.library_name).directory - ] + cmd += ["--workdir=%s" % self._project.get_library(config.library_name).directory] cmd += ["-P%s" % lib.directory for lib in self._project.get_libraries()] - bin_path = str( - Path(output_path) - / ("%s-%s" % (config.entity_name, config.architecture_name)) - ) + bin_path = str(Path(output_path) / ("%s-%s" % (config.entity_name, config.architecture_name))) if self._has_output_flag(): cmd += ["-o", bin_path] cmd += config.sim_options.get("ghdl.elab_flags", []) @@ -329,10 +308,7 @@ def _get_command( # pylint: disable=too-many-branches with (Path(output_path) / "args.json").open("w") as fname: dump( { - "bin": str( - Path(output_path) - / ("%s-%s" % (config.entity_name, config.architecture_name)) - ), + "bin": str(Path(output_path) / ("%s-%s" % (config.entity_name, config.architecture_name))), "build": cmd[1:], "sim": sim, }, @@ -341,9 +317,7 @@ def _get_command( # pylint: disable=too-many-branches return cmd - def simulate( # pylint: disable=too-many-locals - self, output_path, test_suite_name, config, elaborate_only - ): + def simulate(self, output_path, test_suite_name, config, elaborate_only): # pylint: disable=too-many-locals """ Simulate with entity as top level using generics """ @@ -362,9 
+336,7 @@ def simulate( # pylint: disable=too-many-locals else: data_file_name = None - cmd = self._get_command( - config, script_path, elaborate_only, ghdl_e, data_file_name - ) + cmd = self._get_command(config, script_path, elaborate_only, ghdl_e, data_file_name) status = True diff --git a/vunit/sim_if/incisive.py b/vunit/sim_if/incisive.py index 455de667b..0140c14d1 100644 --- a/vunit/sim_if/incisive.py +++ b/vunit/sim_if/incisive.py @@ -22,9 +22,7 @@ LOGGER = logging.getLogger(__name__) -class IncisiveInterface( # pylint: disable=too-many-instance-attributes - SimulatorInterface -): +class IncisiveInterface(SimulatorInterface): # pylint: disable=too-many-instance-attributes """ Interface for the Cadence Incisive simulator """ @@ -45,9 +43,7 @@ def add_arguments(parser): """ Add command line arguments """ - group = parser.add_argument_group( - "Incisive irun", description="Incisive irun-specific flags" - ) + group = parser.add_argument_group("Incisive irun", description="Incisive irun-specific flags") group.add_argument( "--cdslib", default=None, @@ -106,18 +102,14 @@ def find_cds_root_irun(self): """ Finds irun cds root """ - return subprocess.check_output( - [str(Path(self._prefix) / "cds_root"), "irun"] - ).splitlines()[0] + return subprocess.check_output([str(Path(self._prefix) / "cds_root"), "irun"]).splitlines()[0] def find_cds_root_virtuoso(self): """ Finds virtuoso cds root """ try: - return subprocess.check_output( - [str(Path(self._prefix) / "cds_root"), "virtuoso"] - ).splitlines()[0] + return subprocess.check_output([str(Path(self._prefix) / "cds_root"), "virtuoso"]).splitlines()[0] except subprocess.CalledProcessError: return None @@ -205,11 +197,7 @@ def compile_vhdl_file_command(self, source_file): args += ['-cdslib "%s"' % self._cdslib] args += self._hdlvar_args() args += [ - '-log "%s"' - % str( - Path(self._output_path) - / ("irun_compile_vhdl_file_%s.log" % source_file.library.name) - ) + '-log "%s"' % str(Path(self._output_path) / ("irun_compile_vhdl_file_%s.log" % source_file.library.name)) ] if not self._log_level == "debug": args += ["-quiet"] @@ -221,10 +209,7 @@ def compile_vhdl_file_command(self, source_file): args += ["-makelib %s" % source_file.library.directory] args += ['"%s"' % source_file.name] args += ["-endlib"] - argsfile = str( - Path(self._output_path) - / ("irun_compile_vhdl_file_%s.args" % source_file.library.name) - ) + argsfile = str(Path(self._output_path) / ("irun_compile_vhdl_file_%s.args" % source_file.library.name)) write_file(argsfile, "\n".join(args)) return [cmd, "-f", argsfile] @@ -248,11 +233,7 @@ def compile_verilog_file_command(self, source_file): args += ['-cdslib "%s"' % self._cdslib] args += self._hdlvar_args() args += [ - '-log "%s"' - % str( - Path(self._output_path) - / ("irun_compile_verilog_file_%s.log" % source_file.library.name) - ) + '-log "%s"' % str(Path(self._output_path) / ("irun_compile_verilog_file_%s.log" % source_file.library.name)) ] if not self._log_level == "debug": args += ["-quiet"] @@ -271,10 +252,7 @@ def compile_verilog_file_command(self, source_file): args += ["-makelib %s" % source_file.library.name] args += ['"%s"' % source_file.name] args += ["-endlib"] - argsfile = str( - Path(self._output_path) - / ("irun_compile_verilog_file_%s.args" % source_file.library.name) - ) + argsfile = str(Path(self._output_path) / ("irun_compile_verilog_file_%s.args" % source_file.library.name)) write_file(argsfile, "\n".join(args)) return [cmd, "-f", argsfile] @@ -289,10 +267,7 @@ def create_library(self, library_name, 
library_path, mapped_libraries=None): if not file_exists(lpath): os.makedirs(lpath) - if ( - library_name in mapped_libraries - and mapped_libraries[library_name] == library_path - ): + if library_name in mapped_libraries and mapped_libraries[library_name] == library_path: return cds = CDSFile.parse(self._cdslib) @@ -306,9 +281,7 @@ def _get_mapped_libraries(self): cds = CDSFile.parse(self._cdslib) return cds - def simulate( # pylint: disable=too-many-locals - self, output_path, test_suite_name, config, elaborate_only=False - ): + def simulate(self, output_path, test_suite_name, config, elaborate_only=False): # pylint: disable=too-many-locals """ Elaborates and Simulates with entity as top level using generics """ @@ -337,19 +310,11 @@ def simulate( # pylint: disable=too-many-locals args += ["-nowarn WRMNZD"] args += ["-nowarn DLCPTH"] # "cds.lib Invalid path" args += ["-nowarn DLCVAR"] # "cds.lib Invalid environment variable ''." - args += [ - "-ncerror EVBBOL" - ] # promote to error: "bad boolean literal in generic association" - args += [ - "-ncerror EVBSTR" - ] # promote to error: "bad string literal in generic association" - args += [ - "-ncerror EVBNAT" - ] # promote to error: "bad natural literal in generic association" + args += ["-ncerror EVBBOL"] # promote to error: "bad boolean literal in generic association" + args += ["-ncerror EVBSTR"] # promote to error: "bad string literal in generic association" + args += ["-ncerror EVBNAT"] # promote to error: "bad natural literal in generic association" args += ["-work work"] - args += [ - '-nclibdirname "%s"' % (str(Path(self._output_path) / "libraries")) - ] # @TODO: ugly + args += ['-nclibdirname "%s"' % (str(Path(self._output_path) / "libraries"))] # @TODO: ugly args += config.sim_options.get("incisive.irun_sim_flags", []) args += ['-cdslib "%s"' % self._cdslib] args += self._hdlvar_args() diff --git a/vunit/sim_if/modelsim.py b/vunit/sim_if/modelsim.py index 0e7f05cd7..2e3045699 100644 --- a/vunit/sim_if/modelsim.py +++ b/vunit/sim_if/modelsim.py @@ -21,9 +21,7 @@ LOGGER = logging.getLogger(__name__) -class ModelSimInterface( - VsimSimulatorMixin, SimulatorInterface -): # pylint: disable=too-many-instance-attributes +class ModelSimInterface(VsimSimulatorMixin, SimulatorInterface): # pylint: disable=too-many-instance-attributes """ Mentor Graphics ModelSim interface @@ -108,9 +106,7 @@ def _create_modelsim_ini(self): if not file_exists(parent): os.makedirs(parent) - original_modelsim_ini = os.environ.get( - "VUNIT_MODELSIM_INI", str(Path(self._prefix).parent / "modelsim.ini") - ) + original_modelsim_ini = os.environ.get("VUNIT_MODELSIM_INI", str(Path(self._prefix).parent / "modelsim.ini")) with Path(original_modelsim_ini).open("rb") as fread: with Path(self._sim_cfg_file_name).open("wb") as fwrite: fwrite.write(fread.read()) @@ -212,9 +208,7 @@ def create_library(self, library_name, path, mapped_libraries=None): os.makedirs(apath) if not file_exists(path): - proc = Process( - [str(Path(self._prefix) / "vlib"), "-unix", path], env=self.get_env() - ) + proc = Process([str(Path(self._prefix) / "vlib"), "-unix", path], env=self.get_env()) proc.consume_output(callback=None) if library_name in mapped_libraries and mapped_libraries[library_name] == path: @@ -245,9 +239,7 @@ def _create_load_function(self, test_suite_name, config, output_path): for name, value in config.generics.items() ) ) - pli_str = " ".join( - "-pli {%s}" % fix_path(name) for name in config.sim_options.get("pli", []) - ) + pli_str = " ".join("-pli {%s}" % fix_path(name) 
for name in config.sim_options.get("pli", [])) if config.architecture_name is None: architecture_suffix = "" @@ -257,9 +249,9 @@ def _create_load_function(self, test_suite_name, config, output_path): if config.sim_options.get("enable_coverage", False): coverage_file = str(Path(output_path) / "coverage.ucdb") self._coverage_files.add(coverage_file) - coverage_save_cmd = ( - "coverage save -onexit -testname {%s} -assert -directive -cvg -codeAll {%s}" - % (test_suite_name, fix_path(coverage_file)) + coverage_save_cmd = "coverage save -onexit -testname {%s} -assert -directive -cvg -codeAll {%s}" % ( + test_suite_name, + fix_path(coverage_file), ) coverage_args = "-coverage" else: @@ -320,12 +312,8 @@ def _create_load_function(self, test_suite_name, config, output_path): """.format( coverage_save_cmd=coverage_save_cmd, vsim_flags=" ".join(vsim_flags), - break_on_assert=vhdl_assert_stop_level_mapping[ - config.vhdl_assert_stop_level - ], - no_warnings=1 - if config.sim_options.get("disable_ieee_warnings", False) - else 0, + break_on_assert=vhdl_assert_stop_level_mapping[config.vhdl_assert_stop_level], + no_warnings=1 if config.sim_options.get("disable_ieee_warnings", False) else 0, ) return tcl @@ -374,9 +362,7 @@ def _vsim_extra_args(self, config): vsim_extra_args = config.sim_options.get("modelsim.vsim_flags", vsim_extra_args) if self._gui: - vsim_extra_args = config.sim_options.get( - "modelsim.vsim_flags.gui", vsim_extra_args - ) + vsim_extra_args = config.sim_options.get("modelsim.vsim_flags.gui", vsim_extra_args) return " ".join(vsim_extra_args) @@ -392,12 +378,7 @@ def merge_coverage(self, file_name, args=None): args = [] coverage_files = str(Path(self._output_path) / "coverage_files.txt") - vcover_cmd = ( - [str(Path(self._prefix) / "vcover"), "merge", "-inputs"] - + [coverage_files] - + args - + [file_name] - ) + vcover_cmd = [str(Path(self._prefix) / "vcover"), "merge", "-inputs"] + [coverage_files] + args + [file_name] with Path(coverage_files).open("w", encoding="utf-8") as fptr: for coverage_file in self._coverage_files: if file_exists(coverage_file): diff --git a/vunit/sim_if/rivierapro.py b/vunit/sim_if/rivierapro.py index 14e662348..c90c9ebb5 100644 --- a/vunit/sim_if/rivierapro.py +++ b/vunit/sim_if/rivierapro.py @@ -75,9 +75,7 @@ def _get_version(cls): """ Return a VersionConsumer object containing the simulator version. 
""" - proc = Process( - [str(Path(cls.find_prefix()) / "vcom"), "-version"], env=cls.get_env() - ) + proc = Process([str(Path(cls.find_prefix()) / "vcom"), "-version"], env=cls.get_env()) consumer = VersionConsumer() proc.consume_output(consumer) @@ -161,9 +159,7 @@ def _std_str(self, vhdl_standard): """ if vhdl_standard == VHDL.STD_2019: if self._version.year is not None: - if (self._version.year == 2020 and self._version.month < 4) or ( - self._version.year < 2020 - ): + if (self._version.year == 2020 and self._version.month < 4) or (self._version.year < 2020): return "-2018" return "-2019" @@ -265,9 +261,7 @@ def _get_mapped_libraries(self, library_cfg_file): Get mapped libraries by running vlist on the working directory """ lines = [] - proc = Process( - [str(Path(self._prefix) / "vlist")], cwd=str(Path(library_cfg_file).parent) - ) + proc = Process([str(Path(self._prefix) / "vlist")], cwd=str(Path(library_cfg_file).parent)) proc.consume_output(callback=lines.append) libraries = {} @@ -277,14 +271,10 @@ def _get_mapped_libraries(self, library_cfg_file): continue key = match.group(1) value = match.group(2) - libraries[key] = str( - (Path(library_cfg_file).parent / (Path(value).parent)).resolve() - ) + libraries[key] = str((Path(library_cfg_file).parent / (Path(value).parent)).resolve()) return libraries - def _create_load_function( - self, test_suite_name, config, output_path # pylint: disable=unused-argument - ): + def _create_load_function(self, test_suite_name, config, output_path): # pylint: disable=unused-argument """ Create the vunit_load TCL function that runs the vsim command and loads the design """ @@ -294,9 +284,7 @@ def _create_load_function( for name, value in config.generics.items() ) ) - pli_str = " ".join( - '-pli "%s"' % fix_path(name) for name in config.sim_options.get("pli", []) - ) + pli_str = " ".join('-pli "%s"' % fix_path(name) for name in config.sim_options.get("pli", [])) vsim_flags = [ "-dataset {%s}" % fix_path(str(Path(output_path) / "dataset.asdb")), @@ -358,14 +346,10 @@ def _vsim_extra_args(self, config): Determine vsim_extra_args """ vsim_extra_args = [] - vsim_extra_args = config.sim_options.get( - "rivierapro.vsim_flags", vsim_extra_args - ) + vsim_extra_args = config.sim_options.get("rivierapro.vsim_flags", vsim_extra_args) if self._gui: - vsim_extra_args = config.sim_options.get( - "rivierapro.vsim_flags.gui", vsim_extra_args - ) + vsim_extra_args = config.sim_options.get("rivierapro.vsim_flags.gui", vsim_extra_args) return " ".join(vsim_extra_args) diff --git a/vunit/sim_if/vsim_simulator_mixin.py b/vunit/sim_if/vsim_simulator_mixin.py index 8d7631aab..9c5b1cdad 100644 --- a/vunit/sim_if/vsim_simulator_mixin.py +++ b/vunit/sim_if/vsim_simulator_mixin.py @@ -28,9 +28,7 @@ def __init__(self, prefix, persistent, sim_cfg_file_name): sim_cfg_file_name = str(Path(sim_cfg_file_name).resolve()) self._sim_cfg_file_name = sim_cfg_file_name - prefix = ( - self._prefix - ) # Avoid circular dependency inhibiting process destruction + prefix = self._prefix # Avoid circular dependency inhibiting process destruction env = self.get_env() def create_process(ident): @@ -93,9 +91,7 @@ def _create_restart_function(): ) % (recompile_command, str(Path(os.getcwd()).resolve())), ] - recompile_command_eval_tcl = " ".join( - ["{%s}" % part for part in recompile_command_eval] - ) + recompile_command_eval_tcl = " ".join(["{%s}" % part for part in recompile_command_eval]) tcl = """ proc vunit_compile {} { @@ -324,13 +320,9 @@ def simulate(self, output_path, test_suite_name, 
config, elaborate_only): write_file( str(common_file_name), - self._create_common_script( - test_suite_name, config, script_path, output_path - ), - ) - write_file( - str(gui_file_name), self._create_gui_script(str(common_file_name), config) + self._create_common_script(test_suite_name, config, script_path, output_path), ) + write_file(str(gui_file_name), self._create_gui_script(str(common_file_name), config)) write_file( str(batch_file_name), self._create_batch_script(str(common_file_name), elaborate_only), diff --git a/vunit/source_file.py b/vunit/source_file.py index fd2e8ce9c..288448ac7 100644 --- a/vunit/source_file.py +++ b/vunit/source_file.py @@ -140,9 +140,7 @@ def __init__( # pylint: disable=too-many-arguments self.module_dependencies = [] self.include_dirs = include_dirs if include_dirs is not None else [] self.defines = defines.copy() if defines is not None else {} - self._content_hash = file_content_hash( - self.name, encoding=HDL_FILE_ENCODING, database=database - ) + self._content_hash = file_content_hash(self.name, encoding=HDL_FILE_ENCODING, database=database) for path in self.include_dirs: self._content_hash = hash_string(self._content_hash + hash_string(path)) @@ -230,9 +228,7 @@ def __init__( # pylint: disable=too-many-arguments else: self._add_design_file(design_file) - self._content_hash = file_content_hash( - self.name, encoding=HDL_FILE_ENCODING, database=database - ) + self._content_hash = file_content_hash(self.name, encoding=HDL_FILE_ENCODING, database=database) def get_vhdl_standard(self) -> VHDLStandard: """ @@ -291,9 +287,7 @@ def _find_dependencies(self, design_file): result.append(ref) for configuration in design_file.configurations: - result.append( - VHDLReference("entity", self.library.name, configuration.entity, "all") - ) + result.append(VHDLReference("entity", self.library.name, configuration.entity, "all")) return result @@ -324,16 +318,10 @@ def _find_design_units(self, design_file): ) for configuration in design_file.configurations: - result.append( - VHDLDesignUnit(configuration.identifier, self, "configuration") - ) + result.append(VHDLDesignUnit(configuration.identifier, self, "configuration")) for body in design_file.package_bodies: - result.append( - VHDLDesignUnit( - body.identifier, self, "package body", False, body.identifier - ) - ) + result.append(VHDLDesignUnit(body.identifier, self, "package body", False, body.identifier)) return result @@ -342,11 +330,7 @@ def content_hash(self): """ Compute hash of contents and compile options """ - return hash_string( - self._content_hash - + self._compile_options_hash() - + hash_string(str(self._vhdl_standard)) - ) + return hash_string(self._content_hash + self._compile_options_hash() + hash_string(str(self._vhdl_standard))) def add_to_library(self, library): """ diff --git a/vunit/test/bench.py b/vunit/test/bench.py index b900134dd..7b2dc7efd 100644 --- a/vunit/test/bench.py +++ b/vunit/test/bench.py @@ -73,10 +73,7 @@ def get_default_config(self): Get the default configuration of this test bench """ if self._individual_tests: - raise RuntimeError( - "Test bench %s.%s has individually configured tests" - % (self.library_name, self.name) - ) + raise RuntimeError("Test bench %s.%s has individually configured tests" % (self.library_name, self.name)) return self._configs[DEFAULT_NAME] @staticmethod @@ -87,9 +84,7 @@ def _check_architectures(design_unit): """ if design_unit.is_entity: if not design_unit.architecture_names: - raise RuntimeError( - "Test bench '%s' has no architecture." 
% design_unit.name - ) + raise RuntimeError("Test bench '%s' has no architecture." % design_unit.name) if len(design_unit.architecture_names) > 1: raise RuntimeError( @@ -99,9 +94,7 @@ def _check_architectures(design_unit): design_unit.name, ", ".join( "%s:%s" % (name, str(Path(fname).name)) - for name, fname in sorted( - design_unit.architecture_names.items() - ) + for name, fname in sorted(design_unit.architecture_names.items()) ), ) ) @@ -235,13 +228,9 @@ def parse(content): assert len(tests) == 1 self._implicit_test = tests[0] - self._individual_tests = ( - "run_all_in_same_sim" not in attribute_names and len(explicit_tests) > 0 - ) + self._individual_tests = "run_all_in_same_sim" not in attribute_names and len(explicit_tests) > 0 self._test_cases = [ - TestConfigurationVisitor( - test, self.design_unit, self._individual_tests, default_config.copy() - ) + TestConfigurationVisitor(test, self.design_unit, self._individual_tests, default_config.copy()) for test in explicit_tests ] @@ -268,9 +257,7 @@ def from_line_offsets(file_name, offset, length, line_offsets): """ Create FileLocation with lineno computed from line offsets """ - return FileLocation( - file_name, offset, length, _lookup_lineno(offset, line_offsets) - ) + return FileLocation(file_name, offset, length, _lookup_lineno(offset, line_offsets)) def __init__(self, file_name, offset, length, lineno): self.file_name = file_name @@ -374,9 +361,7 @@ def get_default_config(self): def _check_enabled(self): if not self._enable_configuration: - raise RuntimeError( - "Individual test configuration is not possible with run_all_in_same_sim" - ) + raise RuntimeError("Individual test configuration is not possible with run_all_in_same_sim") def get_configuration_dicts(self): """ @@ -409,9 +394,7 @@ def create_tests(self, simulator_if, elaborate_only, test_list=None): ) -_RE_VHDL_TEST_CASE = re.compile( - r'(\s|\()+run\s*\(\s*"(?P.*?)"\s*\)', re.IGNORECASE -) +_RE_VHDL_TEST_CASE = re.compile(r'(\s|\()+run\s*\(\s*"(?P.*?)"\s*\)', re.IGNORECASE) _RE_VERILOG_TEST_CASE = re.compile(r'`TEST_CASE\s*\(\s*"(?P.*?)"\s*\)') _RE_VHDL_TEST_SUITE = re.compile(r"test_runner_setup\s*\(", re.IGNORECASE) _RE_VERILOG_TEST_SUITE = re.compile(r"`TEST_SUITE\b") @@ -586,12 +569,8 @@ def associate(attr): _RE_ATTR_NAME = r"[a-zA-Z0-9_\-]+" -_RE_ATTRIBUTE = re.compile( - r"vunit:\s*(?P\.?" + _RE_ATTR_NAME + r")", re.IGNORECASE -) -_RE_PRAGMA_LEGACY = re.compile( - r"vunit_pragma\s+(?P" + _RE_ATTR_NAME + ")", re.IGNORECASE -) +_RE_ATTRIBUTE = re.compile(r"vunit:\s*(?P\.?" 
+ _RE_ATTR_NAME + r")", re.IGNORECASE) +_RE_PRAGMA_LEGACY = re.compile(r"vunit_pragma\s+(?P" + _RE_ATTR_NAME + ")", re.IGNORECASE) _VALID_ATTRIBUTES = ["run_all_in_same_sim", "fail_on_warning"] @@ -627,10 +606,7 @@ def _find(attr_class, regex): location = FileLocation.from_match(file_name, match, "name", line_offsets) if not _is_user_attribute(name) and name not in _VALID_ATTRIBUTES: - raise RuntimeError( - "Invalid attribute '%s' in %s line %i" - % (name, file_name, location.lineno) - ) + raise RuntimeError("Invalid attribute '%s' in %s line %i" % (name, file_name, location.lineno)) attributes.append(attr_class(name, value=None, location=location)) @@ -642,9 +618,7 @@ def _find(attr_class, regex): # Add value field to be forwards compatible with having attribute values Attribute = collections.namedtuple("Attribute", ["name", "value", "location"]) -LegacyAttribute = collections.namedtuple( - "LegacyAttribute", ["name", "value", "location"] -) +LegacyAttribute = collections.namedtuple("LegacyAttribute", ["name", "value", "location"]) VERILOG_REMOVE_COMMENT_RE = re.compile(r"(//[^\n]*)|(/\*.*?\*/)", re.DOTALL) diff --git a/vunit/test/bench_list.py b/vunit/test/bench_list.py index 6668b8a21..897f2e105 100644 --- a/vunit/test/bench_list.py +++ b/vunit/test/bench_list.py @@ -111,8 +111,7 @@ def tb_filter(design_unit): elif has_runner_cfg and not has_tb_name: LOGGER.warning( - "%s %s has runner_cfg %s but the file name and the %s name does not match regex %s\n" - "in file %s", + "%s %s has runner_cfg %s but the file name and the %s name does not match regex %s\n" "in file %s", design_unit_type, design_unit.name, generic_type, diff --git a/vunit/test/list.py b/vunit/test/list.py index 02303228b..16d2cc73b 100644 --- a/vunit/test/list.py +++ b/vunit/test/list.py @@ -33,9 +33,7 @@ def keep_matches(self, test_filter): """ Keep only testcases matching any pattern """ - self._test_suites = [ - test for test in self._test_suites if test.keep_matches(test_filter) - ] + self._test_suites = [test for test in self._test_suites if test.keep_matches(test_filter)] @property def num_tests(self): diff --git a/vunit/test/report.py b/vunit/test/report.py index 6320d4bde..20e5fb6a2 100644 --- a/vunit/test/report.py +++ b/vunit/test/report.py @@ -91,9 +91,7 @@ def print_latest_status(self, total_tests): args.append("F=%i" % len(failed)) args.append("T=%i" % total_tests) - self._printer.write( - " (%s) %s (%.1f seconds)\n" % (" ".join(args), result.name, result.time) - ) + self._printer.write(" (%s) %s (%.1f seconds)\n" % (" ".join(args), result.name, result.time)) def all_ok(self): """ @@ -122,9 +120,7 @@ def print_str(self): prefix = "==== Summary " max_len = max(len(test.name) for test in all_tests) - self._printer.write( - "%s%s\n" % (prefix, "=" * (max(max_len - len(prefix) + 25, 0))) - ) + self._printer.write("%s%s\n" % (prefix, "=" * (max(max_len - len(prefix) + 25, 0)))) for test_result in all_tests: test_result.print_status(self._printer, padding=max_len) @@ -283,9 +279,7 @@ def print_status(self, printer, padding=0): my_padding = max(padding - len(self.name), 0) - printer.write( - "%s (%.1f seconds)\n" % (self.name + (" " * my_padding), self.time) - ) + printer.write("%s (%.1f seconds)\n" % (self.name + (" " * my_padding), self.time)) def to_xml(self, xunit_xml_format): """ diff --git a/vunit/test/runner.py b/vunit/test/runner.py index 7d6136ff1..1afaa478d 100644 --- a/vunit/test/runner.py +++ b/vunit/test/runner.py @@ -152,9 +152,7 @@ def _run_thread(self, write_stdout, scheduler, num_tests, is_main): 
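# A short, self-contained sketch of how test case names and "-- vunit:"
# attributes are picked out of testbench code by the patterns in
# vunit/test/bench.py shown above (the regex group names are written out here
# for illustration, and the testbench text below is made up):
import re

TEST_CASE = re.compile(r'(\s|\()+run\s*\(\s*"(?P<name>.*?)"\s*\)', re.IGNORECASE)
ATTRIBUTE = re.compile(r"vunit:\s*(?P<name>\.?[a-zA-Z0-9_\-]+)", re.IGNORECASE)

code = """
-- vunit: run_all_in_same_sim
if run("test_reset") then
elsif run("test_counting") then
"""

print([m.group("name") for m in TEST_CASE.finditer(code)])
# -> ['test_reset', 'test_counting']
print([m.group("name") for m in ATTRIBUTE.finditer(code)])
# -> ['run_all_in_same_sim']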
print("Starting %s" % test_name) print("Output file: %s" % output_file_name) - self._run_test_suite( - test_suite, write_stdout, num_tests, output_path, output_file_name - ) + self._run_test_suite(test_suite, write_stdout, num_tests, output_path, output_file_name) except StopIteration: return @@ -177,9 +175,7 @@ def _get_output_path(self, test_suite_name): Ensure no bad characters and no long path names. """ output_path = str(Path(self._output_path).resolve()) - safe_name = ( - "".join(char if _is_legal(char) else "_" for char in test_suite_name) + "_" - ) + safe_name = "".join(char if _is_legal(char) else "_" for char in test_suite_name) + "_" hash_name = hash_string(test_suite_name) if "VUNIT_SHORT_TEST_OUTPUT_PATHS" in os.environ: @@ -188,22 +184,13 @@ def _get_output_path(self, test_suite_name): max_path = 260 margin = int(os.environ.get("VUNIT_TEST_OUTPUT_PATH_MARGIN", "100")) prefix_len = len(output_path) - full_name = ( - safe_name[ - : min( - max_path - margin - prefix_len - len(hash_name), len(safe_name) - ) - ] - + hash_name - ) + full_name = safe_name[: min(max_path - margin - prefix_len - len(hash_name), len(safe_name))] + hash_name else: full_name = safe_name + hash_name return str(Path(output_path) / full_name) - def _add_skipped_tests( - self, test_suite, results, start_time, num_tests, output_file_name - ): + def _add_skipped_tests(self, test_suite, results, start_time, num_tests, output_file_name): """ Add skipped tests """ @@ -228,9 +215,7 @@ def _run_test_suite( # pylint: disable=too-many-locals try: self._prepare_test_suite_output_path(output_path) output_file = wrap( - Path(output_file_name).open( # pylint: disable=consider-using-with - "a+", encoding="utf-8" - ), + Path(output_file_name).open("a+", encoding="utf-8"), # pylint: disable=consider-using-with use_color=False, ) output_file.seek(0) @@ -239,9 +224,7 @@ def _run_test_suite( # pylint: disable=too-many-locals if write_stdout: output_from = self._stdout_ansi else: - color_output_file = Path( - color_output_file_name - ).open( # pylint: disable=consider-using-with + color_output_file = Path(color_output_file_name).open( # pylint: disable=consider-using-with "w", encoding="utf-8" ) output_from = color_output_file @@ -260,9 +243,7 @@ def read_output(): results = test_suite.run(output_path=output_path, read_output=read_output) except KeyboardInterrupt as exk: - self._add_skipped_tests( - test_suite, results, start_time, num_tests, output_file_name - ) + self._add_skipped_tests(test_suite, results, start_time, num_tests, output_file_name) raise KeyboardInterrupt from exk except: # pylint: disable=bare-except if self._dont_catch_exceptions: @@ -273,9 +254,7 @@ def read_output(): finally: self._local.output = self._stdout - for fptr in ( - ptr for ptr in [output_file, color_output_file] if ptr is not None - ): + for fptr in (ptr for ptr in [output_file, color_output_file] if ptr is not None): fptr.flush() fptr.close() @@ -283,16 +262,10 @@ def read_output(): with self._stdout_lock(): - if ( - (color_output_file is not None) - and (any_not_passed or self._is_verbose) - and not self._is_quiet - ): + if (color_output_file is not None) and (any_not_passed or self._is_verbose) and not self._is_quiet: self._print_output(color_output_file_name) - self._add_results( - test_suite, results, start_time, num_tests, output_file_name - ) + self._add_results(test_suite, results, start_time, num_tests, output_file_name) if self._fail_fast and any_not_passed: self._abort = True @@ -338,9 +311,7 @@ def _print_output(self, 
output_file_name): for line in fread.readlines(): self._stdout_ansi.write(line) - def _add_results( - self, test_suite, results, start_time, num_tests, output_file_name - ): + def _add_results(self, test_suite, results, start_time, num_tests, output_file_name): """ Add results to test report """ diff --git a/vunit/test/suites.py b/vunit/test/suites.py index 800215f25..0334d29b8 100644 --- a/vunit/test/suites.py +++ b/vunit/test/suites.py @@ -129,9 +129,7 @@ def _merge_attributes(attribute_names, attributes): for test in self._tests if test_filter( name=_full_name(self.name, test.name), - attribute_names=_merge_attributes( - test.attribute_names, self._configuration.attributes - ), + attribute_names=_merge_attributes(test.attribute_names, self._configuration.attributes), ) ] self._run.set_test_cases([test.name for test in self._tests]) @@ -142,10 +140,7 @@ def run(self, *args, **kwargs): Run the test suite using output_path """ results = self._run.run(*args, **kwargs) - results = { - _full_name(self._name, test_name): result - for test_name, result in results.items() - } + results = {_full_name(self._name, test_name): result for test_name, result in results.items()} return results @@ -154,9 +149,7 @@ class TestRun(object): A single simulation run yielding the results for one or several test cases """ - def __init__( - self, simulator_if, config, elaborate_only, test_suite_name, test_cases - ): + def __init__(self, simulator_if, config, elaborate_only, test_suite_name, test_cases): self._simulator_if = simulator_if self._config = config self._elaborate_only = elaborate_only @@ -176,9 +169,7 @@ def run(self, output_path, read_output): for name in self._test_cases: results[name] = FAILED - if not self._config.call_pre_config( - output_path, self._simulator_if.output_path - ): + if not self._config.call_pre_config(output_path, self._simulator_if.output_path): return results # Ensure result file exists @@ -215,10 +206,7 @@ def _check_results(self, results, sim_ok): if self._simulator_if.has_valid_exit_code() and not sim_ok: return ( True, - dict( - (name, FAILED) if results[name] is PASSED else (name, results[name]) - for name in results - ), + dict((name, FAILED) if results[name] is PASSED else (name, results[name]) for name in results), ) return False, results @@ -230,17 +218,12 @@ def _simulate(self, output_path): config = self._config.copy() - if ( - "output_path" in config.generic_names - and "output_path" not in config.generics - ): + if "output_path" in config.generic_names and "output_path" not in config.generics: config.generics["output_path"] = "%s/" % output_path.replace("\\", "/") runner_cfg = { "enabled_test_cases": ",".join( - encode_test_case(test_case) - for test_case in self._test_cases - if test_case is not None + encode_test_case(test_case) for test_case in self._test_cases if test_case is not None ), "use_color": self._simulator_if.use_color, "output path": output_path.replace("\\", "/") + "/", diff --git a/vunit/ui/__init__.py b/vunit/ui/__init__.py index 13e1d2509..b97bb235d 100644 --- a/vunit/ui/__init__.py +++ b/vunit/ui/__init__.py @@ -43,9 +43,7 @@ from .results import Results -class VUnit( # pylint: disable=too-many-instance-attributes, too-many-public-methods - object -): +class VUnit(object): # pylint: disable=too-many-instance-attributes, too-many-public-methods """ The public interface of VUnit @@ -81,9 +79,7 @@ def from_argv( """ args = VUnitCLI().parse_args(argv=argv) - return cls.from_args( - args, compile_builtins=compile_builtins, vhdl_standard=vhdl_standard - 
) + return cls.from_args(args, compile_builtins=compile_builtins, vhdl_standard=vhdl_standard) @classmethod def from_args( @@ -147,9 +143,7 @@ def test_filter(name, attribute_names): self._simulator_output_path = str(Path(self._output_path) / "none") else: simulator_class = self._simulator_class - self._simulator_output_path = str( - Path(self._output_path) / simulator_class.name - ) + self._simulator_output_path = str(Path(self._output_path) / simulator_class.name) self._create_output_path(args.clean) @@ -198,9 +192,7 @@ def _configure_logging(log_level): Configure logging based on log_level string """ level = getattr(logging, log_level.upper()) - logging.basicConfig( - filename=None, format="%(levelname)7s - %(message)s", level=level - ) + logging.basicConfig(filename=None, format="%(levelname)7s - %(message)s", level=level) def _which_vhdl_standard(self, vhdl_standard: Optional[str]) -> VHDLStandard: """ @@ -212,9 +204,7 @@ def _which_vhdl_standard(self, vhdl_standard: Optional[str]) -> VHDLStandard: return VHDL.standard(vhdl_standard) - def add_external_library( - self, library_name, path: Union[str, Path], vhdl_standard: Optional[str] = None - ): + def add_external_library(self, library_name, path: Union[str, Path], vhdl_standard: Optional[str] = None): """ Add an externally compiled library as a black-box @@ -240,9 +230,7 @@ def add_external_library( ) return self.library(library_name) - def add_source_files_from_csv( - self, project_csv_path: Union[str, Path], vhdl_standard: Optional[str] = None - ): + def add_source_files_from_csv(self, project_csv_path: Union[str, Path], vhdl_standard: Optional[str] = None): """ Add a project configuration, mapping all the libraries and files @@ -266,18 +254,12 @@ def add_source_files_from_csv( lib_name = row[0].strip() no_normalized_file = row[1].strip() file_name_ = str((ppath.parent / no_normalized_file).resolve()) - lib = ( - self.library(lib_name) - if lib_name in libs - else self.add_library(lib_name) - ) + lib = self.library(lib_name) if lib_name in libs else self.add_library(lib_name) libs.add(lib_name) file_ = lib.add_source_file(file_name_, vhdl_standard=vhdl_standard) files.append(file_) elif len(row) > 2: - LOGGER.error( - "More than one library and one file in csv description" - ) + LOGGER.error("More than one library and one file in csv description") return files def add_library( @@ -309,10 +291,7 @@ def add_library( if not self._project.has_library(library_name): self._project.add_library(library_name, str(path.resolve()), standard) elif not allow_duplicate: - raise ValueError( - "Library %s already added. Use allow_duplicate to ignore this error." - % library_name - ) + raise ValueError("Library %s already added. Use allow_duplicate to ignore this error." % library_name) return self.library(library_name) def library(self, library_name: str): @@ -344,9 +323,7 @@ def set_attribute(self, name: str, value: str, allow_empty: Optional[bool] = Fal Only affects test benches added *before* the attribute is set. """ test_benches = self._test_bench_list.get_test_benches() - for test_bench in check_not_empty( - test_benches, allow_empty, "No test benches found" - ): + for test_bench in check_not_empty(test_benches, allow_empty, "No test benches found"): test_bench.set_attribute(name, value) def set_generic(self, name: str, value: str, allow_empty: Optional[bool] = False): @@ -367,9 +344,7 @@ def set_generic(self, name: str, value: str, allow_empty: Optional[bool] = False Only affects test benches added *before* the generic is set. 
""" test_benches = self._test_bench_list.get_test_benches() - for test_bench in check_not_empty( - test_benches, allow_empty, "No test benches found" - ): + for test_bench in check_not_empty(test_benches, allow_empty, "No test benches found"): test_bench.set_generic(name.lower(), value) def set_parameter(self, name: str, value: str, allow_empty: Optional[bool] = False): @@ -390,9 +365,7 @@ def set_parameter(self, name: str, value: str, allow_empty: Optional[bool] = Fal Only affects test benches added *before* the parameter is set. """ test_benches = self._test_bench_list.get_test_benches() - for test_bench in check_not_empty( - test_benches, allow_empty, "No test benches found" - ): + for test_bench in check_not_empty(test_benches, allow_empty, "No test benches found"): test_bench.set_generic(name, value) def set_sim_option( @@ -420,14 +393,10 @@ def set_sim_option( Only affects test benches added *before* the option is set. """ test_benches = self._test_bench_list.get_test_benches() - for test_bench in check_not_empty( - test_benches, allow_empty, "No test benches found" - ): + for test_bench in check_not_empty(test_benches, allow_empty, "No test benches found"): test_bench.set_sim_option(name, value, overwrite) - def set_compile_option( - self, name: str, value: str, allow_empty: Optional[bool] = False - ): + def set_compile_option(self, name: str, value: str, allow_empty: Optional[bool] = False): """ Set compile option of all files @@ -446,14 +415,10 @@ def set_compile_option( Only affects files added *before* the option is set. """ source_files = self._project.get_source_files_in_order() - for source_file in check_not_empty( - source_files, allow_empty, "No source files found" - ): + for source_file in check_not_empty(source_files, allow_empty, "No source files found"): source_file.set_compile_option(name, value) - def add_compile_option( - self, name: str, value: str, allow_empty: Optional[bool] = False - ): + def add_compile_option(self, name: str, value: str, allow_empty: Optional[bool] = False): """ Add compile option to all files @@ -465,14 +430,10 @@ def add_compile_option( Only affects files added *before* the option is set. """ source_files = self._project.get_source_files_in_order() - for source_file in check_not_empty( - source_files, allow_empty, "No source files found" - ): + for source_file in check_not_empty(source_files, allow_empty, "No source files found"): source_file.add_compile_option(name, value) - def get_source_file( - self, file_name: Union[str, Path], library_name: Optional[str] = None - ): + def get_source_file(self, file_name: Union[str, Path], library_name: Optional[str] = None): """ Get a source file @@ -485,17 +446,12 @@ def get_source_file( files = self.get_source_files(fstr, library_name, allow_empty=True) if len(files) > 1: - raise ValueError( - "Found file named '%s' in multiple-libraries, " - "add explicit library_name." % fstr - ) + raise ValueError("Found file named '%s' in multiple-libraries, " "add explicit library_name." 
% fstr) if not files: if library_name is None: raise ValueError("Found no file named '%s'" % fstr) - raise ValueError( - "Found no file named '%s' in library '%s'" % (fstr, library_name) - ) + raise ValueError("Found no file named '%s' in library '%s'" % (fstr, library_name)) return files[0] def get_source_files( @@ -530,9 +486,7 @@ def get_source_files( results, allow_empty, ("Pattern %r did not match any file" % pattern) - + ( - ("within library %s" % library_name) if library_name is not None else "" - ), + + (("within library %s" % library_name) if library_name is not None else ""), ) return SourceFileList(results) @@ -624,9 +578,7 @@ def add_source_file( # pylint: disable=too-many-arguments file_type=file_type, ) - def _preprocess( - self, library_name: str, file_name: Union[str, Path], preprocessors - ): + def _preprocess(self, library_name: str, file_name: Union[str, Path], preprocessors): """ Preprocess file_name within library_name using explicit preprocessors if preprocessors is None then use implicit globally defined processors @@ -660,13 +612,9 @@ def _preprocess( idx = 1 while ostools.file_exists(pp_file_name): - LOGGER.debug( - "Preprocessed file exists '%s', adding prefix", pp_file_name - ) + LOGGER.debug("Preprocessed file exists '%s', adding prefix", pp_file_name) pp_file_name = str( - Path(self._preprocessed_path) - / library_name - / ("%i_%s" % (idx, fname)), + Path(self._preprocessed_path) / library_name / ("%i_%s" % (idx, fname)), ) idx += 1 @@ -679,9 +627,7 @@ def add_preprocessor(self, preprocessor): """ self._external_preprocessors.append(preprocessor) - def enable_location_preprocessing( - self, additional_subprograms=None, exclude_subprograms=None - ): + def enable_location_preprocessing(self, additional_subprograms=None, exclude_subprograms=None): """ Inserts file name and line number information into VUnit check and log subprograms calls. Custom subprograms can also be added. Must be called before adding any files. 
@@ -746,9 +692,7 @@ def _create_tests(self, simulator_if: Union[None, SimulatorInterface]): Create the test cases """ self._test_bench_list.warn_when_empty() - test_list = self._test_bench_list.create_tests( - simulator_if, self._args.elaborate - ) + test_list = self._test_bench_list.create_tests(simulator_if, self._args.elaborate) test_list.keep_matches(self._test_filter) return test_list @@ -788,9 +732,7 @@ def _create_simulator_if(self): if not Path(self._simulator_output_path).exists(): os.makedirs(self._simulator_output_path) - return self._simulator_class.from_args( - args=self._args, output_path=self._simulator_output_path - ) + return self._simulator_class.from_args(args=self._args, output_path=self._simulator_output_path) def _main_run(self, post_run): """ @@ -836,9 +778,7 @@ def _main_list_only(self): print("Listed %i tests" % test_list.num_tests) return True - def _main_export_json( - self, json_file_name: Union[str, Path] - ): # pylint: disable=too-many-locals + def _main_export_json(self, json_file_name: Union[str, Path]): # pylint: disable=too-many-locals """ Main function when exporting to JSON """ @@ -958,9 +898,7 @@ def _get_testbench_files(self, simulator_if: Union[None, SimulatorInterface]): test_list = self._create_tests(simulator_if) tb_file_names = {test_suite.file_name for test_suite in test_list} return [ - self.get_source_file( # pylint: disable=protected-access - file_name - )._source_file + self.get_source_file(file_name)._source_file # pylint: disable=protected-access for file_name in tb_file_names ] @@ -1055,17 +993,9 @@ def get_compile_order(self, source_files=None): if source_files is None: source_files = self.get_source_files(allow_empty=True) - target_files = [ - source_file._source_file # pylint: disable=protected-access - for source_file in source_files - ] + target_files = [source_file._source_file for source_file in source_files] # pylint: disable=protected-access source_files = self._project.get_dependencies_in_compile_order(target_files) - return SourceFileList( - [ - SourceFile(source_file, self._project, self) - for source_file in source_files - ] - ) + return SourceFileList([SourceFile(source_file, self._project, self) for source_file in source_files]) def get_implementation_subset(self, source_files): """ @@ -1076,19 +1006,9 @@ def get_implementation_subset(self, source_files): :param source_files: A list of :class:`.SourceFile` objects :returns: A list of :class:`.SourceFile` objects which is the implementation subset. 
""" - target_files = [ - source_file._source_file # pylint: disable=protected-access - for source_file in source_files - ] - source_files = self._project.get_dependencies_in_compile_order( - target_files, implementation_dependencies=True - ) - return SourceFileList( - [ - SourceFile(source_file, self._project, self) - for source_file in source_files - ] - ) + target_files = [source_file._source_file for source_file in source_files] # pylint: disable=protected-access + source_files = self._project.get_dependencies_in_compile_order(target_files, implementation_dependencies=True) + return SourceFileList([SourceFile(source_file, self._project, self) for source_file in source_files]) def get_simulator_name(self): """ diff --git a/vunit/ui/library.py b/vunit/ui/library.py index 0c350617a..655f7e1ad 100644 --- a/vunit/ui/library.py +++ b/vunit/ui/library.py @@ -314,9 +314,7 @@ def test_bench(self, name): """ name = name.lower() - return TestBench( - self._test_bench_list.get_test_bench(self._library_name, name), self - ) + return TestBench(self._test_bench_list.get_test_bench(self._library_name, name), self) def get_test_benches(self, pattern="*", allow_empty=False): """ @@ -327,9 +325,7 @@ def get_test_benches(self, pattern="*", allow_empty=False): :returns: A list of :class:`.TestBench` objects """ results = [] - for test_bench in self._test_bench_list.get_test_benches_in_library( - self._library_name - ): + for test_bench in self._test_bench_list.get_test_benches_in_library(self._library_name): if not fnmatch(Path(test_bench.name).resolve(), pattern): continue diff --git a/vunit/ui/packagefacade.py b/vunit/ui/packagefacade.py index 2123deb5a..8d675d097 100644 --- a/vunit/ui/packagefacade.py +++ b/vunit/ui/packagefacade.py @@ -23,9 +23,7 @@ def __init__(self, parent, library_name, package_name, design_unit): self._package_name = package_name self._design_unit = design_unit - def generate_codecs( - self, codec_package_name=None, used_packages=None, output_file_name=None - ): + def generate_codecs(self, codec_package_name=None, used_packages=None, output_file_name=None): """ Generates codecs for the datatypes in this Package """ @@ -37,8 +35,6 @@ def generate_codecs( file_extension = Path(self._design_unit.source_file.name).suffix output_file_name = codecs_path / (codec_package_name + file_extension) - codec_generator.generate_codecs( - self._design_unit, codec_package_name, used_packages, output_file_name - ) + codec_generator.generate_codecs(self._design_unit, codec_package_name, used_packages, output_file_name) return self._parent.add_source_files(output_file_name, self._library_name) diff --git a/vunit/ui/results.py b/vunit/ui/results.py index 2c87f8a57..c0d5ee987 100644 --- a/vunit/ui/results.py +++ b/vunit/ui/results.py @@ -39,9 +39,7 @@ def get_report(self): :returns: A :class:`Report` object """ report = Report(self._output_path) - for ( - test - ) in self._report._test_results_in_order(): # pylint: disable=protected-access + for test in self._report._test_results_in_order(): # pylint: disable=protected-access obj = test.to_dict() report.tests.update( { @@ -94,9 +92,7 @@ def post_func(results): vu.main(post_run=post_func) """ - def __init__( - self, test_output_path: Union[str, Path], status, time, path: Union[str, Path] - ): + def __init__(self, test_output_path: Union[str, Path], status, time, path: Union[str, Path]): self._test_output_path = Path(test_output_path) self.status = status self.time = time @@ -108,8 +104,4 @@ def relpath(self) -> str: If the path is a subdir to the default 
TEST_OUTPUT_PATH, return the subdir only """ base = self.path.name - return str( - base - if (self._test_output_path / base).resolve() == self.path.resolve() - else self.path - ) + return str(base if (self._test_output_path / base).resolve() == self.path.resolve() else self.path) diff --git a/vunit/ui/source.py b/vunit/ui/source.py index e619a9fad..3cd6e4f02 100644 --- a/vunit/ui/source.py +++ b/vunit/ui/source.py @@ -143,12 +143,8 @@ def add_dependency_on(self, source_file): my_file.add_dependency_on(other_files) """ if isinstance(source_file, SourceFile): - private_source_file = ( - source_file._source_file # pylint: disable=protected-access - ) - self._project.add_manual_dependency( - self._source_file, depends_on=private_source_file - ) + private_source_file = source_file._source_file # pylint: disable=protected-access + self._project.add_manual_dependency(self._source_file, depends_on=private_source_file) elif hasattr(source_file, "__iter__"): for element in source_file: self.add_dependency_on(element) diff --git a/vunit/vhdl/check/run.py b/vunit/vhdl/check/run.py index 2818ea78e..b4e68fe2d 100644 --- a/vunit/vhdl/check/run.py +++ b/vunit/vhdl/check/run.py @@ -18,16 +18,12 @@ VU = VUnit.from_argv() LIB = VU.add_library("lib") -LIB.add_source_files( - Path(ROOT) / "vunit" / "vhdl" / "check" / "test" / "test_support.vhd" -) +LIB.add_source_files(Path(ROOT) / "vunit" / "vhdl" / "check" / "test" / "test_support.vhd") VU.add_library("logging_tb_lib").add_source_files( Path(ROOT) / "vunit" / "vhdl" / "logging" / "test" / "test_support_pkg.vhd" ) -for file_name in glob( - str(Path(ROOT) / "vunit" / "vhdl" / "check" / "test" / "tb_*.vhd") -): +for file_name in glob(str(Path(ROOT) / "vunit" / "vhdl" / "check" / "test" / "tb_*.vhd")): if VU.vhdl_standard not in ["2008", "2019"] and file_name.endswith("2008p.vhd"): continue @@ -38,8 +34,6 @@ TB_CHECK = LIB.entity("tb_check") TB_CHECK.add_config(generics=dict(use_check_not_check_true=True), name="using check") -TB_CHECK.add_config( - generics=dict(use_check_not_check_true=False), name="using check_true" -) +TB_CHECK.add_config(generics=dict(use_check_not_check_true=False), name="using check_true") VU.main() diff --git a/vunit/vhdl/check/tools/generate_check_equal.py b/vunit/vhdl/check/tools/generate_check_equal.py index 132424722..309a18922 100644 --- a/vunit/vhdl/check/tools/generate_check_equal.py +++ b/vunit/vhdl/check/tools/generate_check_equal.py @@ -558,11 +558,7 @@ def dual_format(base_type, got_or_expected): expected_or_got = "expected" if got_or_expected == "got" else "got" if base_type in ["unsigned", "signed", "std_logic_vector"]: - return ( - 'to_nibble_string(%s) & " (" & ' % got_or_expected - + "to_integer_string(%s) & " % got_or_expected - + '")"' - ) + return 'to_nibble_string(%s) & " (" & ' % got_or_expected + "to_integer_string(%s) & " % got_or_expected + '")"' return ( 'to_string(%s) & " (" & ' % got_or_expected @@ -580,9 +576,7 @@ def generate_impl(): impl = "" for c in combinations: t = Template(impl_template) - if (c[0] in ["unsigned", "signed", "std_logic_vector"]) or ( - c[1] in ["unsigned", "signed", "std_logic_vector"] - ): + if (c[0] in ["unsigned", "signed", "std_logic_vector"]) or (c[1] in ["unsigned", "signed", "std_logic_vector"]): got_str = dual_format(c[0], "got") expected_str = dual_format(c[1], "expected") else: @@ -781,9 +775,7 @@ def replace_region(region_name, file_name, new_contents): if not inside_region: result += line + "\n" - if previous_line.startswith(" -- %s" % region_name) and line.startswith( - " 
----------" - ): + if previous_line.startswith(" -- %s" % region_name) and line.startswith(" ----------"): assert not found_region inside_region = True @@ -806,9 +798,7 @@ def main(): str(Path(__file__).parent.parent / "src" / "check.vhd"), generate_impl(), ) - with (Path(__file__).parent.parent / "test" / "tb_check_equal.vhd").open( - "wb" - ) as fptr: + with (Path(__file__).parent.parent / "test" / "tb_check_equal.vhd").open("wb") as fptr: fptr.write(generate_test().encode()) diff --git a/vunit/vhdl/check/tools/generate_check_match.py b/vunit/vhdl/check/tools/generate_check_match.py index e31ca610e..77b3553aa 100644 --- a/vunit/vhdl/check/tools/generate_check_match.py +++ b/vunit/vhdl/check/tools/generate_check_match.py @@ -344,11 +344,7 @@ def dual_format(base_type, got_or_expected): expected_or_got = "expected" if got_or_expected == "got" else "got" if base_type in ["unsigned", "signed", "std_logic_vector"]: - return ( - 'to_nibble_string(%s) & " (" & ' % got_or_expected - + "to_integer_string(%s) & " % got_or_expected - + '")"' - ) + return 'to_nibble_string(%s) & " (" & ' % got_or_expected + "to_integer_string(%s) & " % got_or_expected + '")"' return ( 'to_string(%s) & " (" & ' % got_or_expected @@ -366,9 +362,7 @@ def generate_impl(): impl = "" for c in combinations: t = Template(impl_template) - if (c[0] in ["unsigned", "signed", "std_logic_vector"]) or ( - c[1] in ["unsigned", "signed", "std_logic_vector"] - ): + if (c[0] in ["unsigned", "signed", "std_logic_vector"]) or (c[1] in ["unsigned", "signed", "std_logic_vector"]): got_str = dual_format(c[0], "got") expected_str = dual_format(c[1], "expected") else: @@ -466,9 +460,7 @@ def main(): check_file_name = str(Path(__file__).parent.parent / "src" / "check.vhd") replace_region("check_match", check_file_name, generate_impl()) - with (Path(__file__).parent.parent / "test" / "tb_check_match.vhd").open( - "wb" - ) as fptr: + with (Path(__file__).parent.parent / "test" / "tb_check_match.vhd").open("wb") as fptr: fptr.write(generate_test().encode()) diff --git a/vunit/vhdl/logging/run.py b/vunit/vhdl/logging/run.py index 6a66ab55d..ed48e363b 100644 --- a/vunit/vhdl/logging/run.py +++ b/vunit/vhdl/logging/run.py @@ -24,9 +24,7 @@ def main(): preprocessor = location_preprocessor.LocationPreprocessor() preprocessor.add_subprogram("print_pre_vhdl_2019_style") preprocessor.remove_subprogram("info") - vunit_lib.add_source_files( - root / "test" / "tb_location.vhd", preprocessors=[preprocessor] - ) + vunit_lib.add_source_files(root / "test" / "tb_location.vhd", preprocessors=[preprocessor]) if vhdl_2019: testbenches = vunit_lib.get_source_files("*tb*") diff --git a/vunit/vhdl/verification_components/run.py b/vunit/vhdl/verification_components/run.py index 8a0b4c1d8..068b209d1 100644 --- a/vunit/vhdl/verification_components/run.py +++ b/vunit/vhdl/verification_components/run.py @@ -22,9 +22,7 @@ def encode(tb_cfg): def gen_wb_tests(obj, *args): - for dat_width, num_cycles, strobe_prob, ack_prob, stall_prob, slave_inst in product( - *args - ): + for dat_width, num_cycles, strobe_prob, ack_prob, stall_prob, slave_inst in product(*args): tb_cfg = dict( dat_width=dat_width, # TODO remove fixed addr @@ -81,9 +79,7 @@ def gen_avalon_master_tests(obj, *args): if test.name == "wr single rd single": gen_avalon_master_tests(test, [1], [1.0], [0.0], [1.0], [1.0]) else: - gen_avalon_master_tests( - test, [64], [1.0, 0.3], [0.0, 0.7], [1.0, 0.3], [1.0, 0.3] - ) + gen_avalon_master_tests(test, [64], [1.0, 0.3], [0.0, 0.7], [1.0, 0.3], [1.0, 0.3]) 
TB_WISHBONE_SLAVE = LIB.test_bench("tb_wishbone_slave") @@ -138,8 +134,7 @@ def gen_avalon_master_tests(obj, *args): for user_length in [0, 8]: for test in TB_AXI_STREAM.get_tests("*check"): test.add_config( - name="id_l=%d dest_l=%d user_l=%d" - % (id_length, dest_length, user_length), + name="id_l=%d dest_l=%d user_l=%d" % (id_length, dest_length, user_length), generics=dict( g_id_length=id_length, g_dest_length=dest_length, @@ -151,23 +146,17 @@ def gen_avalon_master_tests(obj, *args): for data_length in [0, 8]: for test in TB_AXI_STREAM_PROTOCOL_CHECKER.get_tests("*passing*tdata*"): - test.add_config( - name="data_length=%d" % data_length, generics=dict(data_length=data_length) - ) + test.add_config(name="data_length=%d" % data_length, generics=dict(data_length=data_length)) for test in TB_AXI_STREAM_PROTOCOL_CHECKER.get_tests("*failing*tid width*"): test.add_config(name="dest_length=25", generics=dict(dest_length=25)) - test.add_config( - name="id_length=8 dest_length=17", generics=dict(id_length=8, dest_length=17) - ) + test.add_config(name="id_length=8 dest_length=17", generics=dict(id_length=8, dest_length=17)) TEST_FAILING_MAX_WAITS = TB_AXI_STREAM_PROTOCOL_CHECKER.test( "Test failing check of that tready comes within max_waits after valid" ) for max_waits in [0, 8]: - TEST_FAILING_MAX_WAITS.add_config( - name="max_waits=%d" % max_waits, generics=dict(max_waits=max_waits) - ) + TEST_FAILING_MAX_WAITS.add_config(name="max_waits=%d" % max_waits, generics=dict(max_waits=max_waits)) TB_AXI_STREAM.test("test random stall on master").add_config( name="stall_master", generics=dict(g_stall_percentage_master=30) diff --git a/vunit/vhdl_parser.py b/vunit/vhdl_parser.py index 8f76ae9e4..d3f0ea05c 100644 --- a/vunit/vhdl_parser.py +++ b/vunit/vhdl_parser.py @@ -63,9 +63,7 @@ def __init__( # pylint: disable=too-many-arguments self.package_bodies = [] if package_bodies is None else package_bodies self.architectures = [] if architectures is None else architectures self.contexts = [] if contexts is None else contexts - self.component_instantiations = ( - [] if component_instantiations is None else component_instantiations - ) + self.component_instantiations = [] if component_instantiations is None else component_instantiations self.configurations = [] if configurations is None else configurations self.references = [] if references is None else references @@ -246,9 +244,7 @@ def _find_normal_packages(cls, code): if match: yield cls.parse(sub_code[: match.end()]) - _package_instance_re = re.compile( - "^" + PACKAGE_INSTANCE_PATTERN, re.MULTILINE | re.IGNORECASE - ) + _package_instance_re = re.compile("^" + PACKAGE_INSTANCE_PATTERN, re.MULTILINE | re.IGNORECASE) @classmethod def _find_package_instances(cls, code): @@ -419,11 +415,7 @@ def _find_generic_clause(cls, code): match_semicolon = semicolon.match(code[match.end() + closing_pos :]) if match_semicolon: return cls._parse_generic_clause( - code[ - match.start() : match.end() - + closing_pos - + match_semicolon.end() - ] + code[match.start() : match.end() + closing_pos + match_semicolon.end()] ) return [] @@ -455,13 +447,7 @@ def _find_port_clause(cls, code): ) match_semicolon = semicolon.match(code[match.end() + closing_pos :]) if match_semicolon: - return cls._parse_port_clause( - code[ - match.start() : match.end() - + closing_pos - + match_semicolon.end() - ] - ) + return cls._parse_port_clause(code[match.start() : match.end() + closing_pos + match_semicolon.end()]) return [] @staticmethod @@ -506,9 +492,7 @@ def _split_not_in_par(string, 
sep): _package_generic_re = re.compile(r"\s*package\s+", re.MULTILINE | re.IGNORECASE) _type_generic_re = re.compile(r"\s*type\s+", re.MULTILINE | re.IGNORECASE) - _function_generic_re = re.compile( - r"\s*(impure\s+)?(function|procedure)\s+", re.MULTILINE | re.IGNORECASE - ) + _function_generic_re = re.compile(r"\s*(impure\s+)?(function|procedure)\s+", re.MULTILINE | re.IGNORECASE) @classmethod def _parse_generic_clause(cls, code): @@ -555,9 +539,7 @@ def _parse_port_clause(cls, code): port_list = [] # Add interface elements to the port list for interface_element in interface_elements: - port_list.append( - VHDLInterfaceElement.parse(interface_element, is_signal=True) - ) + port_list.append(VHDLInterfaceElement.parse(interface_element, is_signal=True)) return port_list @@ -646,9 +628,7 @@ def without_mode(self): """ @returns A copy of this interface element without a mode """ - return VHDLInterfaceElement( - self.identifier, self.subtype_indication, init_value=self.init_value - ) + return VHDLInterfaceElement(self.identifier, self.subtype_indication, init_value=self.init_value) @classmethod def parse(cls, code, is_signal=False): @@ -672,9 +652,7 @@ def parse(cls, code, is_signal=False): subtype_indication = VHDLSubtypeIndication.parse(mode_split[1]) else: mode = None - subtype_indication = VHDLSubtypeIndication.parse( - interface_element_string.split(":")[1].strip() - ) + subtype_indication = VHDLSubtypeIndication.parse(interface_element_string.split(":")[1].strip()) # Extract initial value init_value_split = interface_element_string.split(":=") @@ -780,16 +758,9 @@ def find(cls, code): for element in elements: if ":" in element: identifier_list_and_subtype_indication = element.split(":") - identifier_list = [ - i.strip() - for i in identifier_list_and_subtype_indication[0].split(",") - ] - subtype_indication = VHDLSubtypeIndication.parse( - identifier_list_and_subtype_indication[1].strip() - ) - parsed_elements.append( - VHDLElementDeclaration(identifier_list, subtype_indication) - ) + identifier_list = [i.strip() for i in identifier_list_and_subtype_indication[0].split(",")] + subtype_indication = VHDLSubtypeIndication.parse(identifier_list_and_subtype_indication[1].strip()) + parsed_elements.append(VHDLElementDeclaration(identifier_list, subtype_indication)) yield cls(identifier, parsed_elements) @@ -883,9 +854,7 @@ def find(cls, code): """Iterate over new instances of VHDLArrayType for all array types within the code""" for array_type in cls._array_declaration_re.finditer(code): identifier = array_type.group("id") - subtype_indication = VHDLSubtypeIndication.parse( - array_type.group("subtype_indication") - ) + subtype_indication = VHDLSubtypeIndication.parse(array_type.group("subtype_indication")) ranges = array_type.group("ranges") range1_str, range2_str = cls._split_ranges(ranges) range1 = cls._parse_range(range1_str) @@ -957,9 +926,7 @@ def find_closing_delimiter(start, end, code): if count == 0: return delimiter.end() - raise ValueError( - "Failed to find closing delimiter to " + start + " in " + code + "." 
- ) + raise ValueError("Failed to find closing delimiter to " + start + " in " + code + ".") class VHDLReference(object): @@ -1012,9 +979,7 @@ def get_ids(match): names_within = uses[2:] if len(uses) > 2 else (None,) for name_within in names_within: ref = cls( - reference_type="package" - if match.group("use_type") == "use" - else "context", + reference_type="package" if match.group("use_type") == "use" else "context", library=uses[0], design_unit=uses[1], name_within=name_within, @@ -1060,14 +1025,10 @@ def _find_configuration_references(cls, code): """ references = [] for match in cls._configuration_reference_re.finditer(code): - references.append( - cls("configuration", match.group("lib"), match.group("cfg")) - ) + references.append(cls("configuration", match.group("lib"), match.group("cfg"))) return references - _package_instance_re = re.compile( - PACKAGE_INSTANCE_PATTERN, re.MULTILINE | re.IGNORECASE - ) + _package_instance_re = re.compile(PACKAGE_INSTANCE_PATTERN, re.MULTILINE | re.IGNORECASE) @classmethod def _find_package_instance_references(cls, code): @@ -1117,9 +1078,7 @@ def __eq__(self, other): ) def copy(self): - return VHDLReference( - self.reference_type, self.library, self.design_unit, self.name_within - ) + return VHDLReference(self.reference_type, self.library, self.design_unit, self.name_within) def is_entity_reference(self): return self.reference_type == "entity" diff --git a/vunit/vhdl_standard.py b/vunit/vhdl_standard.py index f054a1f40..ba09b691b 100644 --- a/vunit/vhdl_standard.py +++ b/vunit/vhdl_standard.py @@ -36,9 +36,7 @@ def __eq__(self, other): return False def __lt__(self, other): - return int(self._standard) < int( - other._standard # pylint: disable=protected-access - ) + return int(self._standard) < int(other._standard) # pylint: disable=protected-access def __str__(self): if self == VHDL.STD_1993: diff --git a/vunit/vivado/vivado.py b/vunit/vivado/vivado.py index e01c2ae18..170ef5613 100644 --- a/vunit/vivado/vivado.py +++ b/vunit/vivado/vivado.py @@ -13,9 +13,7 @@ from pathlib import Path -def add_from_compile_order_file( - vunit_obj, compile_order_file, dependency_scan_defaultlib=True -): +def add_from_compile_order_file(vunit_obj, compile_order_file, dependency_scan_defaultlib=True): """ Add Vivado IP:s from a compile order file """ @@ -35,9 +33,7 @@ def add_from_compile_order_file( # Optionally use VUnit dependency scanning for everything in xil_defaultlib, which # typically contains unencrypted top levels that instantiate encrypted implementations. - scan_dependencies = ( - dependency_scan_defaultlib and library_name == "xil_defaultlib" - ) + scan_dependencies = dependency_scan_defaultlib and library_name == "xil_defaultlib" source_file = vunit_obj.library(library_name).add_source_file( file_name, no_parse=not scan_dependencies, @@ -67,10 +63,7 @@ def create_compile_order_file(project_file, compile_order_file, vivado_path=None """ Create compile file from Vivado project """ - print( - "Generating Vivado project compile order into %s ..." - % str(Path(compile_order_file).resolve()) - ) + print("Generating Vivado project compile order into %s ..." % str(Path(compile_order_file).resolve())) fpath = Path(compile_order_file) if not fpath.parent.exists(): @@ -121,14 +114,8 @@ def run_vivado(tcl_file_name, tcl_args=None, cwd=None, vivado_path=None): Note: the shell=True is important in windows where Vivado is just a bat file. 
""" - vivado = ( - "vivado" - if vivado_path is None - else str(Path(vivado_path).resolve() / "bin" / "vivado") - ) - cmd = "{} -nojournal -nolog -notrace -mode batch -source {}".format( - vivado, str(Path(tcl_file_name).resolve()) - ) + vivado = "vivado" if vivado_path is None else str(Path(vivado_path).resolve() / "bin" / "vivado") + cmd = "{} -nojournal -nolog -notrace -mode batch -source {}".format(vivado, str(Path(tcl_file_name).resolve())) if tcl_args is not None: cmd += " -tclargs " + " ".join([str(val) for val in tcl_args]) diff --git a/vunit/vunit_cli.py b/vunit/vunit_cli.py index 70b7ae16c..94f7bd072 100644 --- a/vunit/vunit_cli.py +++ b/vunit/vunit_cli.py @@ -80,9 +80,7 @@ def _create_argument_parser(description=None, for_documentation=False): parser = argparse.ArgumentParser(description=description) - parser.add_argument( - "test_patterns", metavar="tests", nargs="*", default="*", help="Tests to run" - ) + parser.add_argument("test_patterns", metavar="tests", nargs="*", default="*", help="Tests to run") parser.add_argument( "--with-attributes", @@ -151,9 +149,7 @@ def _create_argument_parser(description=None, for_documentation=False): help="Only elaborate test benches without running", ) - parser.add_argument( - "--clean", action="store_true", default=False, help="Remove output path first" - ) + parser.add_argument("--clean", action="store_true", default=False, help="Remove output path first") parser.add_argument( "-o", @@ -162,9 +158,7 @@ def _create_argument_parser(description=None, for_documentation=False): help="Output path for compilation and simulation artifacts", ) - parser.add_argument( - "-x", "--xunit-xml", default=None, help="Xunit test report .xml file" - ) + parser.add_argument("-x", "--xunit-xml", default=None, help="Xunit test report .xml file") parser.add_argument( "--xunit-xml-format", @@ -192,10 +186,7 @@ def _create_argument_parser(description=None, for_documentation=False): "--dont-catch-exceptions", default=False, action="store_true", - help=( - "Let exceptions bubble up all the way. " - 'Useful when running with "python -m pdb".' - ), + help=("Let exceptions bubble up all the way. " 'Useful when running with "python -m pdb".'), ) parser.add_argument( @@ -214,9 +205,7 @@ def _create_argument_parser(description=None, for_documentation=False): help="Do not print test output even in the case of failure", ) - parser.add_argument( - "--no-color", action="store_true", default=False, help="Do not color output" - ) + parser.add_argument("--no-color", action="store_true", default=False, help="Do not color output") parser.add_argument( "--log-level", @@ -231,8 +220,7 @@ def _create_argument_parser(description=None, for_documentation=False): type=positive_int, default=1, help=( - "Number of tests to run in parallel. " - "Test output is not continuously written in verbose mode with p > 1" + "Number of tests to run in parallel. " "Test output is not continuously written in verbose mode with p > 1" ), ) @@ -244,9 +232,7 @@ def _create_argument_parser(description=None, for_documentation=False): help="Do not re-use the same simulator process for running different test cases (slower)", ) - parser.add_argument( - "--export-json", default=None, help="Export project information to a JSON file." 
- ) + parser.add_argument("--export-json", default=None, help="Export project information to a JSON file.") parser.add_argument("--version", action="version", version=version()) @@ -264,9 +250,7 @@ def positive_int(val): assert ival > 0 return ival except (ValueError, AssertionError) as exv: - raise argparse.ArgumentTypeError( - "'%s' is not a valid positive int" % val - ) from exv + raise argparse.ArgumentTypeError("'%s' is not a valid positive int" % val) from exv def _parser_for_documentation():