diff --git a/.flake8 b/.flake8 index bb823ab..4969764 100644 --- a/.flake8 +++ b/.flake8 @@ -5,6 +5,8 @@ count = True statistics = True -max-line-length = 128 +max-line-length = 100 exclude = .tox,.venv,.env,venv,env,build,dist,doc,a2lparser/gen/ + +extend-ignore = E203 diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 3ec3644..0df1346 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -1,20 +1,9 @@ name: build # Controls when the action will run. -on: - # Triggers the workflow on push or pull request events but only for the main branch - push: - branches: [ main ] - pull_request: - branches: [ main ] - - # Allows you to run this workflow manually from the Actions tab - workflow_dispatch: +on: push jobs: - ############### - # Linux Build # - ############### build-Linux: runs-on: ubuntu-latest diff --git a/.github/workflows/flake8.yml b/.github/workflows/flake8.yml index 83e6d30..4c4609d 100644 --- a/.github/workflows/flake8.yml +++ b/.github/workflows/flake8.yml @@ -36,4 +36,4 @@ jobs: # Performs a flake8 check on the fritzsniffer package and tests - name: Run flake8 run: | - flake8 a2lparser/ --config=.flake8 + flake8 a2lparser/ tests/ --config=.flake8 diff --git a/.github/workflows/publish-to-pypi.yml b/.github/workflows/publish-to-pypi.yml index 78fb986..16ee18c 100644 --- a/.github/workflows/publish-to-pypi.yml +++ b/.github/workflows/publish-to-pypi.yml @@ -30,8 +30,7 @@ jobs: path: dist/ publish-to-pypi: - name: >- - Publish Python 🐍 distribution 📦 to PyPI + name: PyPi - Publish distribution 📦 to PyPI if: startsWith(github.ref, 'refs/tags/') # only publish to PyPI on tag pushes needs: - build @@ -53,7 +52,7 @@ jobs: github-release: name: >- - Sign the Python 🐍 distribution 📦 with Sigstore + Sign the distribution 📦 with Sigstore and upload them to GitHub Release needs: - publish-to-pypi @@ -95,7 +94,7 @@ jobs: --repo '${{ github.repository }}' publish-to-testpypi: - name: Publish Python 🐍 distribution 📦 to TestPyPI + name: TestPyPi - Publish distribution 📦 to TestPyPI needs: - build runs-on: ubuntu-latest @@ -116,4 +115,4 @@ jobs: - name: Publish distribution 📦 to TestPyPI uses: pypa/gh-action-pypi-publish@release/v1 with: - repository-url: https://test.pypi.org/legacy/ \ No newline at end of file + repository-url: https://test.pypi.org/legacy/ diff --git a/README.md b/README.md index bd59ff9..ba0f224 100644 --- a/README.md +++ b/README.md @@ -9,18 +9,20 @@ ## Overview -The Python A2L Parser is a tool designed for reading A2L files compliant with the [ASAM MCD-2 MC](https://www.asam.net/standards/detail/mcd-2-mc/) Data Model for ECU Measurement and Calibration. This parser, implemented in Python using [PLY](https://ply.readthedocs.io/en/latest/index.html), constructs an Abstract Syntax Tree (AST) from A2L files, allowing for structured data access and utility functions like searching. +The Python A2L Parser is a tool designed to parse A2L files compliant with the [ASAM MCD-2 MC](https://www.asam.net/standards/detail/mcd-2-mc/) Data Model for ECU Measurement and Calibration. Implemented in Python using [PLY](https://ply.readthedocs.io/en/latest/index.html), it constructs an Abstract Syntax Tree (AST) from A2L files, enabling structured data access and utility functions such as searching. All resources used in development are sourced from publicly available information, including the [ASAM Wiki](https://www.asam.net/standards/detail/mcd-2-mc/wiki/). 
-This project supports ASAM MCD-2 MC Version 1.7.1 and focuses on parsing A2L grammar, not providing mapping capabilities. The module also includes functionality for converting parsed A2L files into simpler formats like XML, JSON, and YAML.
+The parser supports ASAM MCD-2 MC Version 1.7.1 and is focused on parsing A2L grammar without providing mapping capabilities. Additionally, the module includes functionality for converting parsed A2L files into simpler formats like XML, JSON, and YAML.
 
-You can use this repository to interpret A2L files, build upon this functionality, or for educational purposes.
+This repository can be used for interpreting or validating A2L files, extending its functionality, or for educational purposes.
 
-**Note:** This project is released under the GPL license with no warranty and is recommended for educational purposes. For professional solutions, consider exploring specialized tools such as the [MATLAB Vehicle Network Toolbox](https://www.mathworks.com/help/vnt/index.html) or the [Vector ASAP2 Toolset](https://www.vector.com/int/en/products/products-a-z/software/asap2-tool-set/).
+**Note:** Released under the GPL license with no warranty, this project is recommended for educational use. For professional solutions, consider specialized tools such as the [MATLAB Vehicle Network Toolbox](https://www.mathworks.com/help/vnt/index.html) or the [Vector ASAP2 Toolset](https://www.vector.com/int/en/products/products-a-z/software/asap2-tool-set/).
 
 ## Installation
 
 To install the A2L Parser, run:
 
+**Note:** Until I fix a few remaining minor issues and publish a release version, you can install the TestPyPI build:
+
 ```console
 pip install -i https://test.pypi.org/simple/ a2lparser --extra-index-url https://pypi.org/simple/
 ```
@@ -32,17 +34,34 @@
 from a2lparser.a2lparser import A2LParser
 from a2lparser.a2lparser_exception import A2LParserException
 
 try:
-    # Create Parser and parse files
-    ast = A2LParser(quiet=True).parse_file(files="./data/test.a2l")
+    # Create a parser and parse files.
+    # Allows multiple files to be passed with wildcards.
+    # The log_level argument controls the verbosity of the console output.
+    # Returns a dictionary.
+    ast_dict = A2LParser(log_level="INFO").parse_file("./testfiles/test_*.a2l")
+
+    # The dictionary holds the AbstractSyntaxTree object under the file name key.
+    ast = ast_dict["test_1.a2l"]
+
+    # Dictionary access on the abstract syntax tree.
+    # Returns a Python dictionary.
+    project = ast["PROJECT"]
+    module = project["MODULE"]
+    print(f"Project {project['Name']} with module: {module['Name']}")
+
+    # Searches for all MEASUREMENT sections.
+    # find_sections returns an AbstractSyntaxTree object.
+    measurements = ast.find_sections("MEASUREMENT")
 
-    # Dictionary access on abstract syntax tree
-    module = ast["test.a2l"]["PROJECT"]["MODULE"]
+    # All found MEASUREMENT sections are under the "MEASUREMENT" key
+    measurements_list = measurements["MEASUREMENT"]
 
-    # Searches for all MEASUREMENT sections
-    measurements = ast.find_sections("MEASUREMENT")
-    print(measurements)
+    print(f"Found {len(measurements_list)} MEASUREMENT sections.")
 except A2LParserException as ex:
+    # Catching A2LParserException:
+    # Generally occurs when a fatal error in parsing is encountered,
+    # or if the generated AST is empty (i.e., no data could be parsed).
print(ex) ``` @@ -69,5 +88,5 @@ options: --no-validation Disables possible A2L validation warnings --gen-ast [CONFIG] Generates python file containing AST node classes --log-level {DEBUG,INFO,WARNING,ERROR,CRITICAL} - --version show program's version number and exit ``` + --version show program's version number and exit ``` diff --git a/a2lparser/a2l/a2l_lex.py b/a2lparser/a2l/a2l_lex.py index 46e59d8..ea5b3ab 100644 --- a/a2lparser/a2l/a2l_lex.py +++ b/a2lparser/a2l/a2l_lex.py @@ -186,7 +186,11 @@ def t_NEWLINE(self, t): @TOKEN( r"\b(" - + r"|".join(LexerKeywords.keywords_type + LexerKeywords.keywords_enum + LexerKeywords.keywords_datatypes) + + r"|".join( + LexerKeywords.keywords_type + + LexerKeywords.keywords_enum + + LexerKeywords.keywords_datatypes + ) + r")\b" ) def t_KEYWORD_TYPE(self, t): diff --git a/a2lparser/a2l/a2l_validator.py b/a2lparser/a2l/a2l_validator.py index eccaeb8..7de5864 100644 --- a/a2lparser/a2l/a2l_validator.py +++ b/a2lparser/a2l/a2l_validator.py @@ -75,7 +75,9 @@ def validate(self, a2l_content: str) -> None: elif match.group().lower().startswith("/end"): _, last_section = sections_stack[-1] if last_section != section: - errors.append(f"Detected unexpected end of section on '{line.lstrip()}' at line {i}.") + errors.append( + f"Detected unexpected end of section on '{line.lstrip()}' at line {i}." + ) else: sections_stack.pop() @@ -86,6 +88,14 @@ def validate(self, a2l_content: str) -> None: raise self.A2LValidationError(errors) def _remove_comments(self, line: str) -> str: + """ + Removes comments from a given line of code. + + Args: + line (str): The line of code containing comments. + Returns: + str: The line of code with comments removed. + """ result = [] i = 0 length = len(line) @@ -95,17 +105,17 @@ def _remove_comments(self, line: str) -> str: while i < length: # If inside a comment block, skip characters until the end of the block if skip_tokens: - if line[i:i+2] == '*/': + if line[i : i + 2] == "*/": skip_tokens = False i += 2 continue i += 1 # Detect the start of a multiline comment - elif line[i:i+2] == '/*' and not string_literal_started: + elif line[i : i + 2] == "/*" and not string_literal_started: skip_tokens = True i += 2 # Detect the start of a single line comment - elif line[i:i+2] == '//' and not string_literal_started: + elif line[i : i + 2] == "//" and not string_literal_started: break # Handle string literals properly elif line[i] in {'"', "'"}: @@ -113,7 +123,9 @@ def _remove_comments(self, line: str) -> str: result.append(line[i]) i += 1 string_literal_started = not string_literal_started - while i < length and (line[i] != quote_char or (line[i] == quote_char and line[i-1] == '\\')): + while i < length and ( + line[i] != quote_char or (line[i] == quote_char and line[i - 1] == "\\") + ): result.append(line[i]) i += 1 if i < length: @@ -125,4 +137,4 @@ def _remove_comments(self, line: str) -> str: result.append(line[i]) i += 1 - return ''.join(result) + return "".join(result) diff --git a/a2lparser/a2l/a2l_yacc.py b/a2lparser/a2l/a2l_yacc.py index 6f43cf7..27474de 100644 --- a/a2lparser/a2l/a2l_yacc.py +++ b/a2lparser/a2l/a2l_yacc.py @@ -65,7 +65,10 @@ def __init__( """ super().__init__() self.a2l_lex = A2LLex( - debug=debug, optimize=optimize, generated_files_dir=generated_files_dir, lex_table_file=lex_table_file + debug=debug, + optimize=optimize, + generated_files_dir=generated_files_dir, + lex_table_file=lex_table_file, ) self.tokens = self.a2l_lex.tokens self.experimental_error_resolve = False @@ -114,8 +117,18 @@ def p_error(self, p): 
if not p: # End of file reached. This section could be used for validation. return - logger.error(f"Syntax error at line {p.lineno} on token \"{p.value}\" in section {self.a2l_lex.current_section}.") + logger.error( + ( + f"Syntax error at line {p.lineno} on token '{p.value}' " + f"in section {self.a2l_lex.current_section}. " + f"No Grammar rule found for this token." + ) + ) + ################################################## + # This is the final rule which defines the end # + # and the root of the parsed content. # + ################################################## def p_abstract_syntax_tree_final(self, p): """ abstract_syntax_tree_final : a2l_final diff --git a/a2lparser/a2l/ast/abstract_syntax_tree.py b/a2lparser/a2l/ast/abstract_syntax_tree.py index 052a411..cd6566f 100644 --- a/a2lparser/a2l/ast/abstract_syntax_tree.py +++ b/a2lparser/a2l/ast/abstract_syntax_tree.py @@ -38,10 +38,10 @@ class AbstractSyntaxTree: class ASTException(Exception): """ - Raised when a fatal error is encountered while trying to generate an Abstract Syntax Tree dictionary. + Raised when a fatal error is encountered while trying to generate an AST dictionary. """ - def __init__(self, abstract_syntax_tree, dictionary: dict = None) -> None: # type: ignore + def __init__(self, abstract_syntax_tree, dictionary: dict = None) -> None: """ AbstractSyntaxTree Constructor. @@ -64,6 +64,71 @@ def __init__(self, abstract_syntax_tree, dictionary: dict = None) -> None: # ty if not bool(dictionary) and abstract_syntax_tree: self._create_dict_from_ast(abstract_syntax_tree) + @staticmethod + def validate_ast(ast) -> bool: + """ + Validates the abstract syntax tree. + """ + if isinstance(ast, ASTNodes.Abstract_Syntax_Tree): + if getattr(ast, "node") is not None: + return True + elif isinstance(ast, (dict, list, tuple)) and len(ast) > 0: + return True + elif hasattr(ast, "children") and callable(getattr(ast, "children")): + return True + return False + + def find_value( + self, search_value: str, case_sensitive: bool = False, exact_match: bool = False + ) -> "AbstractSyntaxTree": + """ + Returns the dictionaries containing the passed value like this: + { "": {found_dictionaries} } + + Args: + search_value: The search value to search for. + case_sensitive: Whether or not the value should be matched case-sensitive. + exact_match: Whether or not the value should be exactly matched. + + Returns: + dict: A dictionary containing the found values. + """ + return AbstractSyntaxTree( + self._ast, self._find_value(search_value, self._dict, case_sensitive, exact_match) + ) + + def find_sections(self, section_name: str) -> "AbstractSyntaxTree": + """ + Returns an AbstractSyntaxTree object containing the sections of the given section_name. + + Args: + section_name (str): The name of the section to search for. + + Returns: + AbstractSyntaxTree: An AbstractSyntaxTree object containing the found sections. + Dictionary access is wrapped through section name as key. + If only one section is found, it is wrapped in a dictionary. + If multiple sections are found, they are wrapped in a list. 
+ """ + sections = [] + section_name = section_name.upper() + for key, value in self._dict.items(): + if key.upper() == section_name: + sections.append(value) + elif isinstance(value, dict): + if section := AbstractSyntaxTree(self._ast, value).find_sections(section_name): + if isinstance(section[section_name], list): + sections.extend(section[section_name]) + else: + sections.append(section[section_name]) + if len(sections) == 1: + return AbstractSyntaxTree( + abstract_syntax_tree=self._ast, dictionary={section_name: sections[0]} + ) + return AbstractSyntaxTree( + abstract_syntax_tree=self._ast, dictionary={section_name: sections} + ) + def __getitem__(self, key): return self._dict[key] @@ -94,87 +159,104 @@ def __eq__(self, other): def keys(self): """ - returns a view object that contains the keys of the object. + Returns a view object that contains the keys of the object. """ return self._dict.keys() def values(self): """ - this method returns a view object that contains the values of the object. + This method returns a view object that contains the values of the object. """ return self._dict.values() def items(self): """ - this method returns a view object that contains the key-value pairs of the object. + This method returns a view object that contains the key-value pairs of the object. """ return self._dict.items() def update(self, other): """ - updates the dictionary + Updates the dictionary """ self._dict.update(other) def clear(self): """ - clears the dictionary. + Clears the dictionary. """ self._dict.clear() def get_dict(self): """ - returns a reference to the dictionary object. + Returns a reference to the dictionary object. """ return self._dict - @staticmethod - def validate_ast(ast) -> bool: - """ - Validates the abstract syntax tree. - """ - if isinstance(ast, ASTNodes.Abstract_Syntax_Tree): - if getattr(ast, "node") is not None: - return True - elif isinstance(ast, (dict, list, tuple)) and len(ast) > 0: - return True - elif hasattr(ast, "children") and callable(getattr(ast, "children")): - return True - return False - - def find_sections(self, section_name: str) -> "AbstractSyntaxTree": - """ - Returns an AbstractSyntaxTree object containing the sections of the given section_name - """ - sections = [] - section_name = section_name.upper() - for key, value in self._dict.items(): - if key.upper() == section_name: - sections.append(value) - elif isinstance(value, dict): - if section := AbstractSyntaxTree(self._ast, value).find_sections(section_name): - if isinstance(section[section_name], list): - sections.extend(section[section_name]) - else: - sections.append(section[section_name]) - if len(sections) == 1: - return AbstractSyntaxTree(abstract_syntax_tree=self._ast, dictionary={section_name: sections[0]}) - return AbstractSyntaxTree(abstract_syntax_tree=self._ast, dictionary={section_name: sections}) - - def find_value(self, search_value: str, case_sensitive: bool = False, exact_match: bool = False) -> "AbstractSyntaxTree": + def _print_dict(self, dictionary, indent=""): """ - Returns the dictionaries containing the passed value like this: - { "": {found_dictionaries} } + Recursively prints a dictionary in a tree-like structure, similar to the Unix tree command. Args: - search_value: The search value to search for. - case_sensitive: Whether or not the value should be matched case-sensitive. - exact_match: Whether or not the value should be exactly matched. + dictionary (dict): The dictionary to be printed. 
+ indent (str, optional): The indentation string to be used for each level of the tree. + Defaults to an empty string. + + Returns: + str: The string representation of the dictionary in the tree-like structure. + + Examples: + >>> dictionary = {'a': {'b': 1, 'c': 2}, 'd': [{'e': 3}, {'f': 4}]} + >>> print(_print_dict(dictionary)) + . + ├── a: + │ ├── b: 1 + │ ├── c: 2 + │ └── d: + │ ├── e: 3 + │ └── f: 4 + └── d: + ├── e: 3 + └── f: 4 """ - return AbstractSyntaxTree(self._ast, self._find_value(search_value, self._dict, case_sensitive, exact_match)) + if not isinstance(dictionary, dict): + return f"{indent}└── {dictionary}" + result = "" + for i, (key, value) in enumerate(dictionary.items()): + if isinstance(value, dict): + if i == len(dictionary) - 1: + result += f"\n{indent}└── {key}:" + result += self._print_dict(value, f"{indent} ") + else: + result += f"\n{indent}├── {key}:" + result += self._print_dict(value, f"{indent}│ ") + elif isinstance(value, list): + for item in value: + if isinstance(item, dict): + if i == len(dictionary) - 1: + result += f"\n{indent}└── {key}:" + result += self._print_dict(item, f"{indent} ") + else: + result += f"\n{indent}├── {key}:" + result += self._print_dict(item, f"{indent}│ ") + else: + result += ( + f"\n{indent}└── {key}: {item}" + if i == len(dictionary) - 1 + else f"\n{indent}├── {key}: {item}" + ) + elif i == len(dictionary) - 1: + result += f"\n{indent}└── {key}: {value}" + else: + result += f"\n{indent}├── {key}: {value}" + return result def _find_value( - self, search_expression: Union[str, re.Pattern], dictionary: dict, case_sensitive: bool, exact_match: bool + self, + search_expression: Union[str, re.Pattern], + dictionary: dict, + case_sensitive: bool, + exact_match: bool, ) -> dict: """ Searches recursively for a search expression under the values of the given dictionary. @@ -184,11 +266,16 @@ def _find_value( dictionary: Expects a dctionary to be passed. case_sensitive: Whether or not the value should be matched case-sensitive. exact_match: Whether or not the value should be exactly matched. + + Returns: + dict: A dictionary containing the found values. 
""" result = {} if isinstance(dictionary, dict): for key, value in dictionary.items(): - if self._is_value_match(value, search_expression, case_sensitive, exact_match, True): + if self._is_value_match( + value, search_expression, case_sensitive, exact_match, True + ): if key in result: if not isinstance(result[key], list): result[key] = [result[key]] @@ -197,7 +284,9 @@ def _find_value( if isinstance(value, list): for obj in value: if isinstance(obj, dict): - if section := self._find_value(search_expression, obj, case_sensitive, exact_match): + if section := self._find_value( + search_expression, obj, case_sensitive, exact_match + ): if key in result: if isinstance(result[key], list): result[key].append(section) @@ -205,14 +294,20 @@ def _find_value( result[key] = section else: result[key] = [result[key], section] - if self._is_value_match(value, search_expression, case_sensitive, exact_match, False): + if self._is_value_match( + value, search_expression, case_sensitive, exact_match, False + ): if isinstance(value, dict): - if section := self._find_value(search_expression, value, case_sensitive, exact_match): + if section := self._find_value( + search_expression, value, case_sensitive, exact_match + ): result[key] = section else: result[key] = value elif isinstance(value, dict): - if section := self._find_value(search_expression, value, case_sensitive, exact_match): + if section := self._find_value( + search_expression, value, case_sensitive, exact_match + ): result[key] = section elif isinstance(value, dict): self._find_value(search_expression, value, case_sensitive, exact_match) @@ -247,31 +342,55 @@ def _is_value_match( values = str(value) if isinstance(values, str): - if self._is_value_match_expression(values, search_expression, case_sensitive, exact_match): + if self._is_value_match_expression( + values, search_expression, case_sensitive, exact_match + ): return True elif not recursive_search and isinstance(values, list): for obj in values: - if not isinstance(obj, (dict, list, tuple, AbstractSyntaxTree)) and self._is_value_match_expression( + if not isinstance( + obj, (dict, list, tuple, AbstractSyntaxTree) + ) and self._is_value_match_expression( str(obj), search_expression, case_sensitive, exact_match ): return True elif recursive_search and isinstance(values, list): for obj in values: - if self._is_value_match(obj, search_expression, case_sensitive, exact_match, recursive_search): + if self._is_value_match( + obj, search_expression, case_sensitive, exact_match, recursive_search + ): return True return False def _is_value_match_expression( - self, value_string: str, search_expression: Union[str, re.Pattern], case_sensitive: bool, exact_match: bool + self, + value_string: str, + search_expression: Union[str, re.Pattern], + case_sensitive: bool, + exact_match: bool, ): """ - Checks if the given string matches the given search expression + Checks if the given value string matches the search expression. + + Args: + value_string (str): The value string to be checked. + search_expression (Union[str, re.Pattern]): The expression to search for. + case_sensitive (bool): Whether the search should be case-sensitive. + exact_match (bool): Whether the search should be an exact match. + + Returns: + bool: True if the value string matches the search expression, False otherwise. 
""" if isinstance(search_expression, str): if not case_sensitive: search_expression = search_expression.lower() value_string = value_string.lower() - if exact_match and value_string == search_expression or not exact_match and search_expression in value_string: + if ( + exact_match + and value_string == search_expression + or not exact_match + and search_expression in value_string + ): return True elif isinstance(search_expression, re.Pattern): return search_expression.search(value_string) is not None @@ -286,7 +405,15 @@ def _is_node_name_keyword( + LexerKeywords.keywords_datatypes, ) -> bool: """ - Returns true if the given node name is a A2L keyword. + Check if the given node name is a keyword. + + Args: + node_name (str): The name of the node. + keywords (list, optional): The list of keywords. Defaults to the combination of + keywords_type, keywords_enum, keywords_section, and keywords_datatypes. + + Returns: + bool: True if the node name is a keyword, False otherwise. """ return any(node_name.upper() == keyword for keyword in keywords) @@ -316,7 +443,12 @@ def _create_dict_from_ast(self, abstract_syntax_tree) -> None: def _add_children(self, node, parent_dict): """ - Will recursively add the children of the given node to the given parent dictionary. + Recursively adds children nodes to the parent dictionary based on the given node. + + Args: + self: The object instance. + node: The current node to process. + parent_dict: The dictionary representing the parent node. """ if not hasattr(node, "children"): return @@ -363,35 +495,3 @@ def _add_children(self, node, parent_dict): child_dict[attr_name] = attr_value self._add_children(child_obj, child_dict) - - def _print_dict(self, dictionary, indent=""): - """ - Prints the AST dictionary in the style of the unix tree command. 
- """ - if not isinstance(dictionary, dict): - return f"{indent}└── {dictionary}" - result = "" - for i, (key, value) in enumerate(dictionary.items()): - if isinstance(value, dict): - if i == len(dictionary) - 1: - result += f"\n{indent}└── {key}:" - result += self._print_dict(value, f"{indent} ") - else: - result += f"\n{indent}├── {key}:" - result += self._print_dict(value, f"{indent}│ ") - elif isinstance(value, list): - for item in value: - if isinstance(item, dict): - if i == len(dictionary) - 1: - result += f"\n{indent}└── {key}:" - result += self._print_dict(item, f"{indent} ") - else: - result += f"\n{indent}├── {key}:" - result += self._print_dict(item, f"{indent}│ ") - else: - result += f"\n{indent}└── {key}: {item}" if i == len(dictionary) - 1 else f"\n{indent}├── {key}: {item}" - elif i == len(dictionary) - 1: - result += f"\n{indent}└── {key}: {value}" - else: - result += f"\n{indent}├── {key}: {value}" - return result diff --git a/a2lparser/a2l/ast/ast_generator.py b/a2lparser/a2l/ast/ast_generator.py index b6c485d..da42426 100644 --- a/a2lparser/a2l/ast/ast_generator.py +++ b/a2lparser/a2l/ast/ast_generator.py @@ -120,7 +120,9 @@ def __init__(self, cfg_filename: str, out_filename: str) -> None: """ self.cfg_filename = cfg_filename self.out_filename = out_filename - self.node_config = [NodeConfiguration(name, content) for (name, content) in self.parse_config()] + self.node_config = [ + NodeConfiguration(name, content) for (name, content) in self.parse_config() + ] def generate(self, use_clean_names: bool = True) -> None: """ @@ -149,11 +151,18 @@ def parse_config(self): colon_i = line.find(":") left_parenthesis_i = line.find("(") right_parenthesis_i = line.find(")") - if colon_i < 1 or left_parenthesis_i <= colon_i or right_parenthesis_i <= left_parenthesis_i: - raise RuntimeError(f"Invalid line in {self.cfg_filename}:\n{line}\n" % (self.cfg_filename, line)) + if ( + colon_i < 1 + or left_parenthesis_i <= colon_i + or right_parenthesis_i <= left_parenthesis_i + ): + raise RuntimeError( + f"Invalid line in {self.cfg_filename}:\n{line}\n" + % (self.cfg_filename, line) + ) name = line[:colon_i] - val = line[left_parenthesis_i + 1: right_parenthesis_i] + val = line[left_parenthesis_i + 1 : right_parenthesis_i] vallist = [v.strip() for v in val.split(",")] if val else [] yield name, vallist @@ -235,7 +244,9 @@ def __init__(self, AxisPoints): if "_" in clean_name: index = [i for i, ltr in enumerate(clean_name) if ltr == "_"] indices = [i + 1 for i in index] - clean_name = "".join(c.upper() if i in indices else c for i, c in enumerate(clean_name)) + clean_name = "".join( + c.upper() if i in indices else c for i, c in enumerate(clean_name) + ) self.node_name = clean_name @@ -254,12 +265,8 @@ def __init__(self, AxisPoints): arguments_list += ")" - for entry in self.entries: - clean_entries.append(entry.strip("?")) - - slots = ", ".join(f"'{e}'" for e in clean_entries) - slots += ", '__weakref__'" - + clean_entries.extend(entry.strip("?") for entry in self.entries) + slots = ", ".join(f"'{e}'" for e in clean_entries) + ", '__weakref__'" else: slots = "'__weakref__'" arguments_list = "(self)" @@ -281,23 +288,18 @@ def children(self): nodelist = [] return tuple(nodelist) """ - clean_children = [] - clean_seq_children = [] - for child in self.children: - clean_children.append(child.strip("?")) - - for seq_child in self.seq_children: - clean_seq_children.append(seq_child.strip("?")) - + clean_children = [child.strip("?") for child in self.children] + clean_seq_children = 
[seq_child.strip("?") for seq_child in self.seq_children] src = " def children(self):\n" if self.entries: src += " nodelist = []\n" for child in clean_children: - src += (" if self.%(child)s is not None:" + ' nodelist.append(("%(child)s", self.%(child)s))\n') % ( - dict(child=child) - ) + src += ( + " if self.%(child)s is not None:" + + ' nodelist.append(("%(child)s", self.%(child)s))\n' + ) % (dict(child=child)) for seq_child in clean_seq_children: src += ( @@ -318,8 +320,9 @@ def _gen_attr_names(self): Generated source example: attr_names = ('AxisPoints', ) """ - clean_attributes = [] - for attribute in self.attributes: - clean_attributes.append(attribute.strip("?")) - src = " attr_names = (" + ''.join(f"{nm!r}, " for nm in clean_attributes) + ')' - return src + clean_attributes = [attribute.strip("?") for attribute in self.attributes] + return ( + " attr_names = (" + + "".join(f"{nm!r}, " for nm in clean_attributes) + + ")" + ) diff --git a/a2lparser/a2l/ast/ast_node_stack.py b/a2lparser/a2l/ast/ast_node_stack.py index 82e2c30..387b62d 100644 --- a/a2lparser/a2l/ast/ast_node_stack.py +++ b/a2lparser/a2l/ast/ast_node_stack.py @@ -21,12 +21,17 @@ class ASTNodeStack: """ - AST Node Stack. + ASTNodeStack. - Keeps track of current nodes parsed in the grammar rules. + Utility class to keep track of current nodes parsed in the grammar rules. """ def __init__(self): + """ + Initializes the ASTNodeStack. + + Creates an instance of ASTNodeStack with an empty stack. + """ self._stack = [] def create_node(self, node): diff --git a/a2lparser/a2l/lex/lexer_regex.py b/a2lparser/a2l/lex/lexer_regex.py index 3d90bd9..a22f318 100644 --- a/a2lparser/a2l/lex/lexer_regex.py +++ b/a2lparser/a2l/lex/lexer_regex.py @@ -52,11 +52,16 @@ class LexerRegex: exponent_part = r"""([eE][-+]?[0-9]+)""" fractional_constant = r"""([+-]?[0-9]+\.[0-9]+)|([+-]?\.[0-9]+)|([+-]?[0-9]+\.)""" floating_constant = ( - f"( ( (({fractional_constant}){exponent_part}?) | ([0-9]+{exponent_part}) | ([+-]?[0-9]+{exponent_part}) )[FfLl]?)" + f"( ( (({fractional_constant}){exponent_part}?) 
| " + f"([0-9]+{exponent_part}) | ([+-]?[0-9]+{exponent_part}) )[FfLl]?)" ) binary_exponent_part = r"""([pP][+-]?[0-9]+)""" - hex_fractional_constant = f"((({hex_digits}" + r""")?\.""" + hex_digits + ")|(" + hex_digits + r"""\.))""" - hex_floating_constant = f"({hex_prefix}({hex_digits}|{hex_fractional_constant}){binary_exponent_part}[FfLl]?)" + hex_fractional_constant = ( + f"((({hex_digits}" + r""")?\.""" + hex_digits + ")|(" + hex_digits + r"""\.))""" + ) + hex_floating_constant = ( + f"({hex_prefix}({hex_digits}|{hex_fractional_constant}){binary_exponent_part}[FfLl]?)" + ) ############################# # RegEx for parsing strings # diff --git a/a2lparser/a2l/rules/rules_sections.py b/a2lparser/a2l/rules/rules_sections.py index 809c562..508ac5c 100644 --- a/a2lparser/a2l/rules/rules_sections.py +++ b/a2lparser/a2l/rules/rules_sections.py @@ -126,7 +126,9 @@ def p_annotation_opt(self, p): """ node = self.stack.get_or_create_node(ASTNodes.Annotation_Opt) self.stack.add_node_param( - node, [ASTNodes.Annotation_Label, ASTNodes.Annotation_Origin, ASTNodes.Annotation_Text], p[1] + node, + [ASTNodes.Annotation_Label, ASTNodes.Annotation_Origin, ASTNodes.Annotation_Text], + p[1], ) p[0] = node @@ -189,7 +191,12 @@ def p_axis_descr(self, p): END AXIS_DESCR """ p[0] = ASTNodes.Axis_Descr( - Attribute=p[3], InputQuantity=p[4], Conversion=p[5], MaxAxisPoints=p[6], LowerLimit=p[7], UpperLimit=p[8] + Attribute=p[3], + InputQuantity=p[4], + Conversion=p[5], + MaxAxisPoints=p[6], + LowerLimit=p[7], + UpperLimit=p[8], ) if len(p) == 12: p[0].OptionalParams = p[9] @@ -253,7 +260,9 @@ def p_axis_descr_opt_objects_list(self, p): axis_descr_opt : annotation """ node = self.stack.get_or_create_node(ASTNodes.Axis_Descr_Opt) - self.stack.add_node_object_list(node_class=node, ast_node_names=[ASTNodes.Annotation], param=p[1]) + self.stack.add_node_object_list( + node_class=node, ast_node_names=[ASTNodes.Annotation], param=p[1] + ) p[0] = node def p_axis_descr_opt_list(self, p): @@ -266,11 +275,13 @@ def p_axis_descr_opt_list(self, p): def p_axis_pts(self, p): """ axis_pts : BEGIN AXIS_PTS \ - ident string_literal constant ident ident constant ident constant constant constant \ + ident string_literal constant ident ident constant \ + ident constant constant constant \ END AXIS_PTS | BEGIN AXIS_PTS \ - ident string_literal constant ident ident constant ident constant constant constant axis_pts_opt_list \ + ident string_literal constant ident ident constant \ + ident constant constant constant axis_pts_opt_list \ END AXIS_PTS """ p[0] = self.stack.create_node( @@ -343,8 +354,9 @@ def p_axis_pts_opt_objects(self, p): ASTNodes.Max_Refresh, ASTNodes.Extended_Limits, ASTNodes.Symbol_Link, - ASTNodes.Function_List], - param=p[1] + ASTNodes.Function_List, + ], + param=p[1], ) p[0] = node @@ -354,7 +366,9 @@ def p_axis_pts_opt_objects_list(self, p): | if_data """ node = self.stack.get_or_create_node(ASTNodes.Axis_Pts_Opt) - self.stack.add_node_object_list(node_class=node, ast_node_names=[ASTNodes.Annotation, ASTNodes.If_Data], param=p[1]) + self.stack.add_node_object_list( + node_class=node, ast_node_names=[ASTNodes.Annotation, ASTNodes.If_Data], param=p[1] + ) p[0] = node def p_axis_pts_opt_list(self, p): @@ -402,10 +416,15 @@ def p_axis_pts_5(self, p): def p_axis_rescale_x(self, p): """ - axis_rescale_x : AXIS_RESCALE_X constant datatype_enum constant indexorder_enum addrtype_enum + axis_rescale_x : AXIS_RESCALE_X constant datatype_enum \ + constant indexorder_enum addrtype_enum """ p[0] = ASTNodes.Axis_Rescale_X( - 
Position=p[2], Datatype=p[3], MaxNumberOfRescalePairs=p[4], IndexIncr=p[5], Addressing=p[6] + Position=p[2], + Datatype=p[3], + MaxNumberOfRescalePairs=p[4], + IndexIncr=p[5], + Addressing=p[6], ) def p_bit_mask(self, p): @@ -529,7 +548,8 @@ def p_calibration_access(self, p): def p_calibration_handle(self, p): """ calibration_handle : BEGIN CALIBRATION_HANDLE constant_list END CALIBRATION_HANDLE - | BEGIN CALIBRATION_HANDLE constant_list calibration_handle_text END CALIBRATION_HANDLE + | BEGIN CALIBRATION_HANDLE constant_list calibration_handle_text \ + END CALIBRATION_HANDLE """ p[0] = ASTNodes.Calibration_Handle(Handle=p[3]) if len(p) == 7: @@ -575,7 +595,8 @@ def p_calibration_method_opt_list(self, p): def p_characteristic(self, p): """ characteristic : BEGIN CHARACTERISTIC \ - ident string_literal characteristic_enum constant ident constant ident constant constant \ + ident string_literal characteristic_enum constant ident \ + constant ident constant constant \ END CHARACTERISTIC | BEGIN CHARACTERISTIC \ @@ -679,7 +700,9 @@ def p_characteristic_opt_objects_list(self, p): """ node = self.stack.get_or_create_node(ASTNodes.Characteristic_Opt) self.stack.add_node_object_list( - node_class=node, ast_node_names=[ASTNodes.Annotation, ASTNodes.Axis_Descr, ASTNodes.If_Data], param=p[1] + node_class=node, + ast_node_names=[ASTNodes.Annotation, ASTNodes.Axis_Descr, ASTNodes.If_Data], + param=p[1], ) p[0] = node @@ -721,10 +744,13 @@ def p_compu_method(self, p): END COMPU_METHOD | BEGIN COMPU_METHOD \ - ident string_literal conversion_type_enum string_literal string_literal compu_method_opt_list \ + ident string_literal conversion_type_enum \ + string_literal string_literal compu_method_opt_list \ END COMPU_METHOD """ - p[0] = ASTNodes.Compu_Method(Name=p[3], LongIdentifier=p[4], ConversionType=p[5], Format=p[6], Unit=p[7]) + p[0] = ASTNodes.Compu_Method( + Name=p[3], LongIdentifier=p[4], ConversionType=p[5], Format=p[6], Unit=p[7] + ) if len(p) == 11: p[0].OptionalParams = p[8] self.stack.remove_node(ASTNodes.Compu_Method_Opt) @@ -737,7 +763,9 @@ def p_compu_method_opt_params(self, p): """ node = self.stack.get_or_create_node(ASTNodes.Compu_Method_Opt) self.stack.add_node_param( - node_class=node, ast_node_names=[ASTNodes.Compu_Tab_Ref, ASTNodes.Ref_Unit, ASTNodes.Status_String_Ref], param=p[1] + node_class=node, + ast_node_names=[ASTNodes.Compu_Tab_Ref, ASTNodes.Ref_Unit, ASTNodes.Status_String_Ref], + param=p[1], ) p[0] = node @@ -774,10 +802,17 @@ def p_compu_tab(self, p): END COMPU_TAB | BEGIN COMPU_TAB \ - ident string_literal conversion_type_enum constant axis_points_list compu_tab_opt_list \ + ident string_literal conversion_type_enum constant \ + axis_points_list compu_tab_opt_list \ END COMPU_TAB """ - p[0] = ASTNodes.Compu_Tab(Name=p[3], LongIdentifier=p[4], ConversionType=p[5], NumberValuePairs=p[6], Axis_Points=p[7]) + p[0] = ASTNodes.Compu_Tab( + Name=p[3], + LongIdentifier=p[4], + ConversionType=p[5], + NumberValuePairs=p[6], + Axis_Points=p[7], + ) if len(p) == 11: p[0].OptionalParams = p[8] self.stack.remove_node(ASTNodes.Compu_Tab_Opt) @@ -819,11 +854,16 @@ def p_compu_vtab(self, p): END COMPU_VTAB | BEGIN COMPU_VTAB \ - ident string_literal conversion_type_enum constant inVal_outVal_list default_value \ + ident string_literal conversion_type_enum constant \ + inVal_outVal_list default_value \ END COMPU_VTAB """ p[0] = ASTNodes.Compu_Vtab( - Name=p[3], LongIdentifier=p[4], ConversionType=p[5], NumberValuePairs=p[6], InVal_OutVal=p[7] + Name=p[3], + LongIdentifier=p[4], + 
ConversionType=p[5], + NumberValuePairs=p[6], + InVal_OutVal=p[7], ) if len(p) == 11: p[0].Default_Value = getattr(p[8], p[8].__slots__[0]) @@ -835,10 +875,13 @@ def p_compu_vtab_range(self, p): END COMPU_VTAB_RANGE | BEGIN COMPU_VTAB_RANGE \ - ident string_literal constant inVal_MinMax_outVal_list default_value \ + ident string_literal constant inVal_MinMax_outVal_list \ + default_value \ END COMPU_VTAB_RANGE """ - p[0] = ASTNodes.Compu_Vtab_Range(Name=p[3], LongIdentifier=p[4], NumberValueTriples=p[5], InVal_MinMax_OutVal=p[6]) + p[0] = ASTNodes.Compu_Vtab_Range( + Name=p[3], LongIdentifier=p[4], NumberValueTriples=p[5], InVal_MinMax_OutVal=p[6] + ) if len(p) == 10: p[0].Default_Value = getattr(p[7], p[7].__slots__[0]) @@ -1097,7 +1140,9 @@ def p_frame_opt_params(self, p): frame_opt : frame_measurement """ node = self.stack.get_or_create_node(ASTNodes.Frame_Opt) - self.stack.add_node_param(node_class=node, ast_node_names=[ASTNodes.Frame_Measurement], param=p[1]) + self.stack.add_node_param( + node_class=node, ast_node_names=[ASTNodes.Frame_Measurement], param=p[1] + ) p[0] = node def p_frame_opt_objects_list(self, p): @@ -1105,7 +1150,9 @@ def p_frame_opt_objects_list(self, p): frame_opt : if_data """ node = self.stack.get_or_create_node(ASTNodes.Frame_Opt) - self.stack.add_node_object_list(node_class=node, ast_node_names=[ASTNodes.If_Data], param=p[1]) + self.stack.add_node_object_list( + node_class=node, ast_node_names=[ASTNodes.If_Data], param=p[1] + ) p[0] = node def p_frame_opt_list(self, p): @@ -1136,7 +1183,9 @@ def p_function_opt_params(self, p): function_opt : function_version """ node = self.stack.get_or_create_node(ASTNodes.Function_Opt) - self.stack.add_node_param(node_class=node, ast_node_names=[ASTNodes.Function_Version], param=p[1]) + self.stack.add_node_param( + node_class=node, ast_node_names=[ASTNodes.Function_Version], param=p[1] + ) p[0] = node def p_function_opt_objects(self, p): @@ -1223,7 +1272,12 @@ def p_group_opt_objects(self, p): node = self.stack.get_or_create_node(ASTNodes.Group_Opt) self.stack.add_node_object( node_class=node, - ast_node_names=[ASTNodes.Function_List, ASTNodes.Ref_Characteristic, ASTNodes.Ref_Measurement, ASTNodes.Sub_Group], + ast_node_names=[ + ASTNodes.Function_List, + ASTNodes.Ref_Characteristic, + ASTNodes.Ref_Measurement, + ASTNodes.Sub_Group, + ], param=p[1], ) p[0] = node @@ -1234,7 +1288,9 @@ def p_group_opt_objects_list(self, p): | if_data """ node = self.stack.get_or_create_node(ASTNodes.Group_Opt) - self.stack.add_node_object_list(node_class=node, ast_node_names=[ASTNodes.Annotation, ASTNodes.If_Data], param=p[1]) + self.stack.add_node_object_list( + node_class=node, ast_node_names=[ASTNodes.Annotation, ASTNodes.If_Data], param=p[1] + ) p[0] = node def p_group_opt_list(self, p): @@ -1266,7 +1322,9 @@ def p_header_opt(self, p): | version """ node = self.stack.get_or_create_node(ASTNodes.Header_Opt) - self.stack.add_node_param(node_class=node, ast_node_names=[ASTNodes.Project_No, ASTNodes.Version], param=p[1]) + self.stack.add_node_param( + node_class=node, ast_node_names=[ASTNodes.Project_No, ASTNodes.Version], param=p[1] + ) p[0] = node def p_header_opt_list(self, p): @@ -1289,7 +1347,10 @@ def p_if_data(self, p): data_params = [x for x in p[2] if not isinstance(x, ASTNodes.If_Data_Block)] if_data_block = [x for x in p[2] if isinstance(x, ASTNodes.If_Data_Block)] p[0] = ASTNodes.If_Data( - Name=p[1], OptionalParams=ASTNodes.If_Data_Opt(DataParams=data_params, If_Data_Block=if_data_block) + Name=p[1], + 
OptionalParams=ASTNodes.If_Data_Opt( + DataParams=data_params, If_Data_Block=if_data_block + ), ) def p_if_data_mandatory_only(self, p): @@ -1362,7 +1423,9 @@ def p_if_data_block(self, p): """ if_data_block = [x for x in p[2] if isinstance(x, ASTNodes.If_Data_Block)] data_params = [x for x in p[2] if not isinstance(x, ASTNodes.If_Data_Block)] - p[0] = ASTNodes.If_Data_Block(Name=p[1], DataParams=data_params, If_Data_Block=if_data_block) + p[0] = ASTNodes.If_Data_Block( + Name=p[1], DataParams=data_params, If_Data_Block=if_data_block + ) def p_if_data_block_empty(self, p): """ @@ -1463,7 +1526,9 @@ def p_instance_opt_objects_list(self, p): """ node = self.stack.get_or_create_node(ASTNodes.Instance_Opt) self.stack.add_node_object_list( - node_class=node, ast_node_names=[ASTNodes.Annotation, ASTNodes.If_Data, ASTNodes.Overwrite], param=p[1] + node_class=node, + ast_node_names=[ASTNodes.Annotation, ASTNodes.If_Data, ASTNodes.Overwrite], + param=p[1], ) p[0] = node @@ -1622,7 +1687,9 @@ def p_measurement_opt_objects_list(self, p): | if_data """ node = self.stack.get_or_create_node(ASTNodes.Measurement_Opt) - self.stack.add_node_object_list(node_class=node, ast_node_names=[ASTNodes.Annotation, ASTNodes.If_Data], param=p[1]) + self.stack.add_node_object_list( + node_class=node, ast_node_names=[ASTNodes.Annotation, ASTNodes.If_Data], param=p[1] + ) p[0] = node def p_measurement_opt_list(self, p): @@ -1676,7 +1743,14 @@ def p_memory_segment(self, p): END MEMORY_SEGMENT """ p[0] = ASTNodes.Memory_Segment( - Name=p[3], LongIdentifier=p[4], PrgType=p[5], MemoryType=p[6], Attribute=p[7], Address=p[8], Size=p[9], Offset=p[10] + Name=p[3], + LongIdentifier=p[4], + PrgType=p[5], + MemoryType=p[6], + Attribute=p[7], + Address=p[8], + Size=p[9], + Offset=p[10], ) if len(p) == 14: @@ -2107,7 +2181,9 @@ def p_project_opt_objects_list(self, p): project_opt : module """ node = self.stack.get_or_create_node(ASTNodes.Project_Opt) - self.stack.add_node_object_list(node_class=node, ast_node_names=[ASTNodes.Module], param=p[1]) + self.stack.add_node_object_list( + node_class=node, ast_node_names=[ASTNodes.Module], param=p[1] + ) p[0] = node def p_project_opt_list(self, p): @@ -2286,7 +2362,9 @@ def p_record_layout_opt_objects_list(self, p): record_layout_opt : reserved """ node = self.stack.get_or_create_node(ASTNodes.Record_Layout_Opt) - self.stack.add_node_object_list(node_class=node, ast_node_names=[ASTNodes.Reserved], param=p[1]) + self.stack.add_node_object_list( + node_class=node, ast_node_names=[ASTNodes.Reserved], param=p[1] + ) p[0] = node def p_record_layout_opt_list(self, p): @@ -2572,8 +2650,15 @@ def p_transformer(self, p): END TRANSFORMER """ - p[0] = ASTNodes.Transformer(Name=p[3], Version=p[4], Executable32=p[5], Executable64=p[6], - Timeout=p[7], Trigger=p[8], InverseTransformer=p[9]) + p[0] = ASTNodes.Transformer( + Name=p[3], + Version=p[4], + Executable32=p[5], + Executable64=p[6], + Timeout=p[7], + Trigger=p[8], + InverseTransformer=p[9], + ) if len(p) == 13: p[0].OptionalParams = p[10] self.stack.remove_node(ASTNodes.Transformer_Opt) @@ -2609,7 +2694,8 @@ def p_transformer_in_objects(self, p): def p_transformer_out_objects(self, p): """ - transformer_out_objects : BEGIN TRANSFORMER_OUT_OBJECTS ident_list END TRANSFORMER_OUT_OBJECTS + transformer_out_objects : BEGIN TRANSFORMER_OUT_OBJECTS ident_list \ + END TRANSFORMER_OUT_OBJECTS """ p[0] = ASTNodes.Transformer_Out_Objects(p[3]) @@ -2624,15 +2710,17 @@ def p_typedef_axis(self, p): ident constant constant constant typedef_axis_opt_list \ END 
TYPEDEF_AXIS """ - p[0] = ASTNodes.Typedef_Axis(Name=p[3], - LongIdentifier=p[4], - InputQuantity=p[5], - RecordLayout=p[6], - MaxDiff=p[7], - Conversion=p[8], - MaxAxisPoints=p[9], - LowerLimit=p[10], - UpperLimit=p[11]) + p[0] = ASTNodes.Typedef_Axis( + Name=p[3], + LongIdentifier=p[4], + InputQuantity=p[5], + RecordLayout=p[6], + MaxDiff=p[7], + Conversion=p[8], + MaxAxisPoints=p[9], + LowerLimit=p[10], + UpperLimit=p[11], + ) if len(p) == 15: p[0].OptionalParams = p[12] self.stack.remove_node(ASTNodes.Typedef_Axis_Opt) @@ -2662,7 +2750,7 @@ def p_typedef_axis_opt_params(self, p): ASTNodes.Format, ASTNodes.Monotony, ASTNodes.Phys_Unit, - ASTNodes.Step_Size + ASTNodes.Step_Size, ], param=p[1], ) @@ -2673,19 +2761,24 @@ def p_typedef_axis_opt_objects(self, p): typedef_axis_opt : extended_limits """ node = self.stack.get_or_create_node(ASTNodes.Typedef_Axis_Opt) - self.stack.add_node_object(node_class=node, ast_node_names=[ASTNodes.Extended_Limits], param=p[1]) + self.stack.add_node_object( + node_class=node, ast_node_names=[ASTNodes.Extended_Limits], param=p[1] + ) p[0] = node def p_typedef_blob(self, p): """ typedef_blob : BEGIN TYPEDEF_BLOB ident string_literal constant END TYPEDEF_BLOB - | BEGIN TYPEDEF_BLOB ident string_literal constant address_type END TYPEDEF_BLOB + | BEGIN TYPEDEF_BLOB ident string_literal constant address_type \ + END TYPEDEF_BLOB """ if len(p) == 8: p[0] = ASTNodes.Typedef_Blob(Name=p[3], LongIdentifier=p[4], Size=p[5]) elif len(p) == 9: address_type = p[6].AddressType - p[0] = ASTNodes.Typedef_Blob(Name=p[3], LongIdentifier=p[4], Size=p[5], Address_Type=address_type) + p[0] = ASTNodes.Typedef_Blob( + Name=p[3], LongIdentifier=p[4], Size=p[5], Address_Type=address_type + ) def p_typedef_characteristic(self, p): """ @@ -2698,14 +2791,16 @@ def p_typedef_characteristic(self, p): ident constant constant typedef_characteristic_opt_list \ END TYPEDEF_CHARACTERISTIC """ - p[0] = ASTNodes.Typedef_Characteristic(Name=p[3], - LongIdentifier=p[4], - Type=p[5], - RecordLayout=p[6], - MaxDiff=p[7], - Conversion=p[8], - LowerLimit=p[9], - UpperLimit=p[10]) + p[0] = ASTNodes.Typedef_Characteristic( + Name=p[3], + LongIdentifier=p[4], + Type=p[5], + RecordLayout=p[6], + MaxDiff=p[7], + Conversion=p[8], + LowerLimit=p[9], + UpperLimit=p[10], + ) if len(p) == 14: p[0].OptionalParams = p[11] self.stack.remove_node(ASTNodes.Typedef_Characteristic_Opt) @@ -2766,7 +2861,9 @@ def p_typedef_characteristic_opt_objects_list(self, p): typedef_characteristic_opt : axis_descr """ node = self.stack.get_or_create_node(ASTNodes.Typedef_Characteristic_Opt) - self.stack.add_node_object_list(node_class=node, ast_node_names=[ASTNodes.Axis_Descr], param=p[1]) + self.stack.add_node_object_list( + node_class=node, ast_node_names=[ASTNodes.Axis_Descr], param=p[1] + ) p[0] = node def p_typedef_measurement(self, p): @@ -2780,14 +2877,16 @@ def p_typedef_measurement(self, p): constant constant constant typedef_measurement_opt_list \ END TYPEDEF_MEASUREMENT """ - p[0] = ASTNodes.Typedef_Measurement(Name=p[3], - LongIdentifier=p[4], - Datatype=p[5], - Conversion=p[6], - Resolution=p[7], - Accuracy=p[8], - LowerLimit=p[9], - UpperLimit=p[10]) + p[0] = ASTNodes.Typedef_Measurement( + Name=p[3], + LongIdentifier=p[4], + Datatype=p[5], + Conversion=p[6], + Resolution=p[7], + Accuracy=p[8], + LowerLimit=p[9], + UpperLimit=p[10], + ) if len(p) == 14: p[0].OptionalParams = p[11] self.stack.remove_node(ASTNodes.Typedef_Measurement_Opt) @@ -2845,7 +2944,8 @@ def p_typedef_measurement_opt_objects(self, p): def 
p_typedef_structure(self, p): """ - typedef_structure : BEGIN TYPEDEF_STRUCTURE ident string_literal constant END TYPEDEF_STRUCTURE + typedef_structure : BEGIN TYPEDEF_STRUCTURE ident string_literal constant \ + END TYPEDEF_STRUCTURE | BEGIN TYPEDEF_STRUCTURE \ ident string_literal constant \ typedef_structure_opt_list \ @@ -2886,7 +2986,9 @@ def p_typedef_structure_opt_objects_list(self, p): typedef_structure_opt : structure_component """ node = self.stack.get_or_create_node(ASTNodes.Typedef_Structure_Opt) - self.stack.add_node_object_list(node_class=node, ast_node_names=[ASTNodes.Structure_Component], param=p[1]) + self.stack.add_node_object_list( + node_class=node, ast_node_names=[ASTNodes.Structure_Component], param=p[1] + ) p[0] = node def p_unit(self, p): @@ -2913,8 +3015,11 @@ def p_unit_opt_objects(self, p): | unit_conversion """ node = self.stack.get_or_create_node(ASTNodes.Unit_Opt) - self.stack.add_node_object(node_class=node, ast_node_names=[ - ASTNodes.Si_Exponents, ASTNodes.Unit_Conversion], param=p[1]) + self.stack.add_node_object( + node_class=node, + ast_node_names=[ASTNodes.Si_Exponents, ASTNodes.Unit_Conversion], + param=p[1], + ) p[0] = node def p_unit_opt_list(self, p): @@ -2959,7 +3064,9 @@ def p_user_rights_opt_objects_list(self, p): user_rights_opt : ref_group """ node = self.stack.get_or_create_node(ASTNodes.User_Rights_Opt) - self.stack.add_node_object_list(node_class=node, ast_node_names=[ASTNodes.Ref_Group], param=p[1]) + self.stack.add_node_object_list( + node_class=node, ast_node_names=[ASTNodes.Ref_Group], param=p[1] + ) p[0] = node def p_user_rights_opt_list(self, p): @@ -2980,7 +3087,8 @@ def p_var_characteristic(self, p): """ var_characteristic : BEGIN VAR_CHARACTERISTIC ident END VAR_CHARACTERISTIC | BEGIN VAR_CHARACTERISTIC ident ident_list END VAR_CHARACTERISTIC - | BEGIN VAR_CHARACTERISTIC ident ident_list var_address END VAR_CHARACTERISTIC + | BEGIN VAR_CHARACTERISTIC ident ident_list var_address \ + END VAR_CHARACTERISTIC """ if len(p) == 8: p[0] = ASTNodes.Var_Characteristic(Name=p[3], CriterionName=p[4], Var_Address=p[5]) @@ -2993,10 +3101,13 @@ def p_var_criterion(self, p): """ var_criterion : BEGIN VAR_CRITERION ident string_literal END VAR_CRITERION | BEGIN VAR_CRITERION ident string_literal ident_list END VAR_CRITERION - | BEGIN VAR_CRITERION ident string_literal ident_list var_criterion_opt_list END VAR_CRITERION + | BEGIN VAR_CRITERION ident string_literal ident_list var_criterion_opt_list \ + END VAR_CRITERION """ if len(p) == 9: - p[0] = ASTNodes.Var_Criterion(Name=p[3], LongIdentifier=p[4], Value=p[5], OptionalParams=p[6]) + p[0] = ASTNodes.Var_Criterion( + Name=p[3], LongIdentifier=p[4], Value=p[5], OptionalParams=p[6] + ) self.stack.remove_node(ASTNodes.Var_Criterion_Opt) elif len(p) == 8: p[0] = ASTNodes.Var_Criterion(Name=p[3], LongIdentifier=p[4], Value=p[5]) @@ -3011,7 +3122,9 @@ def p_var_criterion_opt(self, p): """ node = self.stack.get_or_create_node(ASTNodes.Var_Criterion_Opt) self.stack.add_node_param( - node_class=node, ast_node_names=[ASTNodes.Var_Measurement, ASTNodes.Var_Selection_Characteristic], param=p[1] + node_class=node, + ast_node_names=[ASTNodes.Var_Measurement, ASTNodes.Var_Selection_Characteristic], + param=p[1], ) p[0] = node @@ -3066,7 +3179,11 @@ def p_variant_coding_opt_params(self, p): """ node = self.stack.get_or_create_node(ASTNodes.Variant_Coding_Opt) - self.stack.add_node_param(node_class=node, ast_node_names=[ASTNodes.Var_Naming, ASTNodes.Var_Separator], param=p[1]) + self.stack.add_node_param( + 
node_class=node,
+            ast_node_names=[ASTNodes.Var_Naming, ASTNodes.Var_Separator],
+            param=p[1],
+        )
         p[0] = node
 
     def p_variant_coding_opt_objects_list(self, p):
@@ -3078,7 +3195,11 @@ def p_variant_coding_opt_objects_list(self, p):
         node = self.stack.get_or_create_node(ASTNodes.Variant_Coding_Opt)
         self.stack.add_node_object_list(
             node_class=node,
-            ast_node_names=[ASTNodes.Var_Characteristic, ASTNodes.Var_Criterion, ASTNodes.Var_Forbidden_Comb],
+            ast_node_names=[
+                ASTNodes.Var_Characteristic,
+                ASTNodes.Var_Criterion,
+                ASTNodes.Var_Forbidden_Comb,
+            ],
             param=p[1],
         )
         p[0] = node
@@ -3104,8 +3225,10 @@ def p_virtual(self, p):
 
     def p_virtual_characteristic(self, p):
         """
-        virtual_characteristic : BEGIN VIRTUAL_CHARACTERISTIC string_literal END VIRTUAL_CHARACTERISTIC
-                               | BEGIN VIRTUAL_CHARACTERISTIC string_literal ident_list END VIRTUAL_CHARACTERISTIC
+        virtual_characteristic : BEGIN VIRTUAL_CHARACTERISTIC string_literal \
+                                 END VIRTUAL_CHARACTERISTIC
+                               | BEGIN VIRTUAL_CHARACTERISTIC string_literal ident_list \
+                                 END VIRTUAL_CHARACTERISTIC
         """
         if len(p) == 7:
             p[0] = ASTNodes.Virtual_Characteristic(Formula=p[3], Characteristic=p[4])
diff --git a/a2lparser/a2lparser.py b/a2lparser/a2lparser.py
index a715079..c8e7e42 100644
--- a/a2lparser/a2lparser.py
+++ b/a2lparser/a2lparser.py
@@ -37,13 +37,19 @@ class A2LParser:
     Usage:
         >>> try:
-        >>>     parser = Parser()
-        >>>     ast = parser.parse_file(files="./data/*.a2l")
-        >>> except ParsingException as ex:
+        >>>     parser = A2LParser()
+        >>>     ast = parser.parse_file("./data/*.a2l")
+        >>> except A2LParserException as ex:
         >>>     print(ex)
     """
 
-    def __init__(self, validation: bool = True, optimize: bool = True, log_level: str = "INFO", quiet: bool = False) -> None:
+    def __init__(
+        self,
+        validation: bool = True,
+        optimize: bool = True,
+        log_level: str = "INFO",
+        quiet: bool = False,
+    ) -> None:
         """
         Parser Constructor.
@@ -64,9 +70,11 @@ def __init__(self, validation: bool = True, optimize: bool = True, log_level: st
             (           # start of the capturing group for the filename
             [^\s"']+    # matches any character that is not whitespace or a quotation mark
             |           # OR
-            "[^"]*"     # matches a quoted string (double quotes) capturing the content inside the quotes
+            "[^"]*"     # matches a quoted string (double quotes) capturing
+                        # the content inside the quotes
             |           # OR
-            '[^']*'     # matches a quoted string (single quotes) capturing the content inside the quotes
+            '[^']*'     # matches a quoted string (single quotes) capturing
+                        # the content inside the quotes
             )           # end of the capturing group
             """,
             re.IGNORECASE | re.VERBOSE,
@@ -82,8 +90,21 @@ def __init__(self, validation: bool = True, optimize: bool = True, log_level: st
 
     def parse_file(self, files: str) -> dict:
         """
-        Parses the given files.
-        Returns a dictionary of AbstractSyntaxTree objects with the file name as a key pair.
+        Parses one or more A2L files and returns a dictionary of AbstractSyntaxTree objects.
+        The dictionary keys are the file names and the values are the AbstractSyntaxTree objects.
+
+        Args:
+            files (str): A glob pattern or file path to one or more A2L files.
+
+        Returns:
+            dict: A dictionary with file names as keys and AbstractSyntaxTree objects as values.
+
+        Raises:
+            A2LParserException: If no files matching the pattern are found or if parsing fails.
+ + Example: + ast_dict = parser.parse_file("test.a2l") + ast_obj = ast_dict["test.a2l"] """ ast_objects = {} @@ -107,7 +128,9 @@ def parse_file(self, files: str) -> dict: # Parse the content filename = os.path.basename(a2l_file) - ast_objects[filename] = self._parse_content(content=a2l_content, show_progressbar=self.show_progressbar) + ast_objects[filename] = self.parse_content( + content=a2l_content, show_progressbar=self.show_progressbar + ) logger.success("Created Abstract Syntax Tree from file: {}", filename) except A2LParserException as e: @@ -115,19 +138,35 @@ def parse_file(self, files: str) -> dict: return ast_objects - def _parse_content(self, content: str, show_progressbar: bool = True) -> AbstractSyntaxTree: + def parse_content(self, content: str, show_progressbar: bool = True) -> AbstractSyntaxTree: """ Parses the given content string and returns an AbstractSyntaxTree object. + + Args: + content (str): The content string to be parsed. + show_progressbar (bool, optional): Whether to show a progress bar during parsing. + + Returns: + AbstractSyntaxTree: The parsed AbstractSyntaxTree object. + + Note: + This function may raise A2LParserException. + + Example Usage: + ast = parser.parse_content("A2L content", show_progressbar=True) """ - logger.debug("Starting AST generation...") + logger.debug("Starting to parse A2L content...") return self.parser.generate_ast(content, show_progressbar) def _load_file(self, filename: str, current_dir: str = None) -> str: """ - Reads the content of the given filename and returns it with includes replaced recursively. + Loads the file supporting the /include mechanism of A2L. + Recursively reads the content of the given filename and + replace the /include tags with the content of the included files. Args: filename (str): The filename of the A2L file to be read. + current_dir (str, optional): The current directory of the A2L file. Returns: str: The complete A2L file with the included content. diff --git a/a2lparser/cli/command_prompt.py b/a2lparser/cli/command_prompt.py index ae72bdf..29edae1 100644 --- a/a2lparser/cli/command_prompt.py +++ b/a2lparser/cli/command_prompt.py @@ -52,7 +52,9 @@ def get_session(): """ if CommandPrompt._session is None: history_file: Path = A2L_CLI_HISTORY_FILE - CommandPrompt._session = PromptSession(history=FileHistory(history_file), auto_suggest=AutoSuggestFromHistory()) + CommandPrompt._session = PromptSession( + history=FileHistory(history_file), auto_suggest=AutoSuggestFromHistory() + ) return CommandPrompt._session @staticmethod @@ -69,7 +71,7 @@ def prompt(ast): """ local_vars = {"ast": ast} - print("You can access the 'ast' attribute which holds the abstract syntax tree as a reference.\n") + print("You can access the 'ast' attribute which holds the AST as a reference.\n") while True: try: diff --git a/a2lparser/converter/a2l_converter.py b/a2lparser/converter/a2l_converter.py index 5c86336..0962b7e 100644 --- a/a2lparser/converter/a2l_converter.py +++ b/a2lparser/converter/a2l_converter.py @@ -24,7 +24,7 @@ from a2lparser.a2l.ast.abstract_syntax_tree import AbstractSyntaxTree -class A2LConverter(): +class A2LConverter: """ Base class for converting an A2L dictionary. """ @@ -42,7 +42,7 @@ def slice_ast(self, ast: dict, file_extension: str, filename: str = None) -> lis Args: ast (dict): The AST dictionary to be sliced. file_extension (str): The file extension to be used for the output files. - filename (str, optional): The filename to be used for the output files. Defaults to None. 
+ filename (str, optional): The filename to be used for the output files. Defaults None. Returns: list: A list of (filename, ast) tuples. @@ -54,7 +54,11 @@ def slice_ast(self, ast: dict, file_extension: str, filename: str = None) -> lis # If no filename is given, we raise an exception. if filename is None: raise self.A2LConverterException( - "Could not find filename in AST. Pass filename argument to resolve this error.") + ( + "Could not find filename in AST. " + "Pass filename argument to resolve this error." + ) + ) root = self.add_root_element(ast) out_filename = f"{filename}.{file_extension}" result.append((out_filename, root)) @@ -66,12 +70,12 @@ def slice_ast(self, ast: dict, file_extension: str, filename: str = None) -> lis if filename is not None: out_filename = f"filename_{index}.{file_extension}" else: - out_filename = f"{self.remove_file_extension(input_filename)}.{file_extension}" + out_filename = ( + f"{self.remove_file_extension(input_filename)}.{file_extension}" + ) result.append((out_filename, root)) except Exception as e: - raise self.A2LConverterException( - f"Conversion Error while slicing AST: {e}" - ) from e + raise self.A2LConverterException(f"Conversion Error while slicing AST: {e}") from e return result def write_to_file(self, content: str, filename: str, output_dir: str = ".") -> None: @@ -86,23 +90,24 @@ def write_to_file(self, content: str, filename: str, output_dir: str = ".") -> N if output_dir is None: output_dir = "." else: - output_dir = output_dir.replace("\"", "").replace("'", "") + output_dir = output_dir.replace('"', "").replace("'", "") full_output_dir = os.path.abspath(output_dir) if not os.path.isdir(full_output_dir): raise self.A2LConverterException( - f"Unable to write to output directory: '{full_output_dir}'. The directory does not exist." + ( + f"Unable to write to output directory: '{full_output_dir}'. " + "The directory does not exist." + ) ) full_path = os.path.join(full_output_dir, filename) try: - with open(full_path, 'w', encoding="utf-8") as f: + with open(full_path, "w", encoding="utf-8") as f: f.write(content) except Exception as e: - raise self.A2LConverterException( - f"Error writing to file '{filename}': {e}" - ) from e + raise self.A2LConverterException(f"Error writing to file '{filename}': {e}") from e def is_ast_valid_structure(self, ast: dict) -> bool: """ diff --git a/a2lparser/converter/json_converter.py b/a2lparser/converter/json_converter.py index 1e70bd4..0521549 100644 --- a/a2lparser/converter/json_converter.py +++ b/a2lparser/converter/json_converter.py @@ -37,7 +37,9 @@ class JSONConverterException(Exception): Exception raised when an error occurs while converting an AST to a JSON file. """ - def convert(self, ast: dict, output_dir: str = ".", output_filename: str = None, pretty: bool = True) -> None: + def convert( + self, ast: dict, output_dir: str = ".", output_filename: str = None, pretty: bool = True + ) -> None: """ Convert the given AST dictionary to JSON and write it to a file. @@ -58,19 +60,19 @@ def convert(self, ast: dict, output_dir: str = ".", output_filename: str = None, except Exception as e: raise self.JSONConverterException(e) from e - def convert_to_string(self, ast: dict, - output_filename: str = None, - pretty: bool = True) -> list: + def convert_to_string( + self, ast: dict, output_filename: str = None, pretty: bool = True + ) -> list: """ Convert the given AST dictionary to a JSON string. Args: ast (dict): The AST dictionary to be converted to JSON. 
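A short usage sketch for the `convert()` signature reformatted above; the input file name, output file name, and import paths (derived from the file locations in this diff) are illustrative:

```python
from a2lparser.a2lparser import A2LParser
from a2lparser.converter.json_converter import JSONConverter

# Parse a single file and write its AST as JSON into the current directory.
ast = A2LParser(quiet=True).parse_file("test.a2l")["test.a2l"]
JSONConverter().convert(ast.get_dict(), output_dir=".", output_filename="test", pretty=True)
```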
output_filename (str, optional): The filename to be used. - pretty (bool, optional): Whether to format the JSON string with indentation and newlines. + pretty (bool, optional): Format the JSON string with indentation and newlines. Returns: - str: List of tuples (filename, json_string). + list: List of tuples (filename, json_string). """ try: result = [] diff --git a/a2lparser/converter/xml_converter.py b/a2lparser/converter/xml_converter.py index 180857b..4fa992e 100644 --- a/a2lparser/converter/xml_converter.py +++ b/a2lparser/converter/xml_converter.py @@ -38,7 +38,12 @@ class XMLConverterException(Exception): """ def convert( - self, ast: dict, output_dir: str = ".", output_filename: str = None, encoding: str = "utf-8", pretty: bool = True + self, + ast: dict, + output_dir: str = ".", + output_filename: str = None, + encoding: str = "utf-8", + pretty: bool = True, ) -> None: """ Convert the given AST dictionary to XML and write it to a file. @@ -61,21 +66,20 @@ def convert( except Exception as e: raise self.XMLConverterException(e) from e - def convert_to_string(self, ast: dict, - output_filename: str = None, - encoding: str = "utf-8", - pretty: bool = True) -> list: + def convert_to_string( + self, ast: dict, output_filename: str = None, encoding: str = "utf-8", pretty: bool = True + ) -> list: """ Convert the given AST dictionary to a XML string. Args: ast (dict): The AST dictionary to be converted to XML. output_filename (str, optional): The filename to be used for the XML string. - encoding (str, optional): The encoding to be used for the XML string (default is "utf-8"). - pretty (bool, optional): Whether to format the XML string with indentation and newlines (default is True). + encoding (str, optional): The encoding to be used for the XML string (defaults "utf-8"). + pretty (bool, optional): Format the XML string with indentation and newlines. Returns: - str: List of tuples (filename, xml_string). + list: List of tuples (filename, xml_string). """ try: result = [] diff --git a/a2lparser/converter/yaml_converter.py b/a2lparser/converter/yaml_converter.py index 62aaa7d..63369f3 100644 --- a/a2lparser/converter/yaml_converter.py +++ b/a2lparser/converter/yaml_converter.py @@ -37,9 +37,7 @@ class YAMLConverterException(Exception): Exception raised when an error occurs while converting an AST to a YAML file. """ - def convert(self, ast: dict, - output_dir: str = ".", - output_filename: str = None) -> None: + def convert(self, ast: dict, output_dir: str = ".", output_filename: str = None) -> None: """ Convert the given AST dictionary to YAML and write it to a file. @@ -68,7 +66,7 @@ def convert_to_string(self, ast: dict, output_filename: str = None) -> list: output_filename (str, optional): The filename to be used. Returns: - str: List of tuples (filename, yaml_string). + list: List of tuples (filename, yaml_string). 
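A usage sketch for `convert_to_string()` as documented above, returning a list of `(filename, xml_string)` tuples; the A2L block is a trimmed snippet borrowed from the test fixtures and the import paths are assumptions derived from this diff:

```python
from a2lparser.a2l.a2l_yacc import A2LYacc
from a2lparser.converter.xml_converter import XMLConverter

block = '/begin MEASUREMENT N "" UWORD R_VOLTAGE 1 0 0.0 100.0 /end MEASUREMENT'
ast = A2LYacc().generate_ast(block)

# Each entry pairs the generated file name with the serialized XML.
results = XMLConverter().convert_to_string(ast.get_dict(), output_filename="measurement")
for filename, xml_string in results:
    print(filename)  # measurement.xml
```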
""" try: result = [] diff --git a/a2lparser/main.py b/a2lparser/main.py index 5ffebcc..00adb25 100644 --- a/a2lparser/main.py +++ b/a2lparser/main.py @@ -48,7 +48,7 @@ def main() -> None: Usage from root project dir: $ python -m a2lparser.main --help - Documentation at: https://github.com/mrom1/a2lparser + Repository at: https://github.com/mrom1/a2lparser """ try: args = parse_arguments(sys.argv[1:]) @@ -83,7 +83,10 @@ def main() -> None: # Initializing the A2L Parser parser = A2LParser( - validation=not args.no_validation, optimize=not args.no_optimize, log_level=args.log_level, quiet=args.quiet + validation=not args.no_validation, + optimize=not args.no_optimize, + log_level=args.log_level, + quiet=args.quiet, ) # Parse input files into abstract syntax tree @@ -125,14 +128,37 @@ def parse_arguments(args: list) -> argparse.Namespace: """ parser = argparse.ArgumentParser(prog="a2lparser") parser.add_argument("file", nargs="?", help="A2L files to parse") - parser.add_argument("-x", "--xml", action="store_true", help="Converts an A2L file to a XML output file") - parser.add_argument("-j", "--json", action="store_true", help="Converts an A2L file to a JSON output file") - parser.add_argument("-y", "--yaml", action="store_true", help="Converts an A2L file to a YAML output file") - parser.add_argument("--output-dir", nargs="?", default=None, metavar="PATH", help="Output directory for converted files") - parser.add_argument("--prompt", action="store_true", default=False, help="Enables CLI prompt after parsing") - parser.add_argument("--quiet", action="store_true", default=False, help="Disables console output") - parser.add_argument("--no-optimize", action="store_true", default=False, help="Disables optimization mode") - parser.add_argument("--no-validation", action="store_true", default=False, help="Disables possible A2L validation warnings") + parser.add_argument( + "-x", "--xml", action="store_true", help="Converts an A2L file to a XML output file" + ) + parser.add_argument( + "-j", "--json", action="store_true", help="Converts an A2L file to a JSON output file" + ) + parser.add_argument( + "-y", "--yaml", action="store_true", help="Converts an A2L file to a YAML output file" + ) + parser.add_argument( + "--output-dir", + nargs="?", + default=None, + metavar="PATH", + help="Output directory for converted files", + ) + parser.add_argument( + "--prompt", action="store_true", default=False, help="Enables CLI prompt after parsing" + ) + parser.add_argument( + "--quiet", action="store_true", default=False, help="Disables console output" + ) + parser.add_argument( + "--no-optimize", action="store_true", default=False, help="Disables optimization mode" + ) + parser.add_argument( + "--no-validation", + action="store_true", + default=False, + help="Disables possible A2L validation warnings", + ) parser.add_argument( "--gen-ast", nargs="?", @@ -140,7 +166,9 @@ def parse_arguments(args: list) -> argparse.Namespace: const=A2L_DEFAULT_CONFIG_NAME, help="Generates python file containing AST node classes", ) - parser.add_argument("--log-level", default="INFO", choices=["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]) + parser.add_argument( + "--log-level", default="INFO", choices=["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] + ) parser.add_argument("--version", action="version", version=f"a2lparser version: {__version__}") return parser.parse_args(args) diff --git a/pyproject.toml b/pyproject.toml index 5c702fa..3c6a901 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -41,7 +41,7 @@ package-data = 
{"a2lparser" = ["*.cfg", "*.config"]} a2lparser = "a2lparser.main:main" [tool.black] -line-length = 128 +line-length = 100 include = '\.pyi?$' exclude = ''' /( diff --git a/tests/ast/test_ast_abstract_syntax_tree.py b/tests/ast/test_ast_abstract_syntax_tree.py index b73f6fc..f66a5a0 100644 --- a/tests/ast/test_ast_abstract_syntax_tree.py +++ b/tests/ast/test_ast_abstract_syntax_tree.py @@ -33,7 +33,10 @@ def a2l_dict(): return { "VERSION": {"Major_Version": 1, "Minor_Version": 0, "Patch_Version": 0}, "MODULE": {"MODULE_NAME": "MyModule"}, - "ANNOTATION": {"ANNOTATION_NAME": "MyAnnotation", "ANNOTATION_TEXT": "This is my annotation"}, + "ANNOTATION": { + "ANNOTATION_NAME": "MyAnnotation", + "ANNOTATION_TEXT": "This is my annotation", + }, "CHARACTERISTIC": {"CHARACTERISTIC_NAME": "MyCharacteristic"}, "MEASUREMENT": { "MEASUREMENT_NAME": "MyMeasurement", @@ -64,7 +67,13 @@ def a2l_dict_nested(): }, "CHARACTERISTIC": { "CHARACTERISTIC_NAME": "MyCharacteristic", - "ANNOTATION": {"ANNOTATION_TEXT": ["Characteristic annotation", "inside a LIST", "a LIST with three items"]}, + "ANNOTATION": { + "ANNOTATION_TEXT": [ + "Characteristic annotation", + "inside a LIST", + "a LIST with three items", + ] + }, }, "MEASUREMENT": { "MEASUREMENT_NAME": "MyMeasurement", @@ -77,7 +86,13 @@ def a2l_dict_nested(): "CHARACTERISTIC_NAME": "MyCharacteristicEcuAddress", "ECU_ADDRESS": "0x4488ee44", "ANNOTATION": [ - {"ANNOTATION_TEXT": ["Characteristic annotation", "inside a LIST", "a LIST with three items"]}, + { + "ANNOTATION_TEXT": [ + "Characteristic annotation", + "inside a LIST", + "a LIST with three items", + ] + }, {"ANNOTATION_TEXT": ["ecu address: 0x88ff8800", "ecu_addr_match: 0x4488ef11"]}, {"ANNOTATION_TEXT": ["ecu_addr_no_match: 0xff88ef11"]}, ], @@ -162,8 +177,12 @@ def test_abstract_syntax_tree_find_value(a2l_dict_nested, a2l_node): expected_ecu_address_root = a2l_dict_nested["ECU_ADDRESS"] expected_ecu_address_measurement = a2l_dict_nested["MEASUREMENT"]["ECU_ADDRESS"] - expected_ecu_address_measurement_characteristic = a2l_dict_nested["MEASUREMENT"]["CHARACTERISTIC"]["ECU_ADDRESS"] - expected_ecu_address_in_annotation_text = a2l_dict_nested["MEASUREMENT"]["CHARACTERISTIC"]["ANNOTATION"][1] + expected_ecu_address_measurement_characteristic = a2l_dict_nested["MEASUREMENT"][ + "CHARACTERISTIC" + ]["ECU_ADDRESS"] + expected_ecu_address_in_annotation_text = a2l_dict_nested["MEASUREMENT"]["CHARACTERISTIC"][ + "ANNOTATION" + ][1] ecu_address = ast.find_value("0x4488") assert ecu_address assert ecu_address["ECU_ADDRESS"] == expected_ecu_address_root @@ -173,7 +192,10 @@ def test_abstract_syntax_tree_find_value(a2l_dict_nested, a2l_node): assert ecu_address_measurement["ECU_ADDRESS"] == expected_ecu_address_measurement ecu_address_measurement_characteristic = ecu_address_measurement["CHARACTERISTIC"] - assert ecu_address_measurement_characteristic["ECU_ADDRESS"] == expected_ecu_address_measurement_characteristic + assert ( + ecu_address_measurement_characteristic["ECU_ADDRESS"] + == expected_ecu_address_measurement_characteristic + ) assert ( ecu_address_measurement_characteristic["ANNOTATION"]["ANNOTATION_TEXT"] == expected_ecu_address_in_annotation_text["ANNOTATION_TEXT"] diff --git a/tests/ast/test_ast_generator.py b/tests/ast/test_ast_generator.py index 27bd6ac..2f3f413 100644 --- a/tests/ast/test_ast_generator.py +++ b/tests/ast/test_ast_generator.py @@ -35,7 +35,9 @@ def test_ast_generator(): temp_test_output_path = A2L_PACKAGE_DIR / "../testfiles" temp_test_dir_prefix = "temp_dir_output_" - with 
tempfile.TemporaryDirectory(dir=temp_test_output_path, prefix=temp_test_dir_prefix) as tempdir: + with tempfile.TemporaryDirectory( + dir=temp_test_output_path, prefix=temp_test_dir_prefix + ) as tempdir: config_file = A2L_CONFIGS_DIR / A2L_DEFAULT_CONFIG_NAME ast_python_file = os.path.join(tempdir, "test_a2l_ast.py") ast_generator = ASTGenerator(cfg_filename=str(config_file), out_filename=ast_python_file) diff --git a/tests/ast/test_ast_is_value_match.py b/tests/ast/test_ast_is_value_match.py index 73a4115..12628e9 100644 --- a/tests/ast/test_ast_is_value_match.py +++ b/tests/ast/test_ast_is_value_match.py @@ -82,7 +82,11 @@ def test_ast_is_value_match_nested_list(nested_dict): """ ast = AbstractSyntaxTree(None, nested_dict) matched = ast._is_value_match( - value=nested_dict, search_expression="a LIST", case_sensitive=False, exact_match=False, recursive_search=True + value=nested_dict, + search_expression="a LIST", + case_sensitive=False, + exact_match=False, + recursive_search=True, ) assert matched is True @@ -114,7 +118,9 @@ def test_ast_is_value_match_nested_list(nested_dict): ("MEASUREMENT annotation", True, True), ], ) -def test_ast_is_value_match_partial(nested_dict, search_expression, case_sensitive, expected_result): +def test_ast_is_value_match_partial( + nested_dict, search_expression, case_sensitive, expected_result +): """ Test matching a part of a value in a nested dictionary. """ @@ -140,7 +146,9 @@ def test_ast_is_value_match_partial(nested_dict, search_expression, case_sensiti ("this is my measurement annotation", False, True), ], ) -def test_ast_is_value_match_case_sensitivity(nested_dict, search_expression, case_sensitive, expected_result): +def test_ast_is_value_match_case_sensitivity( + nested_dict, search_expression, case_sensitive, expected_result +): """ Test for matching a value in a nested dictionary with and without case sensitivity. """ @@ -164,7 +172,9 @@ def test_ast_is_value_match_case_sensitivity(nested_dict, search_expression, cas ("This is my MEASUREMENT annotation", True, True, True), ], ) -def test_ast_is_value_match_exact_case(nested_dict, search_expression, case_sensitive, exact_match, expected_result): +def test_ast_is_value_match_exact_case( + nested_dict, search_expression, case_sensitive, exact_match, expected_result +): """ Test for matching the exact case of a value in a nested dictionary. """ diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..74c01c9 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,27 @@ +####################################################################################### +# a2lparser: https://github.com/mrom1/a2lparser # +# author: https://github.com/mrom1 # +# # +# This file is part of the a2lparser package. # +# # +# a2lparser is free software: you can redistribute it and/or modify it # +# under the terms of the GNU General Public License as published by the # +# Free Software Foundation, either version 3 of the License, or (at your option) # +# any later version. # +# # +# a2lparser is distributed in the hope that it will be useful, # +# but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY # +# or FITNESS FOR A PARTICULAR PURPOSE. # +# See the GNU General Public License for more details. # +# # +# You should have received a copy of the GNU General Public License # +# along with a2lparser. If not, see . # +####################################################################################### + + +# Import fixtures to make them available across all tests. 
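A sketch mirroring the keyword-argument call to `_is_value_match()` reformatted above; it targets a private helper, so the snippet is purely illustrative:

```python
from a2lparser.a2l.ast.abstract_syntax_tree import AbstractSyntaxTree

# Case-insensitive, partial, recursive search through a nested value.
nested = {"ANNOTATION": {"ANNOTATION_TEXT": ["a LIST with three items"]}}
ast = AbstractSyntaxTree(None, nested)
matched = ast._is_value_match(
    value=nested,
    search_expression="a LIST",
    case_sensitive=False,
    exact_match=False,
    recursive_search=True,
)
print(matched)  # True
```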
+# These imports are not used directly in this file but are necessary +# for pytest to recognize and use these fixtures. +from .fixture_utils import create_file # noqa: F401, pylint: disable=unused-import +from .fixture_utils import compare_files # noqa: F401, pylint: disable=unused-import +from .fixture_utils import check_files_exist # noqa: F401, pylint: disable=unused-import diff --git a/tests/converter/test_converter_a2l.py b/tests/converter/test_converter_a2l.py index 2c4139a..ee648fd 100644 --- a/tests/converter/test_converter_a2l.py +++ b/tests/converter/test_converter_a2l.py @@ -32,7 +32,7 @@ def valid_ast_dict_multiple_files() -> dict: }, "filename2.aml": { "key_2": "value_2", - } + }, } @@ -50,7 +50,7 @@ def invalid_ast_dict_from_string() -> dict: }, "MEASUREMENT": { "key": "value", - } + }, } diff --git a/tests/converter/test_converter_json.py b/tests/converter/test_converter_json.py index 3d30817..80c2a65 100644 --- a/tests/converter/test_converter_json.py +++ b/tests/converter/test_converter_json.py @@ -91,7 +91,9 @@ def test_converter_json_single_measurement_entry(): """ ast = A2LYacc().generate_ast(measurement_block) assert ast - json_measurement = JSONConverter().convert_to_string(ast.get_dict(), output_filename="measurement") + json_measurement = JSONConverter().convert_to_string( + ast.get_dict(), output_filename="measurement" + ) assert json_measurement json_measurement_filename, json_measurement_root = json_measurement[0] @@ -113,14 +115,21 @@ def test_converter_json_single_measurement_entry(): assert ast_measurement["UpperLimit"] == json_measurement_parsed["UpperLimit"] assert ast_measurement["BIT_MASK"] == json_measurement_parsed["BIT_MASK"] assert ast_measurement["BYTE_ORDER"] == json_measurement_parsed["BYTE_ORDER"] - assert str(ast_measurement["DISCRETE"]).lower() == str(json_measurement_parsed["DISCRETE"]).lower() + assert ( + str(ast_measurement["DISCRETE"]).lower() == str(json_measurement_parsed["DISCRETE"]).lower() + ) assert ast_measurement["ECU_ADDRESS"] == json_measurement_parsed["ECU_ADDRESS"] - assert ast_measurement["ECU_ADDRESS_EXTENSION"] == json_measurement_parsed["ECU_ADDRESS_EXTENSION"] + assert ( + ast_measurement["ECU_ADDRESS_EXTENSION"] == json_measurement_parsed["ECU_ADDRESS_EXTENSION"] + ) assert ast_measurement["ERROR_MASK"] == json_measurement_parsed["ERROR_MASK"] assert ast_measurement["FORMAT"] == json_measurement_parsed["FORMAT"] assert ast_measurement["LAYOUT"] == json_measurement_parsed["LAYOUT"] assert ast_measurement["PHYS_UNIT"] == json_measurement_parsed["PHYS_UNIT"] - assert str(ast_measurement["READ_WRITE"]).lower() == str(json_measurement_parsed["READ_WRITE"]).lower() + assert ( + str(ast_measurement["READ_WRITE"]).lower() + == str(json_measurement_parsed["READ_WRITE"]).lower() + ) assert ast_measurement["REF_MEMORY_SEGMENT"] == json_measurement_parsed["REF_MEMORY_SEGMENT"] assert ast_measurement["MAX_REFRESH"] == json_measurement_parsed["MAX_REFRESH"] assert ast_measurement["SYMBOL_LINK"] == json_measurement_parsed["SYMBOL_LINK"] diff --git a/tests/converter/test_converter_xml.py b/tests/converter/test_converter_xml.py index 838bdbe..ddf9825 100644 --- a/tests/converter/test_converter_xml.py +++ b/tests/converter/test_converter_xml.py @@ -91,7 +91,9 @@ def test_converter_xml_single_measurement_entry(): """ ast = A2LYacc().generate_ast(measurement_block) assert ast - xml_measurement = XMLConverter().convert_to_string(ast.get_dict(), output_filename="measurement") + xml_measurement = XMLConverter().convert_to_string( + 
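To show how the conftest re-exports above are consumed, a hypothetical test module; pytest injects `create_file` without an explicit import, and the file name and A2L content here are illustrative:

```python
# tests/test_example.py (illustrative, not part of this diff)
import os


def test_create_file_fixture(tmp_path, create_file):
    # create_file comes from tests/fixture_utils.py via tests/conftest.py.
    path = create_file(tmp_path, "demo.a2l", '/begin PROJECT Demo "" /end PROJECT')
    assert os.path.exists(path)
```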
ast.get_dict(), output_filename="measurement" + ) assert xml_measurement xml_measurement_filename, xml_measurement_root = xml_measurement[0] @@ -113,14 +115,21 @@ def test_converter_xml_single_measurement_entry(): assert ast_measurement["UpperLimit"] == xml_measurement_parsed["UpperLimit"] assert ast_measurement["BIT_MASK"] == xml_measurement_parsed["BIT_MASK"] assert ast_measurement["BYTE_ORDER"] == xml_measurement_parsed["BYTE_ORDER"] - assert str(ast_measurement["DISCRETE"]).lower() == str(xml_measurement_parsed["DISCRETE"]).lower() + assert ( + str(ast_measurement["DISCRETE"]).lower() == str(xml_measurement_parsed["DISCRETE"]).lower() + ) assert ast_measurement["ECU_ADDRESS"] == xml_measurement_parsed["ECU_ADDRESS"] - assert ast_measurement["ECU_ADDRESS_EXTENSION"] == xml_measurement_parsed["ECU_ADDRESS_EXTENSION"] + assert ( + ast_measurement["ECU_ADDRESS_EXTENSION"] == xml_measurement_parsed["ECU_ADDRESS_EXTENSION"] + ) assert ast_measurement["ERROR_MASK"] == xml_measurement_parsed["ERROR_MASK"] assert ast_measurement["FORMAT"] == xml_measurement_parsed["FORMAT"] assert ast_measurement["LAYOUT"] == xml_measurement_parsed["LAYOUT"] assert ast_measurement["PHYS_UNIT"] == xml_measurement_parsed["PHYS_UNIT"] - assert str(ast_measurement["READ_WRITE"]).lower() == str(xml_measurement_parsed["READ_WRITE"]).lower() + assert ( + str(ast_measurement["READ_WRITE"]).lower() + == str(xml_measurement_parsed["READ_WRITE"]).lower() + ) assert ast_measurement["REF_MEMORY_SEGMENT"] == xml_measurement_parsed["REF_MEMORY_SEGMENT"] assert ast_measurement["MAX_REFRESH"] == xml_measurement_parsed["MAX_REFRESH"] assert ast_measurement["SYMBOL_LINK"] == xml_measurement_parsed["SYMBOL_LINK"] diff --git a/tests/converter/test_converter_yaml.py b/tests/converter/test_converter_yaml.py index de57553..072b9e3 100644 --- a/tests/converter/test_converter_yaml.py +++ b/tests/converter/test_converter_yaml.py @@ -91,7 +91,9 @@ def test_converter_yaml_single_measurement_entry(): """ ast = A2LYacc().generate_ast(measurement_block) assert ast - yaml_measurement = YAMLConverter().convert_to_string(ast.get_dict(), output_filename="measurement") + yaml_measurement = YAMLConverter().convert_to_string( + ast.get_dict(), output_filename="measurement" + ) assert yaml_measurement yaml_measurement_filename, yaml_measurement_root = yaml_measurement[0] @@ -113,14 +115,21 @@ def test_converter_yaml_single_measurement_entry(): assert ast_measurement["UpperLimit"] == yaml_measurement_parsed["UpperLimit"] assert ast_measurement["BIT_MASK"] == yaml_measurement_parsed["BIT_MASK"] assert ast_measurement["BYTE_ORDER"] == yaml_measurement_parsed["BYTE_ORDER"] - assert str(ast_measurement["DISCRETE"]).lower() == str(yaml_measurement_parsed["DISCRETE"]).lower() + assert ( + str(ast_measurement["DISCRETE"]).lower() == str(yaml_measurement_parsed["DISCRETE"]).lower() + ) assert ast_measurement["ECU_ADDRESS"] == yaml_measurement_parsed["ECU_ADDRESS"] - assert ast_measurement["ECU_ADDRESS_EXTENSION"] == yaml_measurement_parsed["ECU_ADDRESS_EXTENSION"] + assert ( + ast_measurement["ECU_ADDRESS_EXTENSION"] == yaml_measurement_parsed["ECU_ADDRESS_EXTENSION"] + ) assert ast_measurement["ERROR_MASK"] == yaml_measurement_parsed["ERROR_MASK"] assert ast_measurement["FORMAT"] == yaml_measurement_parsed["FORMAT"] assert ast_measurement["LAYOUT"] == yaml_measurement_parsed["LAYOUT"] assert ast_measurement["PHYS_UNIT"] == yaml_measurement_parsed["PHYS_UNIT"] - assert str(ast_measurement["READ_WRITE"]).lower() == 
str(yaml_measurement_parsed["READ_WRITE"]).lower() + assert ( + str(ast_measurement["READ_WRITE"]).lower() + == str(yaml_measurement_parsed["READ_WRITE"]).lower() + ) assert ast_measurement["REF_MEMORY_SEGMENT"] == yaml_measurement_parsed["REF_MEMORY_SEGMENT"] assert ast_measurement["MAX_REFRESH"] == yaml_measurement_parsed["MAX_REFRESH"] assert ast_measurement["SYMBOL_LINK"] == yaml_measurement_parsed["SYMBOL_LINK"] diff --git a/tests/error_handling/test_error_handling_module.py b/tests/error_handling/test_error_handling_module.py index f2ae13b..289b386 100644 --- a/tests/error_handling/test_error_handling_module.py +++ b/tests/error_handling/test_error_handling_module.py @@ -105,4 +105,13 @@ def test_error_handling_module_erroneous_section_post(): assert module["INSTANCE"]["Address"] == "0xEECCDD00" assert module["IF_DATA"]["Name"] == "ISO" - assert module["IF_DATA"]["DataParams"] == ['SND', '0x10', '0x00', '0x05', '0x08', 'RCV', '4', 'long'] + assert module["IF_DATA"]["DataParams"] == [ + "SND", + "0x10", + "0x00", + "0x05", + "0x08", + "RCV", + "4", + "long", + ] diff --git a/tests/error_handling/test_error_handling_sections.py b/tests/error_handling/test_error_handling_sections.py index 68201fc..bf23ffb 100644 --- a/tests/error_handling/test_error_handling_sections.py +++ b/tests/error_handling/test_error_handling_sections.py @@ -78,9 +78,11 @@ def test_error_handling_measurement_subsection_error(): assert measurement["BYTE_ORDER"] == "MSB_FIRST" assert measurement["REF_MEMORY_SEGMENT"] == "Data2" assert measurement["PHYS_UNIT"] == '"mph"' - assert measurement["FUNCTION_LIST"] == {'Name': ['ID_ADJUSTM', 'FL_ADJUSTM']} - assert measurement["IF_DATA"] == {'Name': 'ISO', - 'DataParams': ['SND', '0x10', '0x00', '0x05', '0x08', 'RCV', '4', 'long']} + assert measurement["FUNCTION_LIST"] == {"Name": ["ID_ADJUSTM", "FL_ADJUSTM"]} + assert measurement["IF_DATA"] == { + "Name": "ISO", + "DataParams": ["SND", "0x10", "0x00", "0x05", "0x08", "RCV", "4", "long"], + } assert measurement["ANNOTATION"]["ANNOTATION_LABEL"] == '"annotation test label"' assert measurement["ANNOTATION"]["ANNOTATION_TEXT"] == ['"annotation text example"'] diff --git a/tests/error_handling/test_error_handling_unknown_tokens.py b/tests/error_handling/test_error_handling_unknown_tokens.py index 54faa1d..9e50e4a 100644 --- a/tests/error_handling/test_error_handling_unknown_tokens.py +++ b/tests/error_handling/test_error_handling_unknown_tokens.py @@ -120,4 +120,13 @@ def test_error_handling_unknown_tokens_post(): assert module["FRAME"]["ScalingUnit"] == "2" assert module["FRAME"]["Rate"] == "3" assert module["IF_DATA"]["Name"] == "ISO" - assert module["IF_DATA"]["DataParams"] == ['SND', '0x10', '0x00', '0x05', '0x08', 'RCV', '4', 'long'] + assert module["IF_DATA"]["DataParams"] == [ + "SND", + "0x10", + "0x00", + "0x05", + "0x08", + "RCV", + "4", + "long", + ] diff --git a/tests/fixture_utils.py b/tests/fixture_utils.py index 3476073..dc33390 100644 --- a/tests/fixture_utils.py +++ b/tests/fixture_utils.py @@ -23,6 +23,21 @@ import pytest +@pytest.fixture +def create_file(): + """ + Fixture for creating a file inside a temporary directory for testing. 
+ """ + + def _create_file(tempdir, filename, content): + file_path = os.path.join(tempdir, filename) + with open(file_path, "w", encoding="utf-8") as file: + file.write(content) + return file_path + + return _create_file + + @pytest.fixture def check_files_exist(): """ diff --git a/tests/integration/test_integration_asap2_demo_v161.py b/tests/integration/test_integration_asap2_demo_v161.py index b399a54..dbd51b0 100644 --- a/tests/integration/test_integration_asap2_demo_v161.py +++ b/tests/integration/test_integration_asap2_demo_v161.py @@ -23,7 +23,7 @@ from pathlib import Path from a2lparser import A2L_PACKAGE_DIR from a2lparser.main import main -from tests.fixture_utils import compare_files, check_files_exist +# from tests.fixture_utils import compare_files, check_files_exist def test_integration_asap2_demo_v161(monkeypatch, compare_files, check_files_exist): @@ -34,12 +34,21 @@ def test_integration_asap2_demo_v161(monkeypatch, compare_files, check_files_exi temp_test_output_path = A2L_PACKAGE_DIR / "../testfiles" temp_test_dir_prefix = "temp_dir_output_" - with tempfile.TemporaryDirectory(dir=temp_test_output_path, prefix=temp_test_dir_prefix) as tempdir: - monkeypatch.setattr("sys.argv", [ - "a2lparser", f"testfiles/A2L/{a2l_filename}", - "--json", "--xml", "--yaml", - "--output-dir", f"\"{Path(tempdir).resolve().as_posix()}\"", - ]) + with tempfile.TemporaryDirectory( + dir=temp_test_output_path, prefix=temp_test_dir_prefix + ) as tempdir: + monkeypatch.setattr( + "sys.argv", + [ + "a2lparser", + f"testfiles/A2L/{a2l_filename}", + "--json", + "--xml", + "--yaml", + "--output-dir", + f'"{Path(tempdir).resolve().as_posix()}"', + ], + ) main() # Check if files were generated diff --git a/tests/integration/test_integration_asap2_demo_v171.py b/tests/integration/test_integration_asap2_demo_v171.py index 31d0565..60d4de0 100644 --- a/tests/integration/test_integration_asap2_demo_v171.py +++ b/tests/integration/test_integration_asap2_demo_v171.py @@ -23,7 +23,7 @@ from pathlib import Path from a2lparser import A2L_PACKAGE_DIR from a2lparser.main import main -from tests.fixture_utils import compare_files, check_files_exist +# from tests.fixture_utils import compare_files, check_files_exist def test_integration_asap2_demo_v171(monkeypatch, compare_files, check_files_exist): @@ -34,12 +34,21 @@ def test_integration_asap2_demo_v171(monkeypatch, compare_files, check_files_exi temp_test_output_path = A2L_PACKAGE_DIR / "../testfiles" temp_test_dir_prefix = "temp_dir_output_" - with tempfile.TemporaryDirectory(dir=temp_test_output_path, prefix=temp_test_dir_prefix) as tempdir: - monkeypatch.setattr("sys.argv", [ - "a2lparser", f"testfiles/A2L/{a2l_filename}", - "--json", "--xml", "--yaml", - "--output-dir", f"\"{Path(tempdir).resolve().as_posix()}\"", - ]) + with tempfile.TemporaryDirectory( + dir=temp_test_output_path, prefix=temp_test_dir_prefix + ) as tempdir: + monkeypatch.setattr( + "sys.argv", + [ + "a2lparser", + f"testfiles/A2L/{a2l_filename}", + "--json", + "--xml", + "--yaml", + "--output-dir", + f'"{Path(tempdir).resolve().as_posix()}"', + ], + ) main() # Check if files were generated diff --git a/tests/integration/test_integration_nested_includes.py b/tests/integration/test_integration_nested_includes.py index eb7aac2..36d0249 100644 --- a/tests/integration/test_integration_nested_includes.py +++ b/tests/integration/test_integration_nested_includes.py @@ -23,7 +23,7 @@ from pathlib import Path from a2lparser import A2L_PACKAGE_DIR from a2lparser.main import main -from 
tests.fixture_utils import compare_files, check_files_exist +# from tests.fixture_utils import compare_files, check_files_exist def test_integration_nested_includes(monkeypatch, compare_files, check_files_exist): @@ -34,12 +34,21 @@ def test_integration_nested_includes(monkeypatch, compare_files, check_files_exi temp_test_output_path = A2L_PACKAGE_DIR / "../testfiles" temp_test_dir_prefix = "temp_dir_output_" - with tempfile.TemporaryDirectory(dir=temp_test_output_path, prefix=temp_test_dir_prefix) as tempdir: - monkeypatch.setattr("sys.argv", [ - "a2lparser", f"testfiles/A2L/{a2l_filename}", - "--json", "--xml", "--yaml", - "--output-dir", f"\"{Path(tempdir).resolve().as_posix()}\"", - ]) + with tempfile.TemporaryDirectory( + dir=temp_test_output_path, prefix=temp_test_dir_prefix + ) as tempdir: + monkeypatch.setattr( + "sys.argv", + [ + "a2lparser", + f"testfiles/A2L/{a2l_filename}", + "--json", + "--xml", + "--yaml", + "--output-dir", + f'"{Path(tempdir).resolve().as_posix()}"', + ], + ) main() # Check if files were generated diff --git a/tests/integration/test_integration_version.py b/tests/integration/test_integration_version.py index f8f88bd..8c71683 100644 --- a/tests/integration/test_integration_version.py +++ b/tests/integration/test_integration_version.py @@ -31,7 +31,7 @@ def test_integration_version_argument(monkeypatch, capsys): Calls "a2lparser --version" and checks if the output contains the expected version information. """ # Modify sys.argv to include the version argument - monkeypatch.setattr('sys.argv', ['a2lparser', '--version']) + monkeypatch.setattr("sys.argv", ["a2lparser", "--version"]) # Call main function with pytest.raises(SystemExit): diff --git a/tests/parser/test_parser_find_includes.py b/tests/parser/test_parser_find_includes.py index 0646bfd..10e27f6 100644 --- a/tests/parser/test_parser_find_includes.py +++ b/tests/parser/test_parser_find_includes.py @@ -23,21 +23,41 @@ from a2lparser.a2lparser import A2LParser -@pytest.mark.parametrize('matching_includes, expected_filename', [ - (r'/include "C:\DATA\ECU.A2L"', r'C:\DATA\ECU.A2L'), - (r'/INCLUDE "..\includes\CANAPE_OET.A2L"', r'..\includes\CANAPE_OET.A2L'), - (r'/INCLUDE "\\server1\documents\templates\template.aml"', r'\\server1\documents\templates\template.aml'), - (r'/Include ECU_DATA.a2l', r'ECU_DATA.a2l'), - (r'/include "AML Template.aml"', r'AML Template.aml'), - (r'/include "/home/user/A2L\ Files/ECU_1221.A2L"', r'/home/user/A2L\ Files/ECU_1221.A2L'), - (r'/include "../../../blobs/TP_BLOB.a2l"', r'../../../blobs/TP_BLOB.a2l'), - (r'/begin A2ML /include "/home/user/IF_DATA/XCP.txt" STV_N /* name */ /end A2ML', r'/home/user/IF_DATA/XCP.txt'), - (r'/begin MODULE /Include My_Module.A2L UNIT 2 "" "" DERIVED /end MODULE', r'My_Module.A2L'), - (r'/begin PROJECT /include "\\Z:\tmp\ecu project.a2l" /* EOF */ /end PROJECT', r'\\Z:\tmp\ecu project.a2l'), - (r'/begin PROJECT /include "/home/ecu/header/my_header.a2l" /include my_module.a2l /end PROJECT', - [r'/home/ecu/header/my_header.a2l', r'my_module.a2l']), - (r'/begin MODULE /begin MEASUREMENT /end MEASUREMENT /end MODULE', None) -]) +@pytest.mark.parametrize( + "matching_includes, expected_filename", + [ + (r'/include "C:\DATA\ECU.A2L"', r"C:\DATA\ECU.A2L"), + (r'/INCLUDE "..\includes\CANAPE_OET.A2L"', r"..\includes\CANAPE_OET.A2L"), + ( + r'/INCLUDE "\\server1\documents\templates\template.aml"', + r"\\server1\documents\templates\template.aml", + ), + (r"/Include ECU_DATA.a2l", r"ECU_DATA.a2l"), + (r'/include "AML Template.aml"', r"AML Template.aml"), + 
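A sketch of the `--version` behaviour verified above: argparse's version action prints the version string and exits, hence the expected SystemExit:

```python
import pytest

from a2lparser.main import main


def test_version_output(monkeypatch, capsys):
    monkeypatch.setattr("sys.argv", ["a2lparser", "--version"])
    with pytest.raises(SystemExit):
        main()
    assert "a2lparser version:" in capsys.readouterr().out
```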
(r'/include "/home/user/A2L\ Files/ECU_1221.A2L"', r"/home/user/A2L\ Files/ECU_1221.A2L"), + (r'/include "../../../blobs/TP_BLOB.a2l"', r"../../../blobs/TP_BLOB.a2l"), + ( + r'/begin A2ML /include "/home/user/IF_DATA/XCP.txt" STV_N /* name */ /end A2ML', + r"/home/user/IF_DATA/XCP.txt", + ), + ( + r'/begin MODULE /Include My_Module.A2L UNIT 2 "" "" DERIVED /end MODULE', + r"My_Module.A2L", + ), + ( + r'/begin PROJECT /include "\\Z:\tmp\ecu project.a2l" /* EOF */ /end PROJECT', + r"\\Z:\tmp\ecu project.a2l", + ), + ( + ( + r'/begin PROJECT /include "/home/ecu/header/my_header.a2l" ' + r"/include my_module.a2l /end PROJECT" + ), + [r"/home/ecu/header/my_header.a2l", r"my_module.a2l"], + ), + (r"/begin MODULE /begin MEASUREMENT /end MEASUREMENT /end MODULE", None), + ], +) def test_parser_load_file_include_mechanism(matching_includes, expected_filename): """ Tests the include mechanism of the A2L parser. diff --git a/tests/parser/test_parser_load_file.py b/tests/parser/test_parser_load_file.py index 6de0873..cdadbf5 100644 --- a/tests/parser/test_parser_load_file.py +++ b/tests/parser/test_parser_load_file.py @@ -26,20 +26,6 @@ from a2lparser.a2lparser import A2LParser -@pytest.fixture -def create_file(): - """ - Fixture for creating a file inside a temporary directory for testing. - """ - def _create_file(tempdir, filename, content): - file_path = os.path.join(tempdir, filename) - with open(file_path, "w", encoding="utf-8") as file: - file.write(content) - return file_path - - return _create_file - - @pytest.fixture def a2l_content_sections_tuple() -> tuple: """ @@ -48,27 +34,30 @@ def a2l_content_sections_tuple() -> tuple: Returns: tuple: (project, module, header, characteristic, measurement, if_data) """ - project = ''' + project = """ /begin PROJECT My_Project "" /include My_Header.a2l /include "my_modules/My_Module.a2l" /end PROJECT - ''' - module = ''' + """ + module = """ /begin MODULE /include "my_characteristics/My_Characteristic.a2l" /include "my_measurements/My_Measurement.a2l" /end MODULE - ''' + """ header = '/begin HEADER "Test Project for include mechanism" "T_P1" TP1 /end HEADER' - characteristic = '/begin CHARACTERISTIC NAME "" MAP 0x7140 DAMOS_KF10.0 R_VOLTAGE 0.0 15.0 /end CHARACTERISTIC' - measurement = ''' + characteristic = ( + '/begin CHARACTERISTIC NAME "" MAP 0x7140 DAMOS_KF 10.0 R_VOLTAGE 0.0 15.0' + "/end CHARACTERISTIC" + ) + measurement = """ /begin MEASUREMENT N "" UWORD R_VOLTAGE 1 0 0.0 100.0 /include "../my_if_data/XCP_ref.a2l" /end MEASUREMENT - ''' - if_data = '/begin IF_DATA XCP LINK_MAP ref_name 0x003432 /end IF_DATA' + """ + if_data = "/begin IF_DATA XCP LINK_MAP ref_name 0x003432 /end IF_DATA" return (project, module, header, characteristic, measurement, if_data) @@ -80,12 +69,12 @@ def test_parser_load_file_simple(create_file, a2l_content_sections_tuple): _, _, _, _, measurement, if_data = a2l_content_sections_tuple # Expected output - expected_content = ''' + expected_content = """ /begin MEASUREMENT N "" UWORD R_VOLTAGE 1 0 0.0 100.0 /begin IF_DATA XCP LINK_MAP ref_name 0x003432 /end IF_DATA /end MEASUREMENT - ''' + """ # Path for temporary directory and files temp_test_output_path = A2L_PACKAGE_DIR / "../testfiles" @@ -95,7 +84,9 @@ def test_parser_load_file_simple(create_file, a2l_content_sections_tuple): parser = A2LParser() # Create temporary directory and files - with tempfile.TemporaryDirectory(dir=temp_test_output_path, prefix=temp_test_dir_prefix) as tempdir: + with tempfile.TemporaryDirectory( + dir=temp_test_output_path, 
prefix=temp_test_dir_prefix + ) as tempdir: my_measurements_dir = os.path.join(tempdir, "my_measurements") os.makedirs(my_measurements_dir) measurement_file = create_file(my_measurements_dir, "My_Measurement.a2l", measurement) @@ -119,26 +110,30 @@ def test_parser_load_file_complex(create_file, a2l_content_sections_tuple): My_Project.a2l -> My_Module.a2l -> My_Characteristic.a2l & My_Measurement.a2l """ # Test files content - (my_project, - my_module, - my_header, - my_characteristic, - my_measurement, - my_if_data) = a2l_content_sections_tuple + (my_project, my_module, my_header, my_characteristic, my_measurement, my_if_data) = ( + a2l_content_sections_tuple + ) # Expected content after includes - expected_content = ''' + expected_content = ( + """ /begin PROJECT My_Project "" /begin HEADER "Test Project for include mechanism" "T_P1" TP1 /end HEADER /begin MODULE - /begin CHARACTERISTIC NAME "" MAP 0x7140 DAMOS_KF10.0 R_VOLTAGE 0.0 15.0 /end CHARACTERISTIC + /begin CHARACTERISTIC NAME "" MAP 0x7140 DAMOS_KF 10.0 R_VOLTAGE 0.0 15.0 + /end CHARACTERISTIC /begin MEASUREMENT N "" UWORD R_VOLTAGE 1 0 0.0 100.0 /begin IF_DATA XCP LINK_MAP ref_name 0x003432 /end IF_DATA /end MEASUREMENT /end MODULE /end PROJECT - '''.replace(" ", "").replace("\n", "").replace("\t", "") + """.replace( + " ", "" + ) + .replace("\n", "") + .replace("\t", "") + ) # Path for temporary directory and files temp_test_output_path = A2L_PACKAGE_DIR / "../testfiles" @@ -148,7 +143,9 @@ def test_parser_load_file_complex(create_file, a2l_content_sections_tuple): parser = A2LParser() # Create temporary directory and files - with tempfile.TemporaryDirectory(dir=temp_test_output_path, prefix=temp_test_dir_prefix) as tempdir: + with tempfile.TemporaryDirectory( + dir=temp_test_output_path, prefix=temp_test_dir_prefix + ) as tempdir: project_file = create_file(tempdir, "My_Project.a2l", my_project) header_file = create_file(tempdir, "My_Header.a2l", my_header) assert os.path.exists(project_file) @@ -161,7 +158,9 @@ def test_parser_load_file_complex(create_file, a2l_content_sections_tuple): my_characteristics_dir = os.path.join(my_modules_dir, "my_characteristics") os.makedirs(my_characteristics_dir) - characteristic_file = create_file(my_characteristics_dir, "My_Characteristic.a2l", my_characteristic) + characteristic_file = create_file( + my_characteristics_dir, "My_Characteristic.a2l", my_characteristic + ) assert os.path.exists(characteristic_file) my_measurements_dir = os.path.join(my_modules_dir, "my_measurements") diff --git a/tests/rules/test_rules_a2ml.py b/tests/rules/test_rules_a2ml.py index 51c132f..8e90290 100644 --- a/tests/rules/test_rules_a2ml.py +++ b/tests/rules/test_rules_a2ml.py @@ -99,11 +99,11 @@ def test_rules_a2ml_full_content(): block "IF_DATA" taggedunion if_data { - /* ============================================================================================== */ - /* */ - /* ASAM XCP AML */ - /* */ - /* ============================================================================================== */ + /* ====================================================================================== */ + /* */ + /* ASAM XCP AML */ + /* */ + /* ====================================================================================== */ "XCP" struct { taggedstruct { @@ -191,14 +191,19 @@ def test_rules_a2ml_full_content(): assert asap2_version == {"VersionNo": "1", "UpgradeNo": "71"} assert project["Name"] == "ASAP2_Example" assert project["LongIdentifier"] == '""' - assert header == {"Comment": '"ASAP2 Example 
File"', - "PROJECT_NO": "P2016_09_AE_MCD_2MC_BS_V1_7_1_main", - "VERSION": '"V1.7.1"'} + assert header == { + "Comment": '"ASAP2 Example File"', + "PROJECT_NO": "P2016_09_AE_MCD_2MC_BS_V1_7_1_main", + "VERSION": '"V1.7.1"', + } assert module["A2ML"] == a2ml_content - assert module["MOD_COMMON"] == {"Comment": '""', "ALIGNMENT_BYTE": "1", - "ALIGNMENT_FLOAT32_IEEE": "4", - "ALIGNMENT_FLOAT64_IEEE": "4", - "ALIGNMENT_LONG": "4", - "ALIGNMENT_WORD": "2", - "BYTE_ORDER": "MSB_LAST", - "DEPOSIT": "ABSOLUTE"} + assert module["MOD_COMMON"] == { + "Comment": '""', + "ALIGNMENT_BYTE": "1", + "ALIGNMENT_FLOAT32_IEEE": "4", + "ALIGNMENT_FLOAT64_IEEE": "4", + "ALIGNMENT_LONG": "4", + "ALIGNMENT_WORD": "2", + "BYTE_ORDER": "MSB_LAST", + "DEPOSIT": "ABSOLUTE", + } diff --git a/tests/rules/test_rules_annotation.py b/tests/rules/test_rules_annotation.py index 7ce687e..484cbf3 100644 --- a/tests/rules/test_rules_annotation.py +++ b/tests/rules/test_rules_annotation.py @@ -22,7 +22,7 @@ from a2lparser.a2l.a2l_yacc import A2LYacc -def test_rules_annotation(): +def test_rules_annotation(): # sourcery skip: extract-duplicate-method """ Tests parsing a valid "ANNOTATION" block. """ @@ -54,7 +54,11 @@ def test_rules_annotation(): annotation_1 = annotation[0] assert annotation_1["ANNOTATION_LABEL"] == '"valid_section_1"' assert annotation_1["ANNOTATION_ORIGIN"] == '"first block origin"' - assert annotation_1["ANNOTATION_TEXT"] == ['"string_literal_1"', '"STRING_LITERAL_2"', '"STRING LITERAL 3"'] + assert annotation_1["ANNOTATION_TEXT"] == [ + '"string_literal_1"', + '"STRING_LITERAL_2"', + '"STRING LITERAL 3"', + ] annotation_2 = annotation[1] assert annotation_2["ANNOTATION_LABEL"] == '"valid_section_2"' diff --git a/tests/rules/test_rules_axis_descr.py b/tests/rules/test_rules_axis_descr.py index e021f92..daf76b4 100644 --- a/tests/rules/test_rules_axis_descr.py +++ b/tests/rules/test_rules_axis_descr.py @@ -127,4 +127,7 @@ def test_rules_axis_descr_full(): assert axis_descr["ANNOTATION"][0]["ANNOTATION_TEXT"] == ['"ANNOTATION_TEXT_BLOCK_1"'] assert axis_descr["ANNOTATION"][1]["ANNOTATION_LABEL"] == '"ANNOTATION_LABEL_BLOCK_2"' assert axis_descr["ANNOTATION"][1]["ANNOTATION_ORIGIN"] == '"ANNOTATION_ORIGIN_BLOCK_2"' - assert axis_descr["ANNOTATION"][1]["ANNOTATION_TEXT"] == ['"ANNOTATION_TEXT_BLOCK_2_1"', '"ANNOTATION_TEXT_BLOCK_2_2"'] + assert axis_descr["ANNOTATION"][1]["ANNOTATION_TEXT"] == [ + '"ANNOTATION_TEXT_BLOCK_2_1"', + '"ANNOTATION_TEXT_BLOCK_2_2"', + ] diff --git a/tests/rules/test_rules_blob.py b/tests/rules/test_rules_blob.py index 4b30638..4314fc9 100644 --- a/tests/rules/test_rules_blob.py +++ b/tests/rules/test_rules_blob.py @@ -110,14 +110,18 @@ def test_rules_blob_full(): assert blob["CALIBRATION_ACCESS"] == "NOT_IN_MCD_SYSTEM" assert blob["DISPLAY_IDENTIFIER"] == "Data_Blob_Display" assert blob["ECU_ADDRESS_EXTENSION"] == "0" - assert blob["MAX_REFRESH"] == {'ScalingUnit': '5', 'Rate': '15'} + assert blob["MAX_REFRESH"] == {"ScalingUnit": "5", "Rate": "15"} assert blob["MODEL_LINK"] == '"binary/blobs/TP_BLOB.obj"' - assert blob["SYMBOL_LINK"] == {'SymbolName': '"_XCP_BLOB"', 'Offset': '256'} - assert blob["ANNOTATION"][0] == {'ANNOTATION_LABEL': '"Data_Blob Description"', - 'ANNOTATION_TEXT': ['"Data_Blob_Description"', '"Data_Blob_Placeholder"']} - assert blob["ANNOTATION"][1] == {'ANNOTATION_LABEL': '"MODEL_LINK DESCRIPTION"', - 'ANNOTATION_ORIGIN': '"MODEL_LINK ORIGIN"', - 'ANNOTATION_TEXT': ['"SwcBlobPlaceholder_1"', '"SwcBlobPlaceholder_2"']} + assert blob["SYMBOL_LINK"] == 
{"SymbolName": '"_XCP_BLOB"', "Offset": "256"} + assert blob["ANNOTATION"][0] == { + "ANNOTATION_LABEL": '"Data_Blob Description"', + "ANNOTATION_TEXT": ['"Data_Blob_Description"', '"Data_Blob_Placeholder"'], + } + assert blob["ANNOTATION"][1] == { + "ANNOTATION_LABEL": '"MODEL_LINK DESCRIPTION"', + "ANNOTATION_ORIGIN": '"MODEL_LINK ORIGIN"', + "ANNOTATION_TEXT": ['"SwcBlobPlaceholder_1"', '"SwcBlobPlaceholder_2"'], + } # IF_DATA Asserts assert len(blob["IF_DATA"]) == 2 @@ -128,14 +132,37 @@ def test_rules_blob_full(): assert if_data_odb["Name"] == "ODB" assert if_data_odb["BINARY_SOURCE"]["Name"] == "BINARY_SOURCE" - assert if_data_odb["BINARY_SOURCE"]["DataParams"] == ['"BINARY_BLOB_ARRAY"', '103', '1', 'QP_BLOB', - '0x100', '1', '23', '1952251460', '1020', '2952232964'] + assert if_data_odb["BINARY_SOURCE"]["DataParams"] == [ + '"BINARY_BLOB_ARRAY"', + "103", + "1", + "QP_BLOB", + "0x100", + "1", + "23", + "1952251460", + "1020", + "2952232964", + ] assert if_data_odb["TP_BLOB"]["Name"] == "TP_BLOB" - assert if_data_odb["TP_BLOB"]["DataParams"] == ['"TP_BLOB_ARRAY"', 'HEX_PAGE', '0x400', '0xAFF7C84C', '0xDC'] + assert if_data_odb["TP_BLOB"]["DataParams"] == [ + '"TP_BLOB_ARRAY"', + "HEX_PAGE", + "0x400", + "0xAFF7C84C", + "0xDC", + ] assert if_data_odb["TP_BLOB"]["TRIGGER_BLOB"]["Name"] == "TRIGGER_BLOB" - assert if_data_odb["TP_BLOB"]["TRIGGER_BLOB"]["DataParams"] == ['0xFF', '0x63', '0xCF', '0x7F', '0x81', '0x84'] + assert if_data_odb["TP_BLOB"]["TRIGGER_BLOB"]["DataParams"] == [ + "0xFF", + "0x63", + "0xCF", + "0x7F", + "0x81", + "0x84", + ] assert if_data_xcp["Name"] == "XCP_TEST" - assert if_data_xcp["DataParams"] == ['0xFF', '0xFF00'] + assert if_data_xcp["DataParams"] == ["0xFF", "0xFF00"] assert if_data_xcp["XCP_BLOB"]["Name"] == "XCP_BLOB" - assert if_data_xcp["XCP_BLOB"]["DataParams"] == ['0xAAFFEE00'] + assert if_data_xcp["XCP_BLOB"]["DataParams"] == ["0xAAFFEE00"] diff --git a/tests/rules/test_rules_calibration_method.py b/tests/rules/test_rules_calibration_method.py index 2e14c7f..3877062 100644 --- a/tests/rules/test_rules_calibration_method.py +++ b/tests/rules/test_rules_calibration_method.py @@ -63,7 +63,13 @@ def test_rules_calibration_method(): assert len(calibration_method_handle) == 3 assert calibration_method_handle[0]["Handle"] == ["0", "0xBF000000", "0x10000"] assert calibration_method_handle[1]["Handle"] == ["1", "0xBF010000", "0x20000"] - assert calibration_method_handle[2]["Handle"] == ["0x10000", "0x200", "0x4", "0x10000", "0x10000"] + assert calibration_method_handle[2]["Handle"] == [ + "0x10000", + "0x200", + "0x4", + "0x10000", + "0x10000", + ] calibration_handle_text = calibration_method_handle[2]["CALIBRATION_HANDLE_TEXT"].Text assert calibration_handle_text == '"Nmot"' diff --git a/tests/rules/test_rules_characteristic.py b/tests/rules/test_rules_characteristic.py index cfd1e4b..55c5a6b 100644 --- a/tests/rules/test_rules_characteristic.py +++ b/tests/rules/test_rules_characteristic.py @@ -212,7 +212,7 @@ def test_rules_characteristic_full(): assert characteristic["EXTENDED_LIMITS"]["UpperLimit"] == "4000.0" assert characteristic["FUNCTION_LIST"]["Name"] == ["ID_ADJUSTM", "FL_ADJUSTM", "SPEED_LIM"] assert characteristic["MAP_LIST"]["Name"] == ["one", "two", "three"] - assert characteristic["MATRIX_DIM"] == ['2', '4', '3'] + assert characteristic["MATRIX_DIM"] == ["2", "4", "3"] assert characteristic["MAX_REFRESH"]["ScalingUnit"] == "3" assert characteristic["MAX_REFRESH"]["Rate"] == "15" assert characteristic["SYMBOL_LINK"]["SymbolName"] == 
'"_VehicleSpeed"' @@ -220,10 +220,20 @@ def test_rules_characteristic_full(): assert characteristic["VIRTUAL_CHARACTERISTIC"]["FORMULA"] == '"sin(X1)"' assert characteristic["VIRTUAL_CHARACTERISTIC"]["CHARACTERISTIC"] == ["B"] assert len(characteristic["ANNOTATION"]) == 2 - assert characteristic["ANNOTATION"][0]["ANNOTATION_LABEL"] == '"ANNOTATION_LABEL_CHARACTERISTIC_1"' - assert characteristic["ANNOTATION"][0]["ANNOTATION_ORIGIN"] == '"ANNOTATION_ORIGIN_CHARACTERISTIC_1"' - assert characteristic["ANNOTATION"][1]["ANNOTATION_LABEL"] == '"ANNOTATION_LABEL_CHARACTERISTIC_2"' - assert characteristic["ANNOTATION"][1]["ANNOTATION_ORIGIN"] == '"ANNOTATION_ORIGIN_CHARACTERISTIC_2"' + assert ( + characteristic["ANNOTATION"][0]["ANNOTATION_LABEL"] == '"ANNOTATION_LABEL_CHARACTERISTIC_1"' + ) + assert ( + characteristic["ANNOTATION"][0]["ANNOTATION_ORIGIN"] + == '"ANNOTATION_ORIGIN_CHARACTERISTIC_1"' + ) + assert ( + characteristic["ANNOTATION"][1]["ANNOTATION_LABEL"] == '"ANNOTATION_LABEL_CHARACTERISTIC_2"' + ) + assert ( + characteristic["ANNOTATION"][1]["ANNOTATION_ORIGIN"] + == '"ANNOTATION_ORIGIN_CHARACTERISTIC_2"' + ) assert characteristic["ANNOTATION"][1]["ANNOTATION_TEXT"] == ['"ANNOTATION_TEXT_CHARACERISTIC"'] assert len(characteristic["AXIS_DESCR"]) == 2 assert characteristic["AXIS_DESCR"][0]["Attribute"] == "STD_AXIS" @@ -252,12 +262,28 @@ def test_rules_characteristic_full(): assert characteristic["AXIS_DESCR"][0]["FIX_AXIS_PAR_DIST"]["Numberapo"] == "8" assert characteristic["AXIS_DESCR"][0]["FIX_AXIS_PAR_LIST"]["AxisPts_Value"] == ["2", "5", "9"] assert len(characteristic["AXIS_DESCR"][0]["ANNOTATION"]) == 2 - assert characteristic["AXIS_DESCR"][0]["ANNOTATION"][0]["ANNOTATION_LABEL"] == '"AXIS_DESCR_LABEL_BLOCK_1"' - assert characteristic["AXIS_DESCR"][0]["ANNOTATION"][0]["ANNOTATION_ORIGIN"] == '"AXIS_DESCR_ORIGIN_BLOCK_1"' - assert characteristic["AXIS_DESCR"][0]["ANNOTATION"][0]["ANNOTATION_TEXT"] == ['"AXIS_DESCR_TEXT_BLOCK_1"'] - assert characteristic["AXIS_DESCR"][0]["ANNOTATION"][1]["ANNOTATION_LABEL"] == '"AXIS_DESCR_LABEL_BLOCK_2"' - assert characteristic["AXIS_DESCR"][0]["ANNOTATION"][1]["ANNOTATION_ORIGIN"] == '"AXIS_DESCR_ORIGIN_BLOCK_2"' - assert characteristic["AXIS_DESCR"][0]["ANNOTATION"][1]["ANNOTATION_TEXT"] == ['"AXIS_DESCR_TEXT_BLOCK_2"'] + assert ( + characteristic["AXIS_DESCR"][0]["ANNOTATION"][0]["ANNOTATION_LABEL"] + == '"AXIS_DESCR_LABEL_BLOCK_1"' + ) + assert ( + characteristic["AXIS_DESCR"][0]["ANNOTATION"][0]["ANNOTATION_ORIGIN"] + == '"AXIS_DESCR_ORIGIN_BLOCK_1"' + ) + assert characteristic["AXIS_DESCR"][0]["ANNOTATION"][0]["ANNOTATION_TEXT"] == [ + '"AXIS_DESCR_TEXT_BLOCK_1"' + ] + assert ( + characteristic["AXIS_DESCR"][0]["ANNOTATION"][1]["ANNOTATION_LABEL"] + == '"AXIS_DESCR_LABEL_BLOCK_2"' + ) + assert ( + characteristic["AXIS_DESCR"][0]["ANNOTATION"][1]["ANNOTATION_ORIGIN"] + == '"AXIS_DESCR_ORIGIN_BLOCK_2"' + ) + assert characteristic["AXIS_DESCR"][0]["ANNOTATION"][1]["ANNOTATION_TEXT"] == [ + '"AXIS_DESCR_TEXT_BLOCK_2"' + ] assert characteristic["AXIS_DESCR"][1]["Attribute"] == "STD_AXIS" assert characteristic["AXIS_DESCR"][1]["InputQuantity"] == "AMOUNT" assert characteristic["AXIS_DESCR"][1]["CONVERSION"] == "CON_ME" diff --git a/tests/rules/test_rules_def_characteristic.py b/tests/rules/test_rules_def_characteristic.py index 40a2984..f31ff49 100644 --- a/tests/rules/test_rules_def_characteristic.py +++ b/tests/rules/test_rules_def_characteristic.py @@ -34,5 +34,6 @@ def test_rules_def_characteristic(): """ ast = 
A2LYacc().generate_ast(characteristic_block)
     assert ast
-    assert ast["DEF_CHARACTERISTIC"] == {'Identifier':
-                                         ['INJECTION_CURVE', 'DELAY_FACTOR', 'RANDOM_FACTOR']}
+    assert ast["DEF_CHARACTERISTIC"] == {
+        "Identifier": ["INJECTION_CURVE", "DELAY_FACTOR", "RANDOM_FACTOR"]
+    }
diff --git a/tests/rules/test_rules_function.py b/tests/rules/test_rules_function.py
index e323cf6..e91246c 100644
--- a/tests/rules/test_rules_function.py
+++ b/tests/rules/test_rules_function.py
@@ -93,9 +93,15 @@ def test_rules_function():
     assert function["FUNCTION_VERSION"] == '"BG5.0815"'
     assert function["DEF_CHARACTERISTIC"]["Identifier"] == ["INJECTION_CURVE", "DELAY_FACTOR"]
     assert function["IN_MEASUREMENT"]["Identifier"] == ["WHEEL_REVOLUTIONS", "ENGINE_SPEED"]
-    assert function["LOC_MEASUREMENT"]["Identifier"] == ["SsmInLastWinsThreshold", "SsmInThrottlePedalPosition"]
+    assert function["LOC_MEASUREMENT"]["Identifier"] == [
+        "SsmInLastWinsThreshold",
+        "SsmInThrottlePedalPosition",
+    ]
     assert function["OUT_MEASUREMENT"]["Identifier"] == ["OK_FLAG", "SENSOR_FLAG"]
-    assert function["REF_CHARACTERISTIC"]["Identifier"] == ["ENG_SPEED_CORR_CURVE", "ENG_SPEED_CORR_CURVE_STD"]
+    assert function["REF_CHARACTERISTIC"]["Identifier"] == [
+        "ENG_SPEED_CORR_CURVE",
+        "ENG_SPEED_CORR_CURVE_STD",
+    ]
     assert function["AR_COMPONENT"]["ComponentType"] == '"ApplicationSwComponentType"'
     assert function["AR_COMPONENT"]["AR_PROTOTYPE_OF"] == '"HANDLE"'
     assert function["ANNOTATION"][0]["ANNOTATION_LABEL"] == '"ANNOTATION_LABEL_BLOCK_1"'
@@ -117,6 +123,6 @@ def test_rules_function():
     assert if_data_map
     assert if_data_xcp
     assert if_data_map["Name"] == "MAP_REF_ADDR"
-    assert if_data_map["DataParams"] == ['LINK_MAP', 'ref_name', '0x003432']
+    assert if_data_map["DataParams"] == ["LINK_MAP", "ref_name", "0x003432"]
     assert if_data_xcp["Name"] == "XCP_REF_ADDR"
-    assert if_data_xcp["DataParams"] == ['XCP_REF_MAP', '0x00332266']
+    assert if_data_xcp["DataParams"] == ["XCP_REF_MAP", "0x00332266"]
diff --git a/tests/rules/test_rules_group.py b/tests/rules/test_rules_group.py
index 5c6803d..2adb043 100644
--- a/tests/rules/test_rules_group.py
+++ b/tests/rules/test_rules_group.py
@@ -79,8 +79,14 @@ def test_rules_group():
     assert group["GroupLongIdentifier"] == '"Subsystem"'
     assert group["ROOT"] is True
     assert group["FUNCTION_LIST"]["Name"] == ["ID_ADJUSTM", "FL_ADJUSTM", "SPEED_LIM"]
-    assert group["REF_CHARACTERISTIC"]["Identifier"] == ["ENG_SPEED_CORR_CURVE", "XAS_ENG_SPEED_CORR_CURVE"]
-    assert group["REF_MEASUREMENT"]["Identifier"] == ["CustBsw_Init_Current_Loop", "CustBsw_Init_Previous_Loop"]
+    assert group["REF_CHARACTERISTIC"]["Identifier"] == [
+        "ENG_SPEED_CORR_CURVE",
+        "XAS_ENG_SPEED_CORR_CURVE",
+    ]
+    assert group["REF_MEASUREMENT"]["Identifier"] == [
+        "CustBsw_Init_Current_Loop",
+        "CustBsw_Init_Previous_Loop",
+    ]
     assert group["SUB_GROUP"]["Identifier"] == ["AAS", "LRG", "Sar"]
     assert group["ANNOTATION"][0]["ANNOTATION_LABEL"] == '"ANNOTATION_LABEL_BLOCK_1"'
     assert group["ANNOTATION"][0]["ANNOTATION_ORIGIN"] == '"ANNOTATION_ORIGIN_BLOCK_1"'
@@ -94,6 +100,6 @@ def test_rules_group():
     assert if_data_map
     assert if_data_xcp
     assert if_data_map["Name"] == "MAP_REF_ADDR"
-    assert if_data_map["DataParams"] == ['LINK_MAP', 'ref_name', '0x003432']
+    assert if_data_map["DataParams"] == ["LINK_MAP", "ref_name", "0x003432"]
     assert if_data_xcp["Name"] == "XCP_REF_ADDR"
-    assert if_data_xcp["DataParams"] == ['XCP_REF_MAP', '0x00332266']
+    assert if_data_xcp["DataParams"] == ["XCP_REF_MAP", "0x00332266"]
diff --git a/tests/rules/test_rules_if_data.py b/tests/rules/test_rules_if_data.py
index 13f946a..2035a3e 100644
--- a/tests/rules/test_rules_if_data.py
+++ b/tests/rules/test_rules_if_data.py
@@ -79,7 +79,10 @@ def test_rules_if_data():

     daq_timestamp_supported = if_data_daq["TIMESTAMP_SUPPORTED"]
     assert daq_timestamp_supported["Name"] == "TIMESTAMP_SUPPORTED"
-    assert daq_timestamp_supported["DataParams"] == ["TIMESTAMP_SUPPORTED_PARAM_FIRST", "TIMESTAMP_SUPPORTED_PARAM_LAST"]
+    assert daq_timestamp_supported["DataParams"] == [
+        "TIMESTAMP_SUPPORTED_PARAM_FIRST",
+        "TIMESTAMP_SUPPORTED_PARAM_LAST",
+    ]

     daq_event = daq_timestamp_supported["EVENT"]
     assert daq_event["Name"] == "EVENT"
@@ -151,7 +154,9 @@ def test_rules_if_data_including_keywords():

         /begin DISTAB_CFG 0xD 0x122 0x2 0x0 0x0 TRG_MOD 0x0
         /end DISTAB_CFG
-        ETK_CFG 0x10 0x1D 0x61 0x1 0x1 0xFF 0xFF 0x63 0xCF 0x7F 0x81 0x84 0x79 0x64 0xB 0x65 0x8C 0x66 0xA0 0x67 0x91
+        ETK_CFG 0x10 0x1D 0x61 0x1 0x1 0xFF
+            0xFF 0x63 0xCF 0x7F 0x81 0x84 0x79
+            0x64 0xB 0x65 0x8C 0x66 0xA0 0x67 0x91
         ETK_MAILBOX 0x11223344
         EXRAM 0xAFF7FF00 0xFF
         EXRAM 0xAFF7FF00 0xFF
@@ -345,7 +350,15 @@ def test_rules_if_data_including_keywords():
     tp_blob_distab_cfg = tp_blob["DISTAB_CFG"]
     assert tp_blob_distab_cfg
     assert tp_blob_distab_cfg["Name"] == "DISTAB_CFG"
-    assert tp_blob_distab_cfg["DataParams"] == ["0xD", "0x122", "0x2", "0x0", "0x0", "TRG_MOD", "0x0"]
+    assert tp_blob_distab_cfg["DataParams"] == [
+        "0xD",
+        "0x122",
+        "0x2",
+        "0x0",
+        "0x0",
+        "TRG_MOD",
+        "0x0",
+    ]


 def test_rules_if_data_empty_ident_block():
@@ -413,20 +426,49 @@ def test_rules_if_data_empty_ident_block():
     daq = if_data["DAQ"]
     assert daq["Name"] == "DAQ"
-    assert daq["DataParams"] == ["STATIC", "0x0004", "GRANULARITY_ENTRY_SIZE_DAQ_BYTE", "0x04", "NO_OVERLOAD_INDICATION"]
+    assert daq["DataParams"] == [
+        "STATIC",
+        "0x0004",
+        "GRANULARITY_ENTRY_SIZE_DAQ_BYTE",
+        "0x04",
+        "NO_OVERLOAD_INDICATION",
+    ]
     assert daq["DAQ_LIST"]

     daq_list = daq["DAQ_LIST"]
     assert len(daq_list) == 3

     assert daq_list[0]["Name"] == "DAQ_LIST"
-    assert daq_list[0]["DataParams"] == ["DAQ_LIST_TYPE", "DAQ", "MAX_ODT", "0x01", "EVENT_FIXED", "0x1001"]
+    assert daq_list[0]["DataParams"] == [
+        "DAQ_LIST_TYPE",
+        "DAQ",
+        "MAX_ODT",
+        "0x01",
+        "EVENT_FIXED",
+        "0x1001",
+    ]
     assert daq_list[0]["PREDEFINED"]
     assert daq_list[0]["PREDEFINED"]["ODT"]["DataParams"] == ["0x00", "ODT_ENTRY", "0x04"]
     assert daq_list[1]["Name"] == "DAQ_LIST"
-    assert daq_list[1]["DataParams"] == ["0x01", "DAQ_LIST_TYPE", "DAQ", "MAX_ODT", "0x10", "EVENT_FIXED", "0x2001"]
+    assert daq_list[1]["DataParams"] == [
+        "0x01",
+        "DAQ_LIST_TYPE",
+        "DAQ",
+        "MAX_ODT",
+        "0x10",
+        "EVENT_FIXED",
+        "0x2001",
+    ]
     assert daq_list[1]["PREDEFINED"]
     assert daq_list[2]["Name"] == "DAQ_LIST"
-    assert daq_list[2]["DataParams"] == ["0x02", "DAQ_LIST_TYPE", "DAQ", "MAX_ODT", "0x00", "EVENT_FIXED", "0x4001"]
+    assert daq_list[2]["DataParams"] == [
+        "0x02",
+        "DAQ_LIST_TYPE",
+        "DAQ",
+        "MAX_ODT",
+        "0x00",
+        "EVENT_FIXED",
+        "0x4001",
+    ]
     assert daq_list[2]["PREDEFINED"]

     daq_event = daq["EVENT"]
diff --git a/tests/rules/test_rules_instance.py b/tests/rules/test_rules_instance.py
index b84e116..08b41f8 100644
--- a/tests/rules/test_rules_instance.py
+++ b/tests/rules/test_rules_instance.py
@@ -111,22 +111,26 @@ def test_rules_instance_full():
     assert instance["LAYOUT"] == "ALTERNATE_WITH_X"
     assert instance["MODEL_LINK"] == '"system/referer/SwcXcpInstance.referer"'
     assert instance["READ_WRITE"] is True
-    assert instance["MATRIX_DIM"] == ['2', '4']
-    assert instance["MAX_REFRESH"] == {'ScalingUnit': '120', 'Rate': '250'}
-    assert instance["SYMBOL_LINK"] == {'SymbolName': '"_InstanceReference"', 'Offset': '0'}
+    assert instance["MATRIX_DIM"] == ["2", "4"]
+    assert instance["MAX_REFRESH"] == {"ScalingUnit": "120", "Rate": "250"}
+    assert instance["SYMBOL_LINK"] == {"SymbolName": '"_InstanceReference"', "Offset": "0"}
     assert len(instance["IF_DATA"]) == 2
     if_data_map = instance["IF_DATA"][0]
     if_data_xcp = instance["IF_DATA"][1]
     assert if_data_map
     assert if_data_xcp
     assert if_data_map["Name"] == "MAP_REF_ADDR"
-    assert if_data_map["DataParams"] == ['LINK_MAP', 'ref_name', '0x003432']
+    assert if_data_map["DataParams"] == ["LINK_MAP", "ref_name", "0x003432"]
     assert if_data_xcp["Name"] == "XCP_REF_ADDR"
-    assert if_data_xcp["DataParams"] == ['XCP_REF_MAP', '0x00332266']
+    assert if_data_xcp["DataParams"] == ["XCP_REF_MAP", "0x00332266"]
     assert len(instance["ANNOTATION"]) == 2
-    assert instance["ANNOTATION"][0] == {'ANNOTATION_LABEL': '"INSTANCE_ANNOTATION"'}
-    assert instance["ANNOTATION"][1] == {'ANNOTATION_TEXT':
-                                         ['"INSTANCE_ANNOTATION_DESCRIPTION"', '"This is an instance description"']}
+    assert instance["ANNOTATION"][0] == {"ANNOTATION_LABEL": '"INSTANCE_ANNOTATION"'}
+    assert instance["ANNOTATION"][1] == {
+        "ANNOTATION_TEXT": [
+            '"INSTANCE_ANNOTATION_DESCRIPTION"',
+            '"This is an instance description"',
+        ]
+    }
     assert len(instance["OVERWRITE"]) == 2
     overwrite_1 = instance["OVERWRITE"][0]
     overwrite_2 = instance["OVERWRITE"][1]
@@ -136,9 +140,9 @@ def test_rules_instance_full():
     assert overwrite_2["Name"] == "XcpInstance"
     assert overwrite_2["AxisNumber"] == "3"
     assert overwrite_2["CONVERSION"] == "ConversionMethod1"
-    assert overwrite_2["EXTENDED_LIMITS"] == {'LowerLimit': '0', 'UpperLimit': '200'}
+    assert overwrite_2["EXTENDED_LIMITS"] == {"LowerLimit": "0", "UpperLimit": "200"}
     assert overwrite_2["FORMAT"] == '"%.2f"'
     assert overwrite_2["INPUT_QUANTITY"] == "Speed2"
-    assert overwrite_2["LIMITS"] == {'LowerLimit': '0', 'UpperLimit': '160'}
+    assert overwrite_2["LIMITS"] == {"LowerLimit": "0", "UpperLimit": "160"}
     assert overwrite_2["MONOTONY"] == "STRICT_DECREASE"
     assert overwrite_2["PHYS_UNIT"] == '"km/h"'
diff --git a/tests/rules/test_rules_measurement.py b/tests/rules/test_rules_measurement.py
index 461611c..a6f98e2 100644
--- a/tests/rules/test_rules_measurement.py
+++ b/tests/rules/test_rules_measurement.py
@@ -141,24 +141,39 @@ def test_rules_measurement_full():
     assert measurement["FORMAT"] == '"%.3"'
     assert measurement["FUNCTION_LIST"]["Name"] == ["FNC_VAL_1", "FNC_VAL_2", "FNC_VAL_3"]
     assert measurement["LAYOUT"] == "ALTERNATE_WITH_Y"
-    assert measurement["MATRIX_DIM"] == ["2", "4", "3"]
-    assert measurement["MAX_REFRESH"] == {'ScalingUnit': '998', 'Rate': '2'}
+    assert measurement["MATRIX_DIM"] == ["2", "4", "3"]
+    assert measurement["MAX_REFRESH"] == {"ScalingUnit": "998", "Rate": "2"}
     assert measurement["MODEL_LINK"] == '"system/measurement/ecu_x79c13.obj"'
     assert measurement["PHYS_UNIT"] == '"Nm"'
     assert measurement["READ_WRITE"] is True
     assert measurement["REF_MEMORY_SEGMENT"] == "Data1"
-    assert measurement["SYMBOL_LINK"] == {'SymbolName': '"_VehicleSpeed"', 'Offset': '0'}
-    assert measurement["VIRTUAL"] == {'MeasuringChannel': ['PHI_BASIS', 'PHI_CORR']}
-    assert measurement["BIT_OPERATION"] == {'RIGHT_SHIFT': {'Bitcount': '4'}, 'SIGN_EXTEND': {'Boolean': True}}
+    assert measurement["SYMBOL_LINK"] == {"SymbolName": '"_VehicleSpeed"', "Offset": "0"}
+    assert measurement["VIRTUAL"] == {"MeasuringChannel": ["PHI_BASIS", "PHI_CORR"]}
+    assert measurement["BIT_OPERATION"] == {
+        "RIGHT_SHIFT": {"Bitcount": "4"},
+        "SIGN_EXTEND": {"Boolean": True},
+    }

     assert len(measurement["IF_DATA"]) == 2
     assert measurement["IF_DATA"][0]["Name"] == "CANAPE_EXT"
-    assert measurement["IF_DATA"][0]["DataParams"] == ['100', 'LINK_MAP',
-                                                       '"xxx79c13e523bc16dfbba3285.x794ec36d9751f96100"',
-                                                       '0x2D474', '0x0', '0', '0x0', '1', '0xCF', '0x0',
-                                                       'DISPLAY', '0', '-36044.75', '36043.75']
+    assert measurement["IF_DATA"][0]["DataParams"] == [
+        "100",
+        "LINK_MAP",
+        '"xxx79c13e523bc16dfbba3285.x794ec36d9751f96100"',
+        "0x2D474",
+        "0x0",
+        "0",
+        "0x0",
+        "1",
+        "0xCF",
+        "0x0",
+        "DISPLAY",
+        "0",
+        "-36044.75",
+        "36043.75",
+    ]
     assert measurement["IF_DATA"][1]["Name"] == "CANAPE"
-    assert measurement["IF_DATA"][1]["DataParams"] == ['DISPLAY', '0', '-36044.75', '36043.75']
+    assert measurement["IF_DATA"][1]["DataParams"] == ["DISPLAY", "0", "-36044.75", "36043.75"]

     assert len(measurement["ANNOTATION"]) == 2
     assert measurement["ANNOTATION"][0]["ANNOTATION_LABEL"] == '"ANNOTATION_LABEL_BLOCK_1"'
@@ -185,7 +200,7 @@ def test_rules_in_measurement():

     in_measurement = ast["IN_MEASUREMENT"]
     assert in_measurement
-    assert in_measurement["Identifier"] == ['WHEEL_REVOLUTIONS', 'ENGINE_SPEED', 'RESOLUTION_TICKS']
+    assert in_measurement["Identifier"] == ["WHEEL_REVOLUTIONS", "ENGINE_SPEED", "RESOLUTION_TICKS"]


 def test_rules_out_measurement():
@@ -204,4 +219,9 @@ def test_rules_out_measurement():

     out_measurement = ast["OUT_MEASUREMENT"]
     assert out_measurement
-    assert out_measurement["Identifier"] == ['OK_FLAG', 'SENSOR_FLAG', 'FAILURE_FLAG', 'GLOBAL_FLAG']
+    assert out_measurement["Identifier"] == [
+        "OK_FLAG",
+        "SENSOR_FLAG",
+        "FAILURE_FLAG",
+        "GLOBAL_FLAG",
+    ]
diff --git a/tests/rules/test_rules_memory_layout.py b/tests/rules/test_rules_memory_layout.py
index bbb49ab..e873647 100644
--- a/tests/rules/test_rules_memory_layout.py
+++ b/tests/rules/test_rules_memory_layout.py
@@ -52,6 +52,6 @@ def test_rules_memory_layout():
     assert memory_layout["Offset"] == ["-1", "-1", "-1", "-1", "-1"]
     assert len(memory_layout["IF_DATA"]) == 2
     assert memory_layout["IF_DATA"][0]["Name"] == "XCP"
-    assert memory_layout["IF_DATA"][0]["DataParams"] == ['LINK_MAP', 'ref_name', '0x003432']
+    assert memory_layout["IF_DATA"][0]["DataParams"] == ["LINK_MAP", "ref_name", "0x003432"]
     assert memory_layout["IF_DATA"][1]["Name"] == "CANAPE"
-    assert memory_layout["IF_DATA"][1]["DataParams"] == ['STATIC', 'ref_name', '0xFF']
+    assert memory_layout["IF_DATA"][1]["DataParams"] == ["STATIC", "ref_name", "0xFF"]
diff --git a/tests/rules/test_rules_memory_segment.py b/tests/rules/test_rules_memory_segment.py
index 3ace12c..d7ed4f3 100644
--- a/tests/rules/test_rules_memory_segment.py
+++ b/tests/rules/test_rules_memory_segment.py
@@ -60,6 +60,6 @@ def test_rules_memory_segment():
     assert memory_segment["Offset"] == ["-1", "-1", "-1", "-1", "-1"]
     assert len(memory_segment["IF_DATA"]) == 2
     assert memory_segment["IF_DATA"][0]["Name"] == "XCP"
-    assert memory_segment["IF_DATA"][0]["DataParams"] == ['LINK_MAP', 'ref_name', '0x003432']
+    assert memory_segment["IF_DATA"][0]["DataParams"] == ["LINK_MAP", "ref_name", "0x003432"]
     assert memory_segment["IF_DATA"][1]["Name"] == "CANAPE"
-    assert memory_segment["IF_DATA"][1]["DataParams"] == ['STATIC', 'ref_name', '0xFF']
+    assert memory_segment["IF_DATA"][1]["DataParams"] == ["STATIC", "ref_name", "0xFF"]
diff --git a/tests/rules/test_rules_mod_common.py b/tests/rules/test_rules_mod_common.py
index b86181a..10fc385 100644
--- a/tests/rules/test_rules_mod_common.py
+++ b/tests/rules/test_rules_mod_common.py
@@ -47,14 +47,14 @@ def test_rules_mod_common():

     mod_common = ast["MOD_COMMON"]
     assert mod_common
-    assert mod_common['Comment'] == '"COMMENT"'
-    assert mod_common['ALIGNMENT_BYTE'] == '1'
-    assert mod_common['ALIGNMENT_FLOAT16_IEEE'] == '2'
-    assert mod_common['ALIGNMENT_FLOAT32_IEEE'] == '4'
-    assert mod_common['ALIGNMENT_FLOAT64_IEEE'] == '4'
-    assert mod_common['ALIGNMENT_INT64'] == '8'
-    assert mod_common['ALIGNMENT_LONG'] == '4'
-    assert mod_common['ALIGNMENT_WORD'] == '2'
-    assert mod_common['BYTE_ORDER'] == 'MSB_FIRST'
-    assert mod_common['DATA_SIZE'] == '16'
-    assert mod_common['DEPOSIT'] == 'ABSOLUTE'
+    assert mod_common["Comment"] == '"COMMENT"'
+    assert mod_common["ALIGNMENT_BYTE"] == "1"
+    assert mod_common["ALIGNMENT_FLOAT16_IEEE"] == "2"
+    assert mod_common["ALIGNMENT_FLOAT32_IEEE"] == "4"
+    assert mod_common["ALIGNMENT_FLOAT64_IEEE"] == "4"
+    assert mod_common["ALIGNMENT_INT64"] == "8"
+    assert mod_common["ALIGNMENT_LONG"] == "4"
+    assert mod_common["ALIGNMENT_WORD"] == "2"
+    assert mod_common["BYTE_ORDER"] == "MSB_FIRST"
+    assert mod_common["DATA_SIZE"] == "16"
+    assert mod_common["DEPOSIT"] == "ABSOLUTE"
diff --git a/tests/rules/test_rules_module.py b/tests/rules/test_rules_module.py
index 220db58..b4d2a36 100644
--- a/tests/rules/test_rules_module.py
+++ b/tests/rules/test_rules_module.py
@@ -36,9 +36,11 @@ def test_rules_module():
         /begin AXIS_PTS STV_N "first block" 0x9876 N DAMOS_SST 100.0 R_SPEED 21 0.0 5800.0
         /end AXIS_PTS

-        /begin CHARACTERISTIC PUMKF "Pump characteristic map" MAP 0x7140 DAMOS_KF 100.0 R_VOLTAGE 0.0 5000.0
+        /begin CHARACTERISTIC PUMKF "Pump characteristic map"
+            MAP 0x7140 DAMOS_KF 100.0 R_VOLTAGE 0.0 5000.0
         /end CHARACTERISTIC
-        /begin CHARACTERISTIC PUMKF "Characteristic map" MAP 0xFFFF DAMOS_KF 124.0 R_VOLTAGE 0.0 1000.0
+        /begin CHARACTERISTIC PUMKF "Characteristic map"
+            MAP 0xFFFF DAMOS_KF 124.0 R_VOLTAGE 0.0 1000.0
         /end CHARACTERISTIC

         /begin COMPU_METHOD CM_N_SB_03 "CM_Force" TAB_INTP "%3.41" "N"
diff --git a/tests/rules/test_rules_overwrite.py b/tests/rules/test_rules_overwrite.py
index 2b5270f..69d5654 100644
--- a/tests/rules/test_rules_overwrite.py
+++ b/tests/rules/test_rules_overwrite.py
@@ -62,9 +62,9 @@ def test_rules_overwrite_full():
     assert overwrite["Name"] == "XcpInstance"
     assert overwrite["AxisNumber"] == "3"
     assert overwrite["CONVERSION"] == "ConversionMethod1"
-    assert overwrite["EXTENDED_LIMITS"] == {'LowerLimit': '0', 'UpperLimit': '200'}
+    assert overwrite["EXTENDED_LIMITS"] == {"LowerLimit": "0", "UpperLimit": "200"}
     assert overwrite["FORMAT"] == '"%.2f"'
     assert overwrite["INPUT_QUANTITY"] == "Speed2"
-    assert overwrite["LIMITS"] == {'LowerLimit': '0', 'UpperLimit': '160'}
+    assert overwrite["LIMITS"] == {"LowerLimit": "0", "UpperLimit": "160"}
     assert overwrite["MONOTONY"] == "STRICT_DECREASE"
     assert overwrite["PHYS_UNIT"] == '"km/h"'
diff --git a/tests/rules/test_rules_project.py b/tests/rules/test_rules_project.py
index ef26d97..dea8c3a 100644
--- a/tests/rules/test_rules_project.py
+++ b/tests/rules/test_rules_project.py
@@ -44,10 +44,9 @@ def test_rules_project_two_modules_minimal():
     """

     ast = A2LYacc().generate_ast(project_content)
     assert ast
-    assert ast["ASAP2_VERSION"] == {'VersionNo': '1', 'UpgradeNo': '71'}
+    assert ast["ASAP2_VERSION"] == {"VersionNo": "1", "UpgradeNo": "71"}
     project = ast["PROJECT"]
-    assert project
     assert project["Name"] == "Example_Project"
     assert project["LongIdentifier"] == '"ProjectBackupModule"'
     assert project["HEADER"]["Comment"] == '"Tests a Project with two modules"'
diff --git a/tests/rules/test_rules_transformer.py b/tests/rules/test_rules_transformer.py
index 3b5583b..bb1d158 100644
--- a/tests/rules/test_rules_transformer.py
+++ b/tests/rules/test_rules_transformer.py
@@ -87,5 +87,14 @@ def test_rules_transformer_full():
     assert transformer["Timeout"] == "5000"
     assert transformer["Trigger"] == "ON_CHANGE"
     assert transformer["InverseTransformer"] == "TRANSFORMER_TOOL_REVERSE_1"
-    assert transformer["TRANSFORMER_IN_OBJECTS"] == ["IN_CHAR_1", "IN_BLOB_1", "IN_TYPEDEF_STRUCT_x1"]
-    assert transformer["TRANSFORMER_OUT_OBJECTS"] == ["OUT_ID_14", "OUT_STRUCT_x2", "OUT_BLOB_1", "OUT_BLOB_2"]
+    assert transformer["TRANSFORMER_IN_OBJECTS"] == [
+        "IN_CHAR_1",
+        "IN_BLOB_1",
+        "IN_TYPEDEF_STRUCT_x1",
+    ]
+    assert transformer["TRANSFORMER_OUT_OBJECTS"] == [
+        "OUT_ID_14",
+        "OUT_STRUCT_x2",
+        "OUT_BLOB_1",
+        "OUT_BLOB_2",
+    ]
diff --git a/tests/rules/test_rules_typedef_axis.py b/tests/rules/test_rules_typedef_axis.py
index e4c305d..593993b 100644
--- a/tests/rules/test_rules_typedef_axis.py
+++ b/tests/rules/test_rules_typedef_axis.py
@@ -93,7 +93,7 @@ def test_rules_typedef_axis_full():
     assert typedef_axis["LowerLimit"] == "0.0"
     assert typedef_axis["UpperLimit"] == "2460.0"
     assert typedef_axis["BYTE_ORDER"] == "MSB_LAST_MSW_FIRST"
-    assert typedef_axis["EXTENDED_LIMITS"] == {'LowerLimit': '0', 'UpperLimit': '3500'}
+    assert typedef_axis["EXTENDED_LIMITS"] == {"LowerLimit": "0", "UpperLimit": "3500"}
     assert typedef_axis["FORMAT"] == '"%0.01f"'
     assert typedef_axis["MONOTONY"] == "STRICT_DECREASE"
     assert typedef_axis["PHYS_UNIT"] == '"s"'
diff --git a/tests/rules/test_rules_typedef_measurement.py b/tests/rules/test_rules_typedef_measurement.py
index 500e141..6ba9e3d 100644
--- a/tests/rules/test_rules_typedef_measurement.py
+++ b/tests/rules/test_rules_typedef_measurement.py
@@ -104,4 +104,7 @@ def test_rules_typedef_measurement_full():
     assert typedef_measurement["LAYOUT"] == "ALTERNATE_WITH_Y"
     assert typedef_measurement["MATRIX_DIM"] == ["1"]
     assert typedef_measurement["PHYS_UNIT"] == '"V"'
-    assert typedef_measurement["BIT_OPERATION"] == {'LEFT_SHIFT': {'Bitcount': '4'}, 'SIGN_EXTEND': {'Boolean': True}}
+    assert typedef_measurement["BIT_OPERATION"] == {
+        "LEFT_SHIFT": {"Bitcount": "4"},
+        "SIGN_EXTEND": {"Boolean": True},
+    }
diff --git a/tests/rules/test_rules_var_forbidden_comb.py b/tests/rules/test_rules_var_forbidden_comb.py
index 62703d8..3a91bd6 100644
--- a/tests/rules/test_rules_var_forbidden_comb.py
+++ b/tests/rules/test_rules_var_forbidden_comb.py
@@ -38,7 +38,7 @@ def test_rules_var_forbidden_comb():

     var_forbidden_comb = ast["VAR_FORBIDDEN_COMB"]
     assert var_forbidden_comb
-    assert var_forbidden_comb['CriterionList'][0][0] == 'Car'
-    assert var_forbidden_comb['CriterionList'][0][1] == 'Limousine'
-    assert var_forbidden_comb['CriterionList'][1][0] == 'Gear'
-    assert var_forbidden_comb['CriterionList'][1][1] == 'Manual'
+    assert var_forbidden_comb["CriterionList"][0][0] == "Car"
+    assert var_forbidden_comb["CriterionList"][0][1] == "Limousine"
+    assert var_forbidden_comb["CriterionList"][1][0] == "Gear"
+    assert var_forbidden_comb["CriterionList"][1][1] == "Manual"
diff --git a/tests/rules/test_rules_variant_coding.py b/tests/rules/test_rules_variant_coding.py
index 3ecdd74..2cecd3f 100644
--- a/tests/rules/test_rules_variant_coding.py
+++ b/tests/rules/test_rules_variant_coding.py
@@ -74,34 +74,34 @@ def test_rules_variant_coding():

     variant_coding = ast["VARIANT_CODING"]
     assert variant_coding
-    assert variant_coding['VAR_NAMING'] == 'NUMERIC'
-    assert variant_coding['VAR_SEPARATOR'] == '"."'
-    assert variant_coding['VAR_CHARACTERISTIC'][0]['Name'] == 'PUMKF'
-    assert variant_coding['VAR_CHARACTERISTIC'][0]['CriterionName'][0] == 'Gear'
-    assert variant_coding['VAR_CHARACTERISTIC'][0]['CriterionName'][1] == 'Car'
-    assert variant_coding['VAR_CHARACTERISTIC'][1]['Name'] == 'NLLM'
-    assert variant_coding['VAR_CHARACTERISTIC'][1]['CriterionName'][0] == 'Gear'
-    assert variant_coding['VAR_CHARACTERISTIC'][1]['CriterionName'][1] == 'Car'
-    assert variant_coding['VAR_CHARACTERISTIC'][1]['VAR_ADDRESS']['Address'][0] == '0x8840'
-    assert variant_coding['VAR_CHARACTERISTIC'][1]['VAR_ADDRESS']['Address'][1] == '0x8858'
-    assert variant_coding['VAR_CHARACTERISTIC'][1]['VAR_ADDRESS']['Address'][2] == '0x8870'
-    assert variant_coding['VAR_CHARACTERISTIC'][1]['VAR_ADDRESS']['Address'][3] == '0x8888'
-    assert variant_coding['VAR_CRITERION'][0]['Name'] == 'Car'
-    assert variant_coding['VAR_CRITERION'][0]['LongIdentifier'] == '"Car body"'
-    assert variant_coding['VAR_CRITERION'][0]['VALUE'][0] == 'Limousine'
-    assert variant_coding['VAR_CRITERION'][0]['VALUE'][1] == 'Kombi'
-    assert variant_coding['VAR_CRITERION'][0]['VALUE'][2] == 'Cabrio'
-    assert variant_coding['VAR_CRITERION'][0]['VAR_MEASUREMENT'] == 'S_CAR'
-    assert variant_coding['VAR_CRITERION'][0]['VAR_SELECTION_CHARACTERISTIC'] == 'V_CAR'
-    assert variant_coding['VAR_CRITERION'][1]['Name'] == 'Gear'
-    assert variant_coding['VAR_CRITERION'][1]['LongIdentifier'] == '"Type of gear box"'
-    assert variant_coding['VAR_CRITERION'][1]['VALUE'][0] == 'Manual'
-    assert variant_coding['VAR_CRITERION'][1]['VALUE'][1] == 'Automatic'
-    assert variant_coding['VAR_FORBIDDEN_COMB'][0]['CriterionList'][0][0] == 'Car'
-    assert variant_coding['VAR_FORBIDDEN_COMB'][0]['CriterionList'][0][1] == 'Limousine'
-    assert variant_coding['VAR_FORBIDDEN_COMB'][0]['CriterionList'][1][0] == 'Gear'
-    assert variant_coding['VAR_FORBIDDEN_COMB'][0]['CriterionList'][1][1] == 'Manual'
-    assert variant_coding['VAR_FORBIDDEN_COMB'][1]['CriterionList'][0][0] == 'Car'
-    assert variant_coding['VAR_FORBIDDEN_COMB'][1]['CriterionList'][0][1] == 'Cabrio'
-    assert variant_coding['VAR_FORBIDDEN_COMB'][1]['CriterionList'][1][0] == 'Gear'
-    assert variant_coding['VAR_FORBIDDEN_COMB'][1]['CriterionList'][1][1] == 'Automatic'
+    assert variant_coding["VAR_NAMING"] == "NUMERIC"
+    assert variant_coding["VAR_SEPARATOR"] == '"."'
+    assert variant_coding["VAR_CHARACTERISTIC"][0]["Name"] == "PUMKF"
+    assert variant_coding["VAR_CHARACTERISTIC"][0]["CriterionName"][0] == "Gear"
+    assert variant_coding["VAR_CHARACTERISTIC"][0]["CriterionName"][1] == "Car"
+    assert variant_coding["VAR_CHARACTERISTIC"][1]["Name"] == "NLLM"
+    assert variant_coding["VAR_CHARACTERISTIC"][1]["CriterionName"][0] == "Gear"
+    assert variant_coding["VAR_CHARACTERISTIC"][1]["CriterionName"][1] == "Car"
+    assert variant_coding["VAR_CHARACTERISTIC"][1]["VAR_ADDRESS"]["Address"][0] == "0x8840"
+    assert variant_coding["VAR_CHARACTERISTIC"][1]["VAR_ADDRESS"]["Address"][1] == "0x8858"
+    assert variant_coding["VAR_CHARACTERISTIC"][1]["VAR_ADDRESS"]["Address"][2] == "0x8870"
+    assert variant_coding["VAR_CHARACTERISTIC"][1]["VAR_ADDRESS"]["Address"][3] == "0x8888"
+    assert variant_coding["VAR_CRITERION"][0]["Name"] == "Car"
+    assert variant_coding["VAR_CRITERION"][0]["LongIdentifier"] == '"Car body"'
+    assert variant_coding["VAR_CRITERION"][0]["VALUE"][0] == "Limousine"
+    assert variant_coding["VAR_CRITERION"][0]["VALUE"][1] == "Kombi"
+    assert variant_coding["VAR_CRITERION"][0]["VALUE"][2] == "Cabrio"
+    assert variant_coding["VAR_CRITERION"][0]["VAR_MEASUREMENT"] == "S_CAR"
+    assert variant_coding["VAR_CRITERION"][0]["VAR_SELECTION_CHARACTERISTIC"] == "V_CAR"
+    assert variant_coding["VAR_CRITERION"][1]["Name"] == "Gear"
+    assert variant_coding["VAR_CRITERION"][1]["LongIdentifier"] == '"Type of gear box"'
+    assert variant_coding["VAR_CRITERION"][1]["VALUE"][0] == "Manual"
+    assert variant_coding["VAR_CRITERION"][1]["VALUE"][1] == "Automatic"
+    assert variant_coding["VAR_FORBIDDEN_COMB"][0]["CriterionList"][0][0] == "Car"
+    assert variant_coding["VAR_FORBIDDEN_COMB"][0]["CriterionList"][0][1] == "Limousine"
+    assert variant_coding["VAR_FORBIDDEN_COMB"][0]["CriterionList"][1][0] == "Gear"
+    assert variant_coding["VAR_FORBIDDEN_COMB"][0]["CriterionList"][1][1] == "Manual"
+    assert variant_coding["VAR_FORBIDDEN_COMB"][1]["CriterionList"][0][0] == "Car"
+    assert variant_coding["VAR_FORBIDDEN_COMB"][1]["CriterionList"][0][1] == "Cabrio"
+    assert variant_coding["VAR_FORBIDDEN_COMB"][1]["CriterionList"][1][0] == "Gear"
+    assert variant_coding["VAR_FORBIDDEN_COMB"][1]["CriterionList"][1][1] == "Automatic"
diff --git a/tests/validator/test_validator_a2l_sections.py b/tests/validator/test_validator_a2l_sections.py
index 9c4bd1d..fe6157c 100644
--- a/tests/validator/test_validator_a2l_sections.py
+++ b/tests/validator/test_validator_a2l_sections.py
@@ -71,7 +71,10 @@ def test_validator_with_missing_begin_statement():
     with pytest.raises(A2LValidator.A2LValidationError) as ex:
         A2LValidator().validate(a2l_string)
     assert len(ex.value.errors) == 1
-    assert ex.value.errors[0] == "Detected unexpected end of section on '/end CHARACTERISTIC' at line 3."
+    assert (
+        ex.value.errors[0]
+        == "Detected unexpected end of section on '/end CHARACTERISTIC' at line 3."
+    )


 def test_validator_with_nested_structure_error():
@@ -92,7 +95,10 @@ def test_validator_with_nested_structure_error():
         A2LValidator().validate(a2l_string)

     assert ex
-    assert ex.value.errors[0] == "Detected unexpected end of section on '/end CHARACTERISTIC' at line 5."
+    assert (
+        ex.value.errors[0]
+        == "Detected unexpected end of section on '/end CHARACTERISTIC' at line 5."
+    )
     assert ex.value.errors[1] == "Detected unexpected end of section on '/end MOD_PAR' at line 7."
     assert ex.value.errors[2] == "Detected unexpected end of section on '/end PROJECT' at line 8."
     assert ex.value.errors[3] == "Detected unclosed section 'CHARACTERISTIC' starting at line 3."
diff --git a/tests/validator/test_validator_remove_comments.py b/tests/validator/test_validator_remove_comments.py
index c3cb6be..4150c42 100644
--- a/tests/validator/test_validator_remove_comments.py
+++ b/tests/validator/test_validator_remove_comments.py
@@ -51,10 +51,16 @@ def test_validator_inline_comment(line, expected_line):
         ("/* /begin PROJECT /* This is a nested comment */", ""),
         ("/begin PROJECT /*No space after comment*//end PROJECT", "/begin PROJECT /end PROJECT"),
         (
-            "/begin PROJECT /* comment\n/begin MODULE /* nested\nEnd of comment.*/\n/*END PROJECT*//end PROJECT",
+            (
+                "/begin PROJECT /* comment\n/begin MODULE /* nested\n"
+                "End of comment.*/\n/*END PROJECT*//end PROJECT"
+            ),
             "/begin PROJECT \n/end PROJECT",
         ),
-        ("/begin MODULE M_COMMENT /************/ /end MODULE", "/begin MODULE M_COMMENT /end MODULE"),
+        (
+            "/begin MODULE M_COMMENT /************/ /end MODULE",
+            "/begin MODULE M_COMMENT /end MODULE",
+        ),
     ],
 )
 def test_validator_multiline_comment(line, expected_line):
@@ -76,9 +82,15 @@ def test_validator_multiline_comment(line, expected_line):
             "/begin TEXT 'text // containing /* comments */' /end TEXT",
             "/begin TEXT 'text // containing /* comments */' /end TEXT",
         ),
-        ('/begin TEXT "text // /* comment */" /end TEXT', '/begin TEXT "text // /* comment */" /end TEXT'),
+        (
+            '/begin TEXT "text // /* comment */" /end TEXT',
+            '/begin TEXT "text // /* comment */" /end TEXT',
+        ),
         ('/begin MODULE /*"String inside comment"*/ /end MODULE', "/begin MODULE /end MODULE"),
-        ("/begin PROJECT /* 'String inside comment' */ /end PROJECT", "/begin PROJECT /end PROJECT"),
+        (
+            "/begin PROJECT /* 'String inside comment' */ /end PROJECT",
+            "/begin PROJECT /end PROJECT",
+        ),
         (' /BEGIN BLOB TEST_BLOB ""', ' /BEGIN BLOB TEST_BLOB ""'),
     ],
 )