From fed91c69a7ee40a868bc50e4e7634ede09fe443c Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Tue, 5 Jan 2021 11:15:04 +0100 Subject: [PATCH] Introduces separate runtime provider schema The provider.yaml contains more information than is required at runtime (specifically about documentation building). Those fields are not needed at runtime and their presence is optional. Also the runtime check for provider information should be more relaxed and allow for future compatibility (without 'additionalProperties' set to false). This way we can add new, optional fields to provider.yaml without worrying about breaking future-compatibility of providers with future airflow versions. This change restores 'additionalProperties': false in the main, development-focused provider.yaml schema and introduces a new runtime schema that is used to verify the provider info when providers are discovered by airflow. This 'runtime' version should change very rarely as a change to add a new required property in it breaks compatibility of providers with already released versions of Airflow. We also trim down the provider.yaml file when preparing provider packages to only contain those fields that are required in the runtime schema. 
--- .pre-commit-config.yaml | 6 +- MANIFEST.in | 2 +- TESTING.rst | 2 +- .../provider-2.0.0.yaml.schema.json | 2 +- airflow/provider.yaml.schema.json | 12 +- airflow/provider_info.schema.json | 38 ++++++ airflow/providers_manager.py | 17 ++- .../prepare_provider_packages.py | 126 ++++++++++++------ dev/templates/announce_email.j2 | 2 +- docs/apache-airflow-providers/index.rst | 9 +- scripts/ci/libraries/_build_images.sh | 2 +- .../run_prepare_provider_packages.sh | 7 +- setup.cfg | 2 +- 13 files changed, 155 insertions(+), 72 deletions(-) create mode 100644 airflow/provider_info.schema.json diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 019ff45522ce5..e5e3fd15964f3 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -500,11 +500,7 @@ repos: - https://json-schema.org/draft-07/schema language: python pass_filenames: true - files: > - (?x) - ^airflow/provider.yaml.schema.json$| - ^airflow/config_templates/config.yml.schema.json$| - ^airflow/serialization/schema.json$ + files: .*\.schema\.json$ require_serial: true additional_dependencies: ['jsonschema==3.2.0', 'PyYAML==5.3.1', 'requests==2.25.0'] - id: json-schema diff --git a/MANIFEST.in b/MANIFEST.in index f30a39019a0c2..2864364de5169 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -32,7 +32,7 @@ global-exclude __pycache__ *.pyc include airflow/alembic.ini include airflow/api_connexion/openapi/v1.yaml include airflow/git_version -include airflow/provider.yaml.schema.json +include airflow/provider_info.schema.json include airflow/customized_form_field_behaviours.schema.json include airflow/serialization/schema.json include airflow/utils/python_virtualenv_script.jinja2 diff --git a/TESTING.rst b/TESTING.rst index 7ffd50ce28b79..30e6fff897a2e 100644 --- a/TESTING.rst +++ b/TESTING.rst @@ -973,7 +973,7 @@ If ``current`` is specified (default), then the current version of Airflow is us Otherwise, the released version of Airflow is installed. 
The ``-install-airflow-version=`` command make sure that the current (from sources) version of -Airflow is removed and the released version of Airflow from ``Pypi`` is installed. Note that tests sources +Airflow is removed and the released version of Airflow from ``PyPI`` is installed. Note that tests sources are not removed and they can be used to run tests (unit tests and system tests) against the freshly installed version. diff --git a/airflow/deprecated_schemas/provider-2.0.0.yaml.schema.json b/airflow/deprecated_schemas/provider-2.0.0.yaml.schema.json index d6fea5019ccb2..6ba4e72bce91a 100644 --- a/airflow/deprecated_schemas/provider-2.0.0.yaml.schema.json +++ b/airflow/deprecated_schemas/provider-2.0.0.yaml.schema.json @@ -15,7 +15,7 @@ "type": "string" }, "versions": { - "description": "List of available versions in Pypi. Sorted descending according to release date.", + "description": "List of available versions in PyPI. Sorted descending according to release date.", "type": "array", "items": { "type": "string" diff --git a/airflow/provider.yaml.schema.json b/airflow/provider.yaml.schema.json index 0ee3b56dcbcc4..bdec41dc3cc0b 100644 --- a/airflow/provider.yaml.schema.json +++ b/airflow/provider.yaml.schema.json @@ -15,7 +15,7 @@ "type": "string" }, "versions": { - "description": "List of available versions in Pypi. Sorted descending according to release date.", + "description": "List of available versions in PyPI. 
Sorted descending according to release date.", "type": "array", "items": { "type": "string" @@ -68,7 +68,7 @@ "maxItems": 1 } }, - "additionalProperties": true, + "additionalProperties": false, "required": [ "integration-name", "external-doc-url", @@ -93,7 +93,7 @@ } } }, - "additionalProperties": true, + "additionalProperties": false, "required": [ "integration-name", "python-modules" @@ -141,7 +141,7 @@ } } }, - "additionalProperties": true, + "additionalProperties": false, "required": [ "integration-name", "python-modules" @@ -170,7 +170,7 @@ "description": "List of python modules containing the transfers." } }, - "additionalProperties": true, + "additionalProperties": false, "required": [ "source-integration-name", "target-integration-name", @@ -193,7 +193,7 @@ } } }, - "additionalProperties": true, + "additionalProperties": false, "required": [ "name", "package-name", diff --git a/airflow/provider_info.schema.json b/airflow/provider_info.schema.json new file mode 100644 index 0000000000000..e15213af6c2d5 --- /dev/null +++ b/airflow/provider_info.schema.json @@ -0,0 +1,38 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "package-name": { + "description": "Package name available under which the package is available in the PyPI repository.", + "type": "string" + }, + "name": { + "description": "Provider name", + "type": "string" + }, + "description": { + "description": "Information about the package in RST format", + "type": "string" + }, + "hook-class-names": { + "type": "array", + "description": "Hook class names that provide connection types to core", + "items": { + "type": "string" + } + }, + "extra-links": { + "type": "array", + "description": "Class name that provide extra link functionality", + "items": { + "type": "string" + } + } + }, + "required": [ + "name", + "package-name", + "description", + "versions" + ] +} diff --git a/airflow/providers_manager.py b/airflow/providers_manager.py index 
b3c7e5c92d3f4..02bf5e047eec2 100644 --- a/airflow/providers_manager.py +++ b/airflow/providers_manager.py @@ -39,9 +39,9 @@ log = logging.getLogger(__name__) -def _create_provider_schema_validator(): - """Creates JSON schema validator from the provider.yaml.schema.json""" - schema = json.loads(importlib_resources.read_text('airflow', 'provider.yaml.schema.json')) +def _create_provider_info_schema_validator(): + """Creates JSON schema validator from the provider_info.schema.json""" + schema = json.loads(importlib_resources.read_text('airflow', 'provider_info.schema.json')) cls = jsonschema.validators.validator_for(schema) validator = cls(schema) return validator @@ -106,7 +106,7 @@ def __init__(self): # Customizations for javascript fields are kept here self._field_behaviours: Dict[str, Dict] = {} self._extra_link_class_name_set: Set[str] = set() - self._provider_schema_validator = _create_provider_schema_validator() + self._provider_schema_validator = _create_provider_info_schema_validator() self._customized_form_fields_schema_validator = ( _create_customized_form_field_behaviours_schema_validator() ) @@ -114,7 +114,7 @@ def __init__(self): def initialize_providers_manager(self): """Lazy initialization of provider data.""" - # We cannot use @cache here because it does not work during pytests, apparently each test + # We cannot use @cache here because it does not work during pytest, apparently each test # runs it it's own namespace and ProvidersManager is a different object in each namespace # even if it is singleton but @cache on the initialize_providers_manager message still works in the # way that it is called only once for one of the objects (at least this is how it looks like @@ -139,7 +139,10 @@ def _discover_all_providers_from_packages(self) -> None: """ Discovers all providers by scanning packages installed. 
The list of providers should be returned via the 'apache_airflow_provider' entrypoint as a dictionary conforming to the - 'airflow/provider.yaml.schema.json' schema. + 'airflow/provider_info.schema.json' schema. Note that the schema is different at runtime + than provider.yaml.schema.json. The development version of provider schema is more strict and changes + together with the code. The runtime version is more relaxed (allows for additional properties) + and verifies only the subset of fields that are needed at runtime. """ for entry_point, dist in entry_points_with_dist('apache_airflow_provider'): package_name = dist.metadata['name'] @@ -194,7 +197,7 @@ def _add_provider_info_from_local_source_files_on_path(self, path) -> None: for folder, subdirs, files in os.walk(path, topdown=True): for filename in fnmatch.filter(files, "provider.yaml"): package_name = "apache-airflow-providers" + folder[len(root_path) :].replace(os.sep, "-") - # We are skipping discovering snowflake because of snowflake monkeypatching problem + # We are skipping discovering snowflake because of snowflake monkey-patching problem # This is only for local development - it has no impact for the packaged snowflake provider # That should work on its own # https://github.com/apache/airflow/issues/12881 diff --git a/dev/provider_packages/prepare_provider_packages.py b/dev/provider_packages/prepare_provider_packages.py index e82da6dc94f62..f414cbec4159b 100644 --- a/dev/provider_packages/prepare_provider_packages.py +++ b/dev/provider_packages/prepare_provider_packages.py @@ -41,6 +41,7 @@ import jsonschema import yaml from packaging.version import Version +from rich import print PROVIDER_TEMPLATE_PREFIX = "PROVIDER_" BACKPORT_PROVIDER_TEMPLATE_PREFIX = "BACKPORT_PROVIDER_" @@ -59,6 +60,8 @@ SOURCE_DIR_PATH, "airflow", "deprecated_schemas", "provider-2.0.0.yaml.schema.json" ) +PROVIDER_RUNTIME_DATA_SCHEMA_PATH = os.path.join(SOURCE_DIR_PATH, "airflow", "provider_info.schema.json") + sys.path.insert(0, 
SOURCE_DIR_PATH) # those imports need to come after the above sys.path.insert to make sure that Airflow @@ -86,7 +89,7 @@ def with_group(title): https://docs.github.com/en/free-pro-team@latest/actions/reference/workflow-commands-for-github-actions#grouping-log-lines """ if os.environ.get('GITHUB_ACTIONS', 'false') != "true": - print("#" * 20, title, "#" * 20) + print("[blue]", "#" * 20, title, "#" * 20, "[/]") yield return print(f"::group::{title}") @@ -606,7 +609,7 @@ def print_wrong_naming(entity_type: EntityType, wrong_classes: List[Tuple[type, :param wrong_classes: list of wrong entities """ if wrong_classes: - print(f"\nThere are wrongly named entities of type {entity_type}:\n", file=sys.stderr) + print(f"\n[red]There are wrongly named entities of type {entity_type}:[/]\n", file=sys.stderr) for entity_type, message in wrong_classes: print(f"{entity_type}: {message}", file=sys.stderr) @@ -868,7 +871,7 @@ def get_all_releases(provider_package_path: str, backport_packages: bool) -> Lis content = changes_file.read() found = re.search(r'/([a-z0-9]*)\)', content, flags=re.MULTILINE) if not found: - print("No commit found. This seems to be first time you run it", file=sys.stderr) + print("[yellow]No commit found. 
This seems to be first time you run it[/]", file=sys.stderr) else: last_commit_hash = found.group(1) release_version = file_name[len(changes_file_prefix) :][:-3] @@ -953,8 +956,8 @@ def check_if_release_version_ok( if previous_release_version: if Version(current_release_version) < Version(previous_release_version): print( - f"The release {current_release_version} must be not less than " - f"{previous_release_version} - last release for the package", + f"[red]The release {current_release_version} must be not less than " + f"{previous_release_version} - last release for the package[/]", file=sys.stderr, ) sys.exit(2) @@ -990,8 +993,8 @@ def make_sure_remote_apache_exists_and_fetch(): except subprocess.CalledProcessError as e: if e.returncode == 128: print( - "The remote `apache-https-for-providers` already exists. If you have trouble running " - "git log delete the remote", + "[yellow]The remote `apache-https-for-providers` already exists. If you have trouble running " + "git log delete the remote[/]", file=sys.stderr, ) else: @@ -1112,14 +1115,14 @@ def check_if_classes_are_properly_named( error_encountered = False if not is_camel_case_with_acronyms(class_name): print( - f"The class {class_full_name} is wrongly named. The " - f"class name should be CamelCaseWithACRONYMS !" + f"[red]The class {class_full_name} is wrongly named. The " + f"class name should be CamelCaseWithACRONYMS ![/]" ) error_encountered = True if not class_name.endswith(class_suffix): print( - f"The class {class_full_name} is wrongly named. It is one of the {entity_type.value}" - f" so it should end with {class_suffix}" + f"[red]The class {class_full_name} is wrongly named. 
It is one of the {entity_type.value}" + f" so it should end with {class_suffix}[/]" ) error_encountered = True total_class_number += 1 @@ -1137,54 +1140,89 @@ def get_package_pip_name(provider_package_id: str, backport_packages: bool): def validate_provider_info_with_2_0_0_schema(provider_info: Dict[str, Any]) -> None: """ - Validates provider info against 2.0.0 schema. + Validates provider info against 2.0.0 schema. We need to run this validation until we make Airflow + 2.0.0 yank and add apache-airflow>=2.0.1 (possibly) to provider dependencies. + :param provider_info: provider info to validate """ - def _load_schema() -> Dict[str, Any]: - with open(PROVIDER_2_0_0_DATA_SCHEMA_PATH) as schema_file: - content = json.load(schema_file) - return content - - schema = _load_schema() + with open(PROVIDER_2_0_0_DATA_SCHEMA_PATH) as schema_file: + schema = json.load(schema_file) try: jsonschema.validate(provider_info, schema=schema) + print("[green]Provider info validated against 2.0.0 schema[/]") except jsonschema.ValidationError as e: raise Exception( "Error when validating schema. The schema must be Airflow 2.0.0 compatible. " - "If you added any fields please remove them via 'remove_extra_fields' method.", + "If you added any fields please remove them via 'convert_to_provider_info' method.", e, ) -def remove_logo_field(original_provider_info: Dict[str, Any]): - updated_provider_info = deepcopy(original_provider_info) - expression = jsonpath_ng.parse("integrations..logo") - updated_provider_info = expression.filter(lambda x: True, updated_provider_info) - return updated_provider_info +def validate_provider_info_with_runtime_schema(provider_info: Dict[str, Any]) -> None: + """ + Validates provider info against the runtime schema. This way we check if the provider info in the + packages is future-compatible. The Runtime Schema should only change when there is a major version + change. 
+ + :param provider_info: provider info to validate + """ + + with open(PROVIDER_RUNTIME_DATA_SCHEMA_PATH) as schema_file: + schema = json.load(schema_file) + try: + jsonschema.validate(provider_info, schema=schema) + print("[green]Provider info validated against runtime schema[/]") + except jsonschema.ValidationError as e: + raise Exception( + "Error when validating schema. The schema must be compatible with " + + "airflow/provider_info.schema.json. " + + "If you added any fields please remove them via 'convert_to_provider_info' method.", + e, + ) -def remove_extra_fields(provider_info: Dict[str, Any]) -> Dict[str, Any]: +def convert_to_provider_info(provider_info: Dict[str, Any]) -> Dict[str, Any]: """ In Airflow 2.0.0 we set 'additionalProperties" to 'false' in provider's schema, which makes the schema - non future-compatible. While we changed tho additionalProperties to 'true' in 2.0.1, we have to + non future-compatible. + + While we changed tho additionalProperties to 'true' in 2.0.1, we have to make sure that the returned provider_info when preparing package is compatible with the older version of the schema and remove all the newly added fields until we deprecate (possibly even yank) 2.0.0 and make provider packages depend on Airflow >=2.0.1. + + Currently we have two provider schemas: + * provider.yaml.schema.json that is used to verify the schema while it is developed (it has, for example + additionalProperties set to false, to avoid typos in field names). This is the full set of + fields that are used for both: runtime information and documentation building. + * provider_info.schema.json that is used to verify the schema at runtime - it only contains + fields from provider.yaml that are necessary for runtime provider discovery. + + This method converts the full provider.yaml schema into the limited version needed at runtime. 
""" - provider_info = remove_logo_field(provider_info) - return provider_info + updated_provider_info = deepcopy(provider_info) + expression = jsonpath_ng.parse("[hooks,operators,integrations,sensors,transfers]") + return expression.filter(lambda x: True, updated_provider_info) -def get_provider_info(provider_package_id: str) -> Dict[str, Any]: +def get_provider_info_from_provider_yaml(provider_package_id: str) -> Dict[str, Any]: + """ + Retrieves provider info from the provider yaml file. The provider yaml file contains more information + than provider_info that is used at runtime. This method converts the full provider yaml file into + stripped-down provider info and validates it against deprecated 2.0.0 schema and runtime schema. + :param provider_package_id: package id to retrieve provider.yaml from + :return: provider_info dictionary + """ provider_yaml_file_name = os.path.join(get_source_package_path(provider_package_id), "provider.yaml") if not os.path.exists(provider_yaml_file_name): raise Exception(f"The provider.yaml file is missing: {provider_yaml_file_name}") with open(provider_yaml_file_name) as provider_file: - provider_info = yaml.safe_load(provider_file.read()) - stripped_provider_info = remove_extra_fields(provider_info) - validate_provider_info_with_2_0_0_schema(stripped_provider_info) - return stripped_provider_info + provider_yaml_dict = yaml.safe_load(provider_file.read()) + provider_info = convert_to_provider_info(provider_yaml_dict) + validate_provider_info_with_2_0_0_schema(provider_info) + validate_provider_info_with_runtime_schema(provider_info) + return provider_info def update_generated_files_for_package( @@ -1255,7 +1293,7 @@ def update_generated_files_for_package( "EXTRAS_REQUIREMENTS": get_package_extras( provider_package_id=provider_package_id, backport_packages=backport_packages ), - "PROVIDER_INFO": get_provider_info(provider_package_id), + "PROVIDER_INFO": get_provider_info_from_provider_yaml(provider_package_id), } if 
update_release_notes: git_cmd = get_git_command(previous_release) @@ -1299,7 +1337,7 @@ def update_generated_files_for_package( bad = bad + sum([len(entity_summary.wrong_entities) for entity_summary in entity_summaries.values()]) if bad != 0: print() - print(f"ERROR! There are {bad} errors of {total} entities for {provider_package_id}") + print(f"[red]There are {bad} errors of {total} entities for {provider_package_id}[/]") print() return total, bad @@ -1396,7 +1434,10 @@ def prepare_setup_cfg_file(context): def prepare_get_provider_info_py_file(context, provider_package_id: str): get_provider_template_name = "get_provider_info" get_provider_file_path = os.path.abspath( - os.path.join(get_target_providers_package_folder(provider_package_id), "get_provider_info.py") + os.path.join( + get_target_providers_package_folder(provider_package_id), + "get_provider_info.py", + ) ) get_provider_content = render_template( template_name=get_provider_template_name, @@ -1473,7 +1514,7 @@ def update_release_notes_for_packages( bad += inc_bad if bad == 0: print() - print(f"All good! All {total} entities are properly named") + print(f"[green]All good! All {total} entities are properly named[/]") print() print("Totals:") print() @@ -1489,7 +1530,7 @@ def update_release_notes_for_packages( print() else: print() - print(f"ERROR! 
There are in total: {bad} entities badly named out of {total} entities ") + print(f"[red]There are in total: {bad} entities badly named out of {total} entities[/]") print() sys.exit(1) @@ -1547,14 +1588,14 @@ def copy_readme_and_changelog(provider_package_id: str, backport_packages: bool) outfile.write(line) -def print_provider_packages_list(args): +def print_provider_packages_list(_): """List all provider packages.""" providers = list(PROVIDERS_REQUIREMENTS.keys()) for provider in providers: print(provider) -def print_backport_packages_list(args): +def print_backport_packages_list(_): """Lists all packages that are backportable.""" providers = get_all_backportable_providers() for provider in providers: @@ -1565,7 +1606,7 @@ def get_version_suffix(version_suffix): return version_suffix if version_suffix is not None else "" -def update_package_release_notes(args): +def update_package_release_notes(_): """Updates package release notes.""" release_ver = "" suffix = get_version_suffix(args.version_suffix) @@ -1590,7 +1631,7 @@ def update_package_release_notes(args): ) -def generate_setup_files(args): +def generate_setup_files(args: Any): """Generates setup files for the package.""" print() print() @@ -1627,6 +1668,7 @@ def build_provider_package(args): subprocess.check_call( command, ) + print(f"[green]Prepared provider package {_provider_package} in format ${package_format}[/]") def get_parser(): diff --git a/dev/templates/announce_email.j2 b/dev/templates/announce_email.j2 index 5236ea61a0af5..7043133fbc94b 100644 --- a/dev/templates/announce_email.j2 +++ b/dev/templates/announce_email.j2 @@ -29,7 +29,7 @@ Apache Airflow - A platform to programmatically author, schedule, and monitor wo The official source release: https://dist.apache.org/repos/dist/release/airflow/{{ version }} -The Pypi package: +The PyPI package: https://pypi.org/project/apache-airflow/{{ version }}/ The documentation is available on: diff --git a/docs/apache-airflow-providers/index.rst 
b/docs/apache-airflow-providers/index.rst index b3db7afa5490b..17519219a4775 100644 --- a/docs/apache-airflow-providers/index.rst +++ b/docs/apache-airflow-providers/index.rst @@ -110,7 +110,7 @@ the package. We are using standard mechanism of python to define needs to define appropriate entry-point ``apache_airflow_provider`` which has to point to a callable implemented by your package and return a dictionary containing the list of discoverable capabilities of your package. The dictionary has to follow the -`json-schema specification `_. +`json-schema specification `_. Most of the schema provides extension point for the documentation (which you might want to also use for your own purpose) but the important fields from the extensibility point of view are those: @@ -215,8 +215,11 @@ You need to do the following to turn an existing Python package into a provider * Add the ``apache_airflow_provider`` entry point in the ``setup.cfg`` - this tells airflow where to get the required provider metadata * Create the function that you refer to in the first step as part of your package: this functions returns a - dictionary that contains all meta-data about your provider package; see also ``provider.yaml`` - files in the community managed provider packages as examples + dictionary that contains all meta-data about your provider package +* note that the dictionary should be compliant with ``airflow/provider_info.schema.json`` JSON-schema + specification and the community-managed providers have more fields there that are used to build + documentation, but the requirement for runtime information only contains several fields from the + runtime schema. See below for examples. 
Example ``setup.cfg``: diff --git a/scripts/ci/libraries/_build_images.sh b/scripts/ci/libraries/_build_images.sh index a90e47deaa26d..67975cf6fa526 100644 --- a/scripts/ci/libraries/_build_images.sh +++ b/scripts/ci/libraries/_build_images.sh @@ -16,7 +16,7 @@ # specific language governing permissions and limitations # under the License. -# For remote installation of airflow (from GitHub or Pypi) when building the image, you need to +# For remote installation of airflow (from GitHub or PyPI) when building the image, you need to # pass build flags depending on the version and method of the installation (for example to # get proper requirement constraint files) function build_images::add_build_args_for_remote_install() { diff --git a/scripts/in_container/run_prepare_provider_packages.sh b/scripts/in_container/run_prepare_provider_packages.sh index c6f30e79440cc..5fb434d2aa0e0 100755 --- a/scripts/in_container/run_prepare_provider_packages.sh +++ b/scripts/in_container/run_prepare_provider_packages.sh @@ -167,7 +167,8 @@ function build_provider_packages() { cat "${LOG_FILE}" exit "${RES}" fi - echo " Prepared ${PACKAGE_TYPE} package ${PROVIDER_PACKAGE} format ${PACKAGE_FORMAT}" + echo "===================================================================================" + echo "${COLOR_GREEN}OK Prepared ${PACKAGE_TYPE} package ${PROVIDER_PACKAGE} format ${PACKAGE_FORMAT}${COLOR_RESET}" echo "===================================================================================" group_end done @@ -196,9 +197,9 @@ function rename_packages_if_needed() { fi fi - popd + popd >/dev/null echo - echo "Airflow packages are in dist folder " + echo "${COLOR_GREEN}OK Airflow packages are prepared in dist folder${COLOR_RESET}" echo group_end diff --git a/setup.cfg b/setup.cfg index 7f7e7d3037a52..4e3e01622cdd7 100644 --- a/setup.cfg +++ b/setup.cfg @@ -144,7 +144,7 @@ airflow= alembic.ini git_version customized_form_field_behaviours.schema.json - provider.yaml.schema.json + 
provider_info.schema.json airflow.api_connexion.openapi=*.yaml airflow.serialization=*.json