From 08337aa7637b290bb8407c38b2a5dbe3e8383b3e Mon Sep 17 00:00:00 2001 From: Stepan Blyshchak <38952541+stepanblyschak@users.noreply.github.com> Date: Thu, 29 Apr 2021 04:58:30 +0300 Subject: [PATCH] [sonic-package-manager] first phase implementation of sonic-package-manager (#1527) What I did Implemented sonic-package-manager utility to manager SONiC Packages as per HLD Azure/SONiC#682. Implemented optional logic to migrate packages into new SONiC image in sonic-installer. How I did it Implemented as per HLD Azure/SONiC#682. How to verify it (Doc: Azure/SONiC#682) install package uninstall package upgrade package S2S upgrade THANK YOU, Stepan! --- doc/Command-Reference.md | 318 +++++- setup.py | 14 + .../bash_completion.d/sonic-package-manager | 8 + sonic-utilities-data/bash_completion.d/spm | 1 + sonic-utilities-data/templates/dump.sh.j2 | 29 + .../templates/service_mgmt.sh.j2 | 149 +++ .../templates/sonic.service.j2 | 39 + sonic-utilities-data/templates/timer.unit.j2 | 15 + sonic_installer/bootloader/aboot.py | 11 +- sonic_installer/bootloader/bootloader.py | 5 +- sonic_installer/common.py | 3 + sonic_installer/main.py | 127 ++- sonic_package_manager/__init__.py | 5 + sonic_package_manager/constraint.py | 166 ++++ sonic_package_manager/database.py | 222 +++++ sonic_package_manager/dockerapi.py | 226 +++++ sonic_package_manager/errors.py | 146 +++ sonic_package_manager/logger.py | 29 + sonic_package_manager/main.py | 460 +++++++++ sonic_package_manager/manager.py | 931 ++++++++++++++++++ sonic_package_manager/manifest.py | 210 ++++ sonic_package_manager/metadata.py | 185 ++++ sonic_package_manager/package.py | 53 + sonic_package_manager/progress.py | 52 + sonic_package_manager/reference.py | 30 + sonic_package_manager/registry.py | 157 +++ .../service_creator/__init__.py | 3 + .../service_creator/creator.py | 342 +++++++ .../service_creator/feature.py | 108 ++ .../service_creator/sonic_db.py | 98 ++ .../service_creator/utils.py | 17 + sonic_package_manager/source.py | 183 ++++ sonic_package_manager/utils.py | 42 + sonic_package_manager/version.py | 23 + tests/sonic_package_manager/conftest.py | 377 +++++++ tests/sonic_package_manager/test_cli.py | 63 ++ .../sonic_package_manager/test_constraint.py | 76 ++ tests/sonic_package_manager/test_database.py | 89 ++ tests/sonic_package_manager/test_manager.py | 322 ++++++ tests/sonic_package_manager/test_manifest.py | 74 ++ tests/sonic_package_manager/test_metadata.py | 37 + tests/sonic_package_manager/test_reference.py | 18 + tests/sonic_package_manager/test_registry.py | 15 + .../test_service_creator.py | 171 ++++ tests/sonic_package_manager/test_utils.py | 8 + 45 files changed, 5633 insertions(+), 24 deletions(-) create mode 100644 sonic-utilities-data/bash_completion.d/sonic-package-manager create mode 120000 sonic-utilities-data/bash_completion.d/spm create mode 100644 sonic-utilities-data/templates/dump.sh.j2 create mode 100644 sonic-utilities-data/templates/service_mgmt.sh.j2 create mode 100644 sonic-utilities-data/templates/sonic.service.j2 create mode 100644 sonic-utilities-data/templates/timer.unit.j2 create mode 100644 sonic_package_manager/__init__.py create mode 100644 sonic_package_manager/constraint.py create mode 100644 sonic_package_manager/database.py create mode 100644 sonic_package_manager/dockerapi.py create mode 100644 sonic_package_manager/errors.py create mode 100644 sonic_package_manager/logger.py create mode 100644 sonic_package_manager/main.py create mode 100644 sonic_package_manager/manager.py create mode 100644 
sonic_package_manager/manifest.py create mode 100644 sonic_package_manager/metadata.py create mode 100644 sonic_package_manager/package.py create mode 100644 sonic_package_manager/progress.py create mode 100644 sonic_package_manager/reference.py create mode 100644 sonic_package_manager/registry.py create mode 100644 sonic_package_manager/service_creator/__init__.py create mode 100644 sonic_package_manager/service_creator/creator.py create mode 100644 sonic_package_manager/service_creator/feature.py create mode 100644 sonic_package_manager/service_creator/sonic_db.py create mode 100644 sonic_package_manager/service_creator/utils.py create mode 100644 sonic_package_manager/source.py create mode 100644 sonic_package_manager/utils.py create mode 100644 sonic_package_manager/version.py create mode 100644 tests/sonic_package_manager/conftest.py create mode 100644 tests/sonic_package_manager/test_cli.py create mode 100644 tests/sonic_package_manager/test_constraint.py create mode 100644 tests/sonic_package_manager/test_database.py create mode 100644 tests/sonic_package_manager/test_manager.py create mode 100644 tests/sonic_package_manager/test_manifest.py create mode 100644 tests/sonic_package_manager/test_metadata.py create mode 100644 tests/sonic_package_manager/test_reference.py create mode 100644 tests/sonic_package_manager/test_registry.py create mode 100644 tests/sonic_package_manager/test_service_creator.py create mode 100644 tests/sonic_package_manager/test_utils.py diff --git a/doc/Command-Reference.md b/doc/Command-Reference.md index 66154132551d..ab4e28dbdcd2 100644 --- a/doc/Command-Reference.md +++ b/doc/Command-Reference.md @@ -143,6 +143,7 @@ * [Watermark Show commands](#watermark-show-commands) * [Watermark Config commands](#watermark-config-commands) * [Software Installation and Management](#software-installation-and-management) + * [SONiC Package Manager](#sonic-package-manager) * [SONiC Installer](#sonic-installer) * [Troubleshooting Commands](#troubleshooting-commands) * [Routing Stack](#routing-stack) @@ -7961,8 +7962,316 @@ Go Back To [Beginning of the document](#) or [Beginning of this section](#waterm ## Software Installation and Management -SONiC software can be installed in two methods, viz, "using sonic-installer tool", "ONIE Installer". +SONiC images can be installed in one of two ways: +1. From within a running SONiC image using the `sonic-installer` utility +2. From the vendor's bootloader (e.g., ONIE, Aboot, etc.) +SONiC packages are available as prebuilt Docker images and are meant to be installed with the *sonic-package-manager* utility. + +### SONiC Package Manager + +The *sonic-package-manager* is a command line tool to manage (e.g. install, upgrade or uninstall) SONiC Packages. + +**sonic-package-manager list** + +This command lists all available SONiC packages, their description, installed version and installation status. +SONiC package status can be *Installed*, *Not Installed* or *Built-In*. "Built-In" status means that the feature is built into the SONiC image and can't be upgraded or uninstalled.
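The package list and statuses shown by this command are backed by the package database that this patch keeps at `/var/lib/sonic-package-manager/packages.json` (see `database.py` further down in the diff). As an illustration only, a single database entry serialized by `package_to_dict()` would look roughly like the sketch below; the concrete values here are hypothetical:

```
{
    "dhcp-relay": {
        "repository": "azure/docker-dhcp-relay",
        "description": "SONiC dhcp-relay package",
        "default-reference": "1.0.0",
        "installed-version": "1.0.0",
        "installed": true,
        "built-in": false,
        "image-id": "<docker image id>"
    }
}
```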
+ +- Usage: + ``` + sonic-package-manager list + ``` + +- Example: + ``` + admin@sonic:~$ sonic-package-manager list + Name            Repository                   Description                   Version    Status + --------------  ---------------------------  ----------------------------  ---------  -------------- + cpu-report      azure/cpu-report             CPU report package            N/A        Not Installed + database        docker-database              SONiC database package        1.0.0      Built-In + dhcp-relay      azure/docker-dhcp-relay      SONiC dhcp-relay package      1.0.0      Installed + fpm-frr         docker-fpm-frr               SONiC fpm-frr package         1.0.0      Built-In + lldp            docker-lldp                  SONiC lldp package            1.0.0      Built-In + macsec          docker-macsec                SONiC macsec package          1.0.0      Built-In + mgmt-framework  docker-sonic-mgmt-framework  SONiC mgmt-framework package  1.0.0      Built-In + nat             docker-nat                   SONiC nat package             1.0.0      Built-In + pmon            docker-platform-monitor      SONiC pmon package            1.0.0      Built-In + radv            docker-router-advertiser     SONiC radv package            1.0.0      Built-In + sflow           docker-sflow                 SONiC sflow package           1.0.0      Built-In + snmp            docker-snmp                  SONiC snmp package            1.0.0      Built-In + swss            docker-orchagent             SONiC swss package            1.0.0      Built-In + syncd           docker-syncd-mlnx            SONiC syncd package           1.0.0      Built-In + teamd           docker-teamd                 SONiC teamd package           1.0.0      Built-In + telemetry       docker-sonic-telemetry       SONiC telemetry package       1.0.0      Built-In + ``` + +**sonic-package-manager repository add** + +This command adds a new repository, as a source of SONiC packages, to the package database. *NOTE*: requires elevated (root) privileges to run. + +- Usage: + ``` + Usage: sonic-package-manager repository add [OPTIONS] NAME REPOSITORY + + Add a new repository to database. + + NOTE: This command requires elevated (root) privileges to run. + + Options: + --default-reference TEXT  Default installation reference. Can be a tag or + sha256 digest in repository. + --description TEXT  Optional package entry description. + --help  Show this message and exit. + ``` +- Example: + ``` + admin@sonic:~$ sudo sonic-package-manager repository add \ + cpu-report azure/sonic-cpu-report --default-reference 1.0.0 + ``` + +**sonic-package-manager repository remove** + +This command removes a repository (a source of SONiC packages) from the package database. The package has to be *Not Installed* in order to be removed from the package database. *NOTE*: requires elevated (root) privileges to run. + +- Usage: + ``` + Usage: sonic-package-manager repository remove [OPTIONS] NAME + + Remove repository from database. + + NOTE: This command requires elevated (root) privileges to run. + + Options: + --help  Show this message and exit. + ``` +- Example: + ``` + admin@sonic:~$ sudo sonic-package-manager repository remove cpu-report + ``` + +**sonic-package-manager install** + +This command pulls and installs a package on the SONiC host. *NOTE*: this command requires elevated (root) privileges to run. + +- Usage: + ``` + Usage: sonic-package-manager install [OPTIONS] [PACKAGE_EXPR] + + Install/Upgrade package using [PACKAGE_EXPR] in format + "<name>[=<version>|@<reference>]". + + The repository to pull the package from is resolved by lookup in + package database, thus the package has to be added via "sonic- + package-manager repository add" command. + + In case when [PACKAGE_EXPR] is a package name "<name>" this command + will install or upgrade to a version referenced by "default- + reference" in package database. + + NOTE: This command requires elevated (root) privileges to run. + + Options: + --enable  Set the default state of the feature to enabled + and enable feature right after installation. NOTE: + user needs to execute "config save -y" to make + this setting persistent.
+ --set-owner [local|kube]  Default owner configuration setting for a feature. + --from-repository TEXT  Fetch package directly from image registry + repository. NOTE: This argument is mutually + exclusive with arguments: [package_expr, + from_tarball]. + --from-tarball FILE  Fetch package from saved image tarball. NOTE: This + argument is mutually exclusive with arguments: + [package_expr, from_repository]. + -f, --force  Force operation by ignoring package dependency + tree and package manifest validation failures. + -y, --yes  Automatically answer yes on prompts. + -v, --verbosity LVL  Either CRITICAL, ERROR, WARNING, INFO or DEBUG. + Default is INFO. + --skip-host-plugins  Do not install host OS plugins provided by the + package (CLI, etc). NOTE: In case when package + host OS plugins are set as mandatory in package + manifest this option will fail the installation. + --allow-downgrade  Allow package downgrade. By default an attempt to + downgrade the package will result in a failure + since downgrade might not be supported by the + package, thus requires explicit request from the + user. + --help  Show this message and exit. + ``` +- Example: + ``` + admin@sonic:~$ sudo sonic-package-manager install dhcp-relay=1.0.2 + ``` + ``` + admin@sonic:~$ sudo sonic-package-manager install dhcp-relay@latest + ``` + ``` + admin@sonic:~$ sudo sonic-package-manager install dhcp-relay@sha256:9780f6d83e45878749497a6297ed9906c19ee0cc48cc88dc63827564bb8768fd + ``` + ``` + admin@sonic:~$ sudo sonic-package-manager install --from-repository azure/sonic-cpu-report:latest + ``` + ``` + admin@sonic:~$ sudo sonic-package-manager install --from-tarball sonic-docker-image.gz + ``` + +**sonic-package-manager uninstall** + +This command uninstalls a package from the SONiC host. The user needs to stop the feature prior to uninstalling it. +*NOTE*: this command requires elevated (root) privileges to run. + +- Usage: + ``` + Usage: sonic-package-manager uninstall [OPTIONS] NAME + + Uninstall package. + + NOTE: This command requires elevated (root) privileges to run. + + Options: + -f, --force  Force operation by ignoring package dependency tree and + package manifest validation failures. + -y, --yes  Automatically answer yes on prompts. + -v, --verbosity LVL  Either CRITICAL, ERROR, WARNING, INFO or DEBUG. Default + is INFO. + --help  Show this message and exit. + ``` +- Example: + ``` + admin@sonic:~$ sudo sonic-package-manager uninstall dhcp-relay + ``` + +**sonic-package-manager reset** + +This command resets a package by reinstalling it to its default version. *NOTE*: this command requires elevated (root) privileges to run. + +- Usage: + ``` + Usage: sonic-package-manager reset [OPTIONS] NAME + + Reset package to the default version. + + NOTE: This command requires elevated (root) privileges to run. + + Options: + -f, --force  Force operation by ignoring package dependency tree and + package manifest validation failures. + -y, --yes  Automatically answer yes on prompts. + -v, --verbosity LVL  Either CRITICAL, ERROR, WARNING, INFO or DEBUG. Default + is INFO. + --skip-host-plugins  Do not install host OS plugins provided by the package + (CLI, etc). NOTE: In case when package host OS plugins + are set as mandatory in package manifest this option + will fail the installation. + --help  Show this message and exit.
+ ``` +- Example: + ``` + admin@sonic:~$ sudo sonic-package-manager reset dhcp-relay + ``` + +**sonic-package-manager show package versions** + +This command will retrieve a list of all available versions for the given package from the configured upstream repository + +- Usage: + ``` + Usage: sonic-package-manager show package versions [OPTIONS] NAME + + Show available versions. + + Options: + --all Show all available tags in repository. + --plain Plain output. + --help Show this message and exit. + ``` +- Example: + ``` + admin@sonic:~$ sonic-package-manager show package versions dhcp-relay + • 1.0.0 + • 1.0.2 + • 2.0.0 + ``` + ``` + admin@sonic:~$ sonic-package-manager show package versions dhcp-relay --plain + 1.0.0 + 1.0.2 + 2.0.0 + ``` + ``` + admin@sonic:~$ sonic-package-manager show package versions dhcp-relay --all + • 1.0.0 + • 1.0.2 + • 2.0.0 + • latest + ``` + +**sonic-package-manager show package changelog** + +This command fetches the changelog from the package manifest and displays it. *NOTE*: package changelog can be retrieved from registry or read from image tarball without installing it. + +- Usage: + ``` + Usage: sonic-package-manager show package changelog [OPTIONS] [PACKAGE_EXPR] + + Show package changelog. + + Options: + --from-repository TEXT Fetch package directly from image registry + repository NOTE: This argument is mutually exclusive + with arguments: [from_tarball, package_expr]. + --from-tarball FILE Fetch package from saved image tarball NOTE: This + argument is mutually exclusive with arguments: + [package_expr, from_repository]. + --help Show this message and exit. + ``` +- Example: + ``` + admin@sonic:~$ sonic-package-manager show package changelog dhcp-relay + 1.0.0: + + • Initial release + + Author (author@email.com) Mon, 25 May 2020 12:25:00 +0300 + ``` + +**sonic-package-manager show package manifest** + +This command fetches the package manifest and displays it. *NOTE*: package manifest can be retrieved from registry or read from image tarball without installing it. + +- Usage: + ``` + Usage: sonic-package-manager show package manifest [OPTIONS] [PACKAGE_EXPR] + + Show package manifest. + + Options: + --from-repository TEXT Fetch package directly from image registry + repository NOTE: This argument is mutually exclusive + with arguments: [package_expr, from_tarball]. + --from-tarball FILE Fetch package from saved image tarball NOTE: This + argument is mutually exclusive with arguments: + [from_repository, package_expr]. + -v, --verbosity LVL Either CRITICAL, ERROR, WARNING, INFO or DEBUG + --help Show this message and exit. + ``` +- Example: + ``` + admin@sonic:~$ sonic-package-manager show package manifest dhcp-relay=2.0.0 + { + "version": "1.0.0", + "package": { + "version": "2.0.0", + "depends": [ + "database>=1.0.0,<2.0.0" + ] + }, + "service": { + "name": "dhcp_relay" + } + } + ``` ### SONiC Installer This is a command line tool available as part of the SONiC software; If the device is already running the SONiC software, this tool can be used to install an alternate image in the partition. @@ -8033,6 +8342,13 @@ This command is used to install a new image on the alternate image partition. T Done ``` +Installing a new image using the sonic-installer will keep using the packages installed on the currently running SONiC image and automatically migrate those. 
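Under the hood, this migration is handled by the new `migrate_sonic_packages()` helper added to `sonic_installer/main.py` in this patch: it mounts the newly installed image, starts that image's Docker daemon inside a chroot, copies over `/var/lib/sonic-package-manager/packages.json`, and then effectively runs the following inside the chroot (paths as set up by the helper):

```
sonic-package-manager migrate /tmp/packages.json --dockerd-socket /tmp/docker.sock -y
```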
In order to perform clean SONiC installation use the *--skip-package-migration* option: + +- Example: + ``` + admin@sonic:~$ sudo sonic-installer install https://sonic-jenkins.westus.cloudapp.azure.com/job/xxxx/job/buildimage-xxxx-all/xxx/artifact/target/sonic-xxxx.bin --skip-package-migration + ``` + **sonic-installer set_default** This command is be used to change the image which can be loaded by default in all the subsequent reboots. diff --git a/setup.py b/setup.py index cd706eb433c2..15f93b46f759 100644 --- a/setup.py +++ b/setup.py @@ -48,6 +48,8 @@ 'show.plugins', 'sonic_installer', 'sonic_installer.bootloader', + 'sonic_package_manager', + 'sonic_package_manager.service_creator', 'tests', 'undebug', 'utilities_common', @@ -151,13 +153,21 @@ 'sonic-clear = clear.main:cli', 'sonic-installer = sonic_installer.main:sonic_installer', 'sonic_installer = sonic_installer.main:sonic_installer', # Deprecated + 'sonic-package-manager = sonic_package_manager.main:cli', + 'spm = sonic_package_manager.main:cli', 'undebug = undebug.main:cli', 'watchdogutil = watchdogutil.main:watchdogutil', ] }, install_requires=[ 'click==7.0', + 'click-log==0.3.2', + 'docker==4.4.4', + 'docker-image-py==0.1.10', + 'filelock==3.0.12', + 'enlighten==1.8.0', 'ipaddress==1.0.23', + 'jinja2==2.11.3', 'jsondiff==1.2.0', 'jsonpatch==1.32.0', 'm2crypto==0.31.0', @@ -165,6 +175,8 @@ 'netaddr==0.8.0', 'netifaces==0.10.7', 'pexpect==4.8.0', + 'poetry-semver==0.1.0', + 'prettyprinter==0.18.0', 'pyroute2==0.5.14', 'requests==2.25.0', 'sonic-config-engine', @@ -173,6 +185,7 @@ 'sonic-yang-mgmt', 'swsssdk>=2.0.1', 'tabulate==0.8.2', + 'www-authenticate==0.9.2', 'xmltodict==0.12.0', ], setup_requires= [ @@ -180,6 +193,7 @@ 'wheel' ], tests_require = [ + 'pyfakefs', 'pytest', 'mockredispy>=2.9.3', 'deepdiff==5.2.3' diff --git a/sonic-utilities-data/bash_completion.d/sonic-package-manager b/sonic-utilities-data/bash_completion.d/sonic-package-manager new file mode 100644 index 000000000000..a8a245660359 --- /dev/null +++ b/sonic-utilities-data/bash_completion.d/sonic-package-manager @@ -0,0 +1,8 @@ +_sonic_package_manager_completion() { + COMPREPLY=( $( env COMP_WORDS="${COMP_WORDS[*]}" \ + COMP_CWORD=$COMP_CWORD \ + _SONIC_PACKAGE_MANAGER_COMPLETE=complete $1 ) ) + return 0 +} + +complete -F _sonic_package_manager_completion -o default sonic-package-manager; diff --git a/sonic-utilities-data/bash_completion.d/spm b/sonic-utilities-data/bash_completion.d/spm new file mode 120000 index 000000000000..3fff069223b3 --- /dev/null +++ b/sonic-utilities-data/bash_completion.d/spm @@ -0,0 +1 @@ +sonic-package-manager \ No newline at end of file diff --git a/sonic-utilities-data/templates/dump.sh.j2 b/sonic-utilities-data/templates/dump.sh.j2 new file mode 100644 index 000000000000..ebb7ed8f2452 --- /dev/null +++ b/sonic-utilities-data/templates/dump.sh.j2 @@ -0,0 +1,29 @@ +#!/bin/bash + +# +# =============== Managed by SONiC Package Manager. DO NOT EDIT! 
=============== +# auto-generated from {{ source }} by sonic-package-manager +# + +service="{{ manifest.service.name }}" +dump_command="{{ manifest.package['debug-dump'] }}" +container_re="^${service}[0-9]*$" +{% raw %} +container_ids="$(docker ps -f name=${container_re} -f status=running --format {{.Names}})" +{% endraw %} +tmp_dir=$(mktemp -d) +tmp_dump_dir="$tmp_dir/$service" +tmp_archive=$(mktemp) + +mkdir -p "$tmp_dump_dir" + +for container_id in $container_ids; do + docker exec -t "${container_id}" ${dump_command} &> "${tmp_dump_dir}/${container_id}" +done + + +tar -C $(dirname $tmp_dump_dir) -cf $tmp_archive $service + +cat $tmp_archive +rm $tmp_archive +rm -rf $tmp_dir diff --git a/sonic-utilities-data/templates/service_mgmt.sh.j2 b/sonic-utilities-data/templates/service_mgmt.sh.j2 new file mode 100644 index 000000000000..e46ba47380a0 --- /dev/null +++ b/sonic-utilities-data/templates/service_mgmt.sh.j2 @@ -0,0 +1,149 @@ +#!/bin/bash + +# +# =============== Managed by SONiC Package Manager. DO NOT EDIT! =============== +# auto-generated from {{ source }} by sonic-package-manager +# + +SERVICE="{{ manifest.service.name }}" +NAMESPACE_PREFIX="asic" +SONIC_DB_CLI="sonic-db-cli" +TMPDIR="/tmp/" +DEBUGLOG="${TMPDIR}/${SERVICE}.log" +[[ ! -z $DEV ]] && DEBUGLOG="${TMPDIR}/${SERVICE}-${DEV}.log" +[[ ! -z $DEV ]] && NET_NS="${NAMESPACE_PREFIX}${DEV}" # name of the network namespace +[[ ! -z $DEV ]] && SONIC_DB_CLI="${SONIC_DB_CLI} -n ${NET_NS}" + +{%- for service in manifest.service.dependent %} +{%- if service in multi_instance_services %} +MULTI_INST_DEPENDENT="${MULTI_INST_DEPENDENT} {{ service }}" +{%- else %} +DEPENDENT="${DEPENDENT} {{ service }}" +{%- endif %} +{%- endfor %} + +# Update dependent list based on other packages requirements +if [[ -f /etc/sonic/${SERVICE}_dependent ]]; then + DEPENDENT="${DEPENDENT} $(cat /etc/sonic/${SERVICE}_dependent)" +fi + +if [[ -f /etc/sonic/${SERVICE}_multi_inst_dependent ]]; then + MULTI_INST_DEPENDENT="${MULTI_INST_DEPENDENT} cat /etc/sonic/${SERVICE}_multi_inst_dependent" +fi + +function debug() +{ + /usr/bin/logger $1 + /bin/echo `date` "- $1" >> ${DEBUGLOG} +} + +function check_warm_boot() +{ + SYSTEM_WARM_START=`$SONIC_DB_CLI STATE_DB hget "WARM_RESTART_ENABLE_TABLE|system" enable` + SERVICE_WARM_START=`$SONIC_DB_CLI STATE_DB hget "WARM_RESTART_ENABLE_TABLE|${SERVICE}" enable` + if [[ x"$SYSTEM_WARM_START" == x"true" ]] || [[ x"$SERVICE_WARM_START" == x"true" ]]; then + WARM_BOOT="true" +{#- TODO: restore count validation for SONiC packages #} + else + WARM_BOOT="false" + fi +} + +function check_fast_boot() +{ + if [[ $($SONIC_DB_CLI STATE_DB GET "FAST_REBOOT|system") == "1" ]]; then + FAST_BOOT="true" + else + FAST_BOOT="false" + fi +} + +function start_dependent_services() { + if [[ x"$WARM_BOOT" != x"true" ]]; then + for dep in ${DEPENDENT}; do + /bin/systemctl start ${dep} + done + for dep in ${MULTI_INST_DEPENDENT}; do + if [[ ! -z $DEV ]]; then + /bin/systemctl start ${dep}@$DEV + else + /bin/systemctl start ${dep} + fi + done + fi +} + +function stop_dependent_services() { + if [[ x"$WARM_BOOT" != x"true" ]] && [[ x"$FAST_BOOT" != x"true" ]]; then + for dep in ${DEPENDENT}; do + /bin/systemctl stop ${dep} + done + for dep in ${MULTI_INST_DEPENDENT}; do + if [[ ! -z $DEV ]]; then + /bin/systemctl stop ${dep}@$DEV + else + /bin/systemctl stop ${dep} + fi + done + fi +} + +function start() { + debug "Starting ${SERVICE}$DEV service..." 
+ + # start service docker + /usr/bin/${SERVICE}.sh start $DEV + debug "Started ${SERVICE}$DEV service..." + +{%- if manifest.service["post-start-action"] %} + docker exec -t ${SERVICE}${DEV} {{ manifest.service["post-start-action"] }} +{%- endif %} +} + +function wait() { + start_dependent_services + + if [[ ! -z $DEV ]]; then + /usr/bin/${SERVICE}.sh wait $DEV + else + /usr/bin/${SERVICE}.sh wait + fi +} + +function stop() { + debug "Stopping ${SERVICE}$DEV service..." + +{%- if manifest.service["pre-shutdown-action"] %} + docker exec -t ${SERVICE}${DEV} {{ manifest.service["pre-shutdown-action"] }} +{%- endif %} + + # For WARM/FAST boot do not perform service stop + if [[ x"$WARM_BOOT" != x"true" ]] && [[ x"$FAST_BOOT" != x"true" ]]; then + /usr/bin/${SERVICE}.sh stop $DEV + else + docker kill ${SERVICE}$DEV &> /dev/null || debug "Docker ${SERVICE}$DEV is not running ($?) ..." + fi + + debug "Stopped ${SERVICE}$DEV service..." + + stop_dependent_services +} + +OP=$1 +DEV=$2 + +check_warm_boot +check_fast_boot + +debug "Fast boot flag: ${SERVICE}$DEV ${FAST_BOOT}." +debug "Warm boot flag: ${SERVICE}$DEV ${WARM_BOOT}." + +case "$OP" in + start|wait|stop) + $1 + ;; + *) + echo "Usage: $0 {start|wait|stop}" + exit 1 + ;; +esac diff --git a/sonic-utilities-data/templates/sonic.service.j2 b/sonic-utilities-data/templates/sonic.service.j2 new file mode 100644 index 000000000000..72d6ab698c78 --- /dev/null +++ b/sonic-utilities-data/templates/sonic.service.j2 @@ -0,0 +1,39 @@ +# +# =============== Managed by SONiC Package Manager. DO NOT EDIT! =============== +# auto-generated from {{ source }} by sonic-package-manager +# +{%- set path = '/usr/local/bin' %} +{%- set multi_instance = multi_instance|default(False) %} +{%- set multi_instance_services = multi_instance_services|default([]) %} +[Unit] +Description={{ manifest.service.name }} container +{%- for service in manifest.service.requires %} +Requires={{ service }}{% if multi_instance and service in multi_instance_services %}@%i{% endif %}.service +{%- endfor %} +{%- for service in manifest.service.requisite %} +Requisite={{ service }}{% if multi_instance and service in multi_instance_services %}@%i{% endif %}.service +{%- endfor %} +{%- for service in manifest.service.after %} +After={{ service }}{% if multi_instance and service in multi_instance_services %}@%i{% endif %}.service +{%- endfor %} +{%- for service in manifest.service.before %} +Before={{ service }}{% if multi_instance and service in multi_instance_services %}@%i{% endif %}.service +{%- endfor %} +BindsTo=sonic.target +After=sonic.target +StartLimitIntervalSec=1200 +StartLimitBurst=3 + +[Service] +ExecStartPre={{path}}/{{manifest.service.name}}.sh start{% if multi_instance %} %i{% endif %} +ExecStart={{path}}/{{manifest.service.name}}.sh wait{% if multi_instance %} %i{% endif %} +ExecStop={{path}}/{{manifest.service.name}}.sh stop{% if multi_instance %} %i{% endif %} +RestartSec=30 + +{%- if not manifest.service.delayed %} +[Install] +WantedBy=sonic.target +{%- for service in manifest.service["wanted-by"] %} +WantedBy={{ service }}{% if multi_instance and service in multi_instance_services %}@%i{% endif %}.service +{%- endfor %} +{%- endif %} diff --git a/sonic-utilities-data/templates/timer.unit.j2 b/sonic-utilities-data/templates/timer.unit.j2 new file mode 100644 index 000000000000..a757b8deb859 --- /dev/null +++ b/sonic-utilities-data/templates/timer.unit.j2 @@ -0,0 +1,15 @@ +# +# =============== Managed by SONiC Package Manager. DO NOT EDIT! 
=============== +# auto-generated from {{ source }} by sonic-package-manager +# +[Unit] +Description=Delays {{ manifest.service.name }} until SONiC has started +PartOf={{ manifest.service.name }}{% if multi_instance %}@%i{% endif %}.service + +[Timer] +OnUnitActiveSec=0 sec +OnBootSec=3min 30 sec +Unit={{ manifest.service.name }}{% if multi_instance %}@%i{% endif %}.service + +[Install] +WantedBy=timers.target sonic.target diff --git a/sonic_installer/bootloader/aboot.py b/sonic_installer/bootloader/aboot.py index 3bf3e297e732..a2ef2acf4fcd 100644 --- a/sonic_installer/bootloader/aboot.py +++ b/sonic_installer/bootloader/aboot.py @@ -19,7 +19,6 @@ HOST_PATH, IMAGE_DIR_PREFIX, IMAGE_PREFIX, - ROOTFS_NAME, run_command, run_command_or_raise, ) @@ -189,14 +188,14 @@ def _get_swi_file_offset(self, swipath, filename): return f._fileobj.tell() # pylint: disable=protected-access @contextmanager - def get_rootfs_path(self, image_path): - rootfs_path = os.path.join(image_path, ROOTFS_NAME) - if os.path.exists(rootfs_path) and not isSecureboot(): - yield rootfs_path + def get_path_in_image(self, image_path, path): + path_in_image = os.path.join(image_path, path) + if os.path.exists(path_in_image) and not isSecureboot(): + yield path_in_image return swipath = os.path.join(image_path, DEFAULT_SWI_IMAGE) - offset = self._get_swi_file_offset(swipath, ROOTFS_NAME) + offset = self._get_swi_file_offset(swipath, path) loopdev = subprocess.check_output(['losetup', '-f']).decode('utf8').rstrip() try: diff --git a/sonic_installer/bootloader/bootloader.py b/sonic_installer/bootloader/bootloader.py index b59c9edccde9..a6694977ae3b 100644 --- a/sonic_installer/bootloader/bootloader.py +++ b/sonic_installer/bootloader/bootloader.py @@ -9,7 +9,6 @@ HOST_PATH, IMAGE_DIR_PREFIX, IMAGE_PREFIX, - ROOTFS_NAME, ) class Bootloader(object): @@ -71,6 +70,6 @@ def get_image_path(cls, image): return image.replace(IMAGE_PREFIX, prefix) @contextmanager - def get_rootfs_path(self, image_path): + def get_path_in_image(self, image_path, path_in_image): """returns the path to the squashfs""" - yield path.join(image_path, ROOTFS_NAME) + yield path.join(image_path, path_in_image) diff --git a/sonic_installer/common.py b/sonic_installer/common.py index c49aaac032d8..ac1416789f1c 100644 --- a/sonic_installer/common.py +++ b/sonic_installer/common.py @@ -14,6 +14,9 @@ IMAGE_PREFIX = 'SONiC-OS-' IMAGE_DIR_PREFIX = 'image-' ROOTFS_NAME = 'fs.squashfs' +UPPERDIR_NAME = 'rw' +WORKDIR_NAME = 'work' +DOCKERDIR_NAME = 'docker' # Run bash command and print output to stdout def run_command(command): diff --git a/sonic_installer/main.py b/sonic_installer/main.py index 92ad7677f4d7..12a2ab7e0e12 100644 --- a/sonic_installer/main.py +++ b/sonic_installer/main.py @@ -1,4 +1,5 @@ import configparser +import contextlib import os import re import subprocess @@ -11,7 +12,14 @@ from swsscommon.swsscommon import SonicV2Connector from .bootloader import get_bootloader -from .common import run_command, run_command_or_raise, IMAGE_PREFIX +from .common import ( + run_command, run_command_or_raise, + IMAGE_PREFIX, + ROOTFS_NAME, + UPPERDIR_NAME, + WORKDIR_NAME, + DOCKERDIR_NAME, +) from .exception import SonicRuntimeException SYSLOG_IDENTIFIER = "sonic-installer" @@ -218,17 +226,48 @@ def print_deprecation_warning(deprecated_cmd_or_subcmd, new_cmd_or_subcmd): fg="red", err=True) click.secho("Please use '{}' instead".format(new_cmd_or_subcmd), fg="red", err=True) -def update_sonic_environment(click, bootloader, binary_image_version): + +def 
mount_squash_fs(squashfs_path, mount_point): + run_command_or_raise(["mkdir", "-p", mount_point]) + run_command_or_raise(["mount", "-t", "squashfs", squashfs_path, mount_point]) + + +def umount(mount_point, read_only=True, recursive=False, force=True, remove_dir=True): + flags = [] + if read_only: + flags.append("-r") + if force: + flags.append("-f") + if recursive: + flags.append("-R") + run_command_or_raise(["umount", *flags, mount_point]) + if remove_dir: + run_command_or_raise(["rm", "-rf", mount_point]) + + +def mount_overlay_fs(lowerdir, upperdir, workdir, mount_point): + run_command_or_raise(["mkdir", "-p", mount_point]) + overlay_options = "rw,relatime,lowerdir={},upperdir={},workdir={}".format(lowerdir, upperdir, workdir) + run_command_or_raise(["mount", "overlay", "-t", "overlay", "-o", overlay_options, mount_point]) + + +def mount_bind(source, mount_point): + run_command_or_raise(["mkdir", "-p", mount_point]) + run_command_or_raise(["mount", "--bind", source, mount_point]) + + +def mount_procfs_chroot(root): + run_command_or_raise(["chroot", root, "mount", "proc", "/proc", "-t", "proc"]) + + +def mount_sysfs_chroot(root): + run_command_or_raise(["chroot", root, "mount", "sysfs", "/sys", "-t", "sysfs"]) + + +def update_sonic_environment(bootloader, binary_image_version): """Prepare sonic environment variable using incoming image template file. If incoming image template does not exist use current image template file. """ - def mount_next_image_fs(squashfs_path, mount_point): - run_command_or_raise(["mkdir", "-p", mount_point]) - run_command_or_raise(["mount", "-t", "squashfs", squashfs_path, mount_point]) - - def umount_next_image_fs(mount_point): - run_command_or_raise(["umount", "-rf", mount_point]) - run_command_or_raise(["rm", "-rf", mount_point]) SONIC_ENV_TEMPLATE_FILE = os.path.join("usr", "share", "sonic", "templates", "sonic-environment.j2") SONIC_VERSION_YML_FILE = os.path.join("etc", "sonic", "sonic_version.yml") @@ -239,9 +278,9 @@ def umount_next_image_fs(mount_point): env_dir = os.path.join(new_image_dir, "sonic-config") env_file = os.path.join(env_dir, "sonic-environment") - with bootloader.get_rootfs_path(new_image_dir) as new_image_squashfs_path: + with bootloader.get_path_in_image(new_image_dir, ROOTFS_NAME) as new_image_squashfs_path: try: - mount_next_image_fs(new_image_squashfs_path, new_image_mount) + mount_squash_fs(new_image_squashfs_path, new_image_mount) next_sonic_env_template_file = os.path.join(new_image_mount, SONIC_ENV_TEMPLATE_FILE) next_sonic_version_yml_file = os.path.join(new_image_mount, SONIC_VERSION_YML_FILE) @@ -264,7 +303,62 @@ def umount_next_image_fs(mount_point): os.remove(env_file) os.rmdir(env_dir) finally: - umount_next_image_fs(new_image_mount) + umount(new_image_mount) + + +def migrate_sonic_packages(bootloader, binary_image_version): + """ Migrate SONiC packages to new SONiC image. 
""" + + SONIC_PACKAGE_MANAGER = "sonic-package-manager" + PACKAGE_MANAGER_DIR = "/var/lib/sonic-package-manager/" + DOCKER_CTL_SCRIPT = "/usr/lib/docker/docker.sh" + DOCKERD_SOCK = "docker.sock" + VAR_RUN_PATH = "/var/run/" + + tmp_dir = "tmp" + packages_file = "packages.json" + packages_path = os.path.join(PACKAGE_MANAGER_DIR, packages_file) + sonic_version = re.sub(IMAGE_PREFIX, '', binary_image_version) + new_image_dir = bootloader.get_image_path(binary_image_version) + + with contextlib.ExitStack() as stack: + def get_path(path): + """ Closure to get path by entering + a context manager of bootloader.get_path_in_image """ + + return stack.enter_context(bootloader.get_path_in_image(new_image_dir, path)) + + new_image_squashfs_path = get_path(ROOTFS_NAME) + new_image_upper_dir = get_path(UPPERDIR_NAME) + new_image_work_dir = get_path(WORKDIR_NAME) + new_image_docker_dir = get_path(DOCKERDIR_NAME) + new_image_mount = os.path.join("/", tmp_dir, "image-{0}-fs".format(sonic_version)) + new_image_docker_mount = os.path.join(new_image_mount, "var", "lib", "docker") + + try: + mount_squash_fs(new_image_squashfs_path, new_image_mount) + # make sure upper dir and work dir exist + run_command_or_raise(["mkdir", "-p", new_image_upper_dir]) + run_command_or_raise(["mkdir", "-p", new_image_work_dir]) + mount_overlay_fs(new_image_mount, new_image_upper_dir, new_image_work_dir, new_image_mount) + mount_bind(new_image_docker_dir, new_image_docker_mount) + mount_procfs_chroot(new_image_mount) + mount_sysfs_chroot(new_image_mount) + run_command_or_raise(["chroot", new_image_mount, DOCKER_CTL_SCRIPT, "start"]) + run_command_or_raise(["cp", packages_path, os.path.join(new_image_mount, tmp_dir, packages_file)]) + run_command_or_raise(["touch", os.path.join(new_image_mount, "tmp", DOCKERD_SOCK)]) + run_command_or_raise(["mount", "--bind", + os.path.join(VAR_RUN_PATH, DOCKERD_SOCK), + os.path.join(new_image_mount, "tmp", DOCKERD_SOCK)]) + run_command_or_raise(["chroot", new_image_mount, SONIC_PACKAGE_MANAGER, "migrate", + os.path.join("/", tmp_dir, packages_file), + "--dockerd-socket", os.path.join("/", tmp_dir, DOCKERD_SOCK), + "-y"]) + finally: + run_command("chroot {} {} stop".format(new_image_mount, DOCKER_CTL_SCRIPT)) + umount(new_image_mount, recursive=True, read_only=False, remove_dir=False) + umount(new_image_mount) + # Main entrypoint @click.group(cls=AliasedGroup) @@ -286,8 +380,10 @@ def sonic_installer(): help="Force installation of an image of a type which differs from that of the current running image") @click.option('--skip_migration', is_flag=True, help="Do not migrate current configuration to the newly installed image") +@click.option('--skip-package-migration', is_flag=True, + help="Do not migrate current packages to the newly installed image") @click.argument('url') -def install(url, force, skip_migration=False): +def install(url, force, skip_migration=False, skip_package_migration=False): """ Install image from local binary or URL""" bootloader = get_bootloader() @@ -331,7 +427,10 @@ def install(url, force, skip_migration=False): else: run_command('config-setup backup') - update_sonic_environment(click, bootloader, binary_image_version) + update_sonic_environment(bootloader, binary_image_version) + + if not skip_package_migration: + migrate_sonic_packages(bootloader, binary_image_version) # Finally, sync filesystem run_command("sync;sync;sync") diff --git a/sonic_package_manager/__init__.py b/sonic_package_manager/__init__.py new file mode 100644 index 000000000000..9d8827c5e4d3 --- 
/dev/null +++ b/sonic_package_manager/__init__.py @@ -0,0 +1,5 @@ +#!/usr/bin/env python + +from sonic_package_manager.manager import PackageManager + +__all__ = ['PackageManager'] diff --git a/sonic_package_manager/constraint.py b/sonic_package_manager/constraint.py new file mode 100644 index 000000000000..af5a13000ba6 --- /dev/null +++ b/sonic_package_manager/constraint.py @@ -0,0 +1,166 @@ +#!/usr/bin/env python + +""" Package version constraints module. """ + +import re +from abc import ABC +from dataclasses import dataclass, field +from typing import Dict, Union + +import semver + + +class VersionConstraint(semver.VersionConstraint, ABC): + """ Extends VersionConstraint from semver package. """ + + @staticmethod + def parse(constraint_expression: str) -> 'VersionConstraint': + """ Parse version constraint. + + Args: + constraint_expression: Expression syntax: "[[op][version]]+". + Returns: + The resulting VersionConstraint object. + """ + + return semver.parse_constraint(constraint_expression) + + +@dataclass +class ComponentConstraints: + """ ComponentConstraints is a set of components version constraints. """ + + components: Dict[str, VersionConstraint] = field(default_factory=dict) + + @staticmethod + def parse(constraints: Dict) -> 'ComponentConstraints': + """ Parse constraint from dictionary. + + Args: + constraints: dictionary with component name + as key and constraint expression as value + + Returns: + ComponentConstraints object. + + """ + + components = {component: VersionConstraint.parse(version) + for component, version in constraints.items()} + return ComponentConstraints(components) + + def deparse(self) -> Dict[str, str]: + """ Returns the manifest representation of components constraints. + + Returns: + Dictionary of string keys and string values. + + """ + + return { + component: str(version) for component, version in self.components.items() + } + + +@dataclass +class PackageConstraint: + """ PackageConstraint is a package version constraint. """ + + name: str + constraint: VersionConstraint + _components: ComponentConstraints = ComponentConstraints({}) + + def __str__(self): return f'{self.name}{self.constraint}' + + @property + def components(self): return self._components.components + + @staticmethod + def from_string(constraint_expression: str) -> 'PackageConstraint': + """ Parse package constraint string which contains a package + name separated by a space with zero, one or more version constraint + expressions. A variety of version matching operators are supported + including >, <, ==, !=, ^, *. See Examples. + + Args: + constraint_expression: Expression syntax "[package name] [[op][version]]+". + + Returns: + PackageConstraint object. + + Examples: + >>> PackageConstraint.parse('syncd^1.0.0').constraint + <VersionRange (>=1.0.0,<2.0.0)> + >>> PackageConstraint.parse('swss>1.3.2 <4.2.1').constraint + <VersionRange (>1.3.2,<4.2.1)> + >>> PackageConstraint.parse('swss').constraint + + """ + + REQUIREMENT_SPECIFIER_RE = \ + r'(?P<name>[A-Za-z0-9_-]+)(?P<constraint>.*)' + + match = re.match(REQUIREMENT_SPECIFIER_RE, constraint_expression) + if match is None: + raise ValueError(f'Invalid constraint {constraint_expression}') + groupdict = match.groupdict() + name = groupdict.get('name') + constraint = groupdict.get('constraint') or '*' + return PackageConstraint(name, VersionConstraint.parse(constraint)) + + @staticmethod + def from_dict(constraint_dict: Dict) -> 'PackageConstraint': + """ Parse package constraint information from dictionary.
E.g: + + { + "name": "swss", + "version": "^1.0.0", + "componenets": { + "libswsscommon": "^1.0.0" + } + } + + Args: + constraint_dict: Dictionary of constraint infromation. + + Returns: + PackageConstraint object. + """ + + name = constraint_dict['name'] + version = VersionConstraint.parse(constraint_dict.get('version') or '*') + components = ComponentConstraints.parse(constraint_dict.get('components', {})) + return PackageConstraint(name, version, components) + + @staticmethod + def parse(constraint: Union[str, Dict]) -> 'PackageConstraint': + """ Parse constraint from string expression or dictionary. + + Args: + constraint: string or dictionary. Check from_str() and from_dict() methods. + + Returns: + PackageConstraint object. + + """ + + if type(constraint) is str: + return PackageConstraint.from_string(constraint) + elif type(constraint) is dict: + return PackageConstraint.from_dict(constraint) + else: + raise ValueError('Input argument should be either str or dict') + + def deparse(self) -> Dict: + """ Returns the manifest representation of package constraint. + + Returns: + Dictionary in manifest representation. + + """ + + return { + 'name': self.name, + 'version': str(self.constraint), + 'components': self._components.deparse(), + } diff --git a/sonic_package_manager/database.py b/sonic_package_manager/database.py new file mode 100644 index 000000000000..6c1cec5c07e9 --- /dev/null +++ b/sonic_package_manager/database.py @@ -0,0 +1,222 @@ +#!/usr/bin/env python + +""" Repository Database interface module. """ +import json +import os +from dataclasses import dataclass, replace +from typing import Optional, Dict, Callable + +from sonic_package_manager.errors import PackageManagerError, PackageNotFoundError, PackageAlreadyExistsError +from sonic_package_manager.version import Version + +BASE_LIBRARY_PATH = '/var/lib/sonic-package-manager/' +PACKAGE_MANAGER_DB_FILE_PATH = os.path.join(BASE_LIBRARY_PATH, 'packages.json') +PACKAGE_MANAGER_LOCK_FILE = os.path.join(BASE_LIBRARY_PATH, '.lock') + + +@dataclass(order=True) +class PackageEntry: + """ Package database single entry object. + + Attributes: + name: Name of the package + repository: Default repository to pull package from. + description: Package description or None if package does not + provide a description. + default_reference: Default reference (tag or digest) or None + if default reference is not provided. + version: Installed version of the package or None if + package is not installed. + installed: Boolean flag whether the package is installed. + built_in: Boolean flag whether the package is built in. + image_id: Image ID for this package or None if package + is not installed. 
+ """ + + name: str + repository: Optional[str] + description: Optional[str] = None + default_reference: Optional[str] = None + version: Optional[Version] = None + installed: bool = False + built_in: bool = False + image_id: Optional[str] = None + + +def package_from_dict(name: str, package_info: Dict) -> PackageEntry: + """ Parse dictionary into PackageEntry object.""" + + repository = package_info.get('repository') + description = package_info.get('description') + default_reference = package_info.get('default-reference') + version = package_info.get('installed-version') + if version: + version = Version.parse(version) + installed = package_info.get('installed', False) + built_in = package_info.get('built-in', False) + image_id = package_info.get('image-id') + + return PackageEntry(name, repository, description, + default_reference, version, installed, + built_in, image_id) + + +def package_to_dict(package: PackageEntry) -> Dict: + """ Serialize package into dictionary. """ + + return { + 'repository': package.repository, + 'description': package.description, + 'default-reference': package.default_reference, + 'installed-version': None if package.version is None else str(package.version), + 'installed': package.installed, + 'built-in': package.built_in, + 'image-id': package.image_id, + } + + +class PackageDatabase: + """ An interface to SONiC repository database """ + + def __init__(self, + database: Dict[str, PackageEntry], + on_save: Optional[Callable] = None): + """ Initialize PackageDatabase. + + Args: + database: Database dictionary + on_save: Optional callback to execute on commit() + """ + + self._database = database + self._on_save = on_save + + def add_package(self, + name: str, + repository: str, + description: Optional[str] = None, + default_reference: Optional[str] = None): + """ Adds a new package entry in database. + + Args: + name: Package name. + repository: Repository URL. + description: Description string. + default_reference: Default version string. + + Raises: + PackageAlreadyExistsError: if package already exists in database. + """ + + if self.has_package(name): + raise PackageAlreadyExistsError(name) + + package = PackageEntry(name, repository, description, default_reference) + self._database[name] = package + + def remove_package(self, name: str): + """ Removes package entry from database. + + Args: + name: repository name. + Raises: + PackageNotFoundError: Raises when package with the given name does not exist + in the database. + """ + + pkg = self.get_package(name) + + if pkg.built_in: + raise PackageManagerError(f'Package {name} is built-in, cannot remove it') + + if pkg.installed: + raise PackageManagerError(f'Package {name} is installed, uninstall it first') + + self._database.pop(name) + + def update_package(self, pkg: PackageEntry): + """ Modify repository in the database. + + Args: + pkg: Repository object. + Raises: + PackageManagerError: Raises when repository with the given name does not exist + in the database. + """ + + name = pkg.name + + if not self.has_package(name): + raise PackageNotFoundError(name) + + self._database[name] = pkg + + def get_package(self, name: str) -> PackageEntry: + """ Return a package referenced by name. + If the package is not found PackageNotFoundError is thrown. + + Args: + name: Package name. + Returns: + PackageInfo object. + Raises: + PackageNotFoundError: When package called name was not found. 
+ """ + + try: + pkg = self._database[name] + except KeyError: + raise PackageNotFoundError(name) + + return replace(pkg) + + def has_package(self, name: str) -> bool: + """ Checks if the database contains an entry for a package. + called name. Returns True if the package exists, otherwise False. + + Args: + name: Package name. + Returns: + True if the package exists, otherwise False. + """ + + try: + self.get_package(name) + return True + except PackageNotFoundError: + return False + + def __iter__(self): + """ Iterates over packages in the database. + + Yields: + PackageInfo object. + """ + + for name, _ in self._database.items(): + yield self.get_package(name) + + @staticmethod + def from_file(db_file=PACKAGE_MANAGER_DB_FILE_PATH) -> 'PackageDatabase': + """ Read database content from file. """ + + def on_save(database): + with open(db_file, 'w') as db: + db_content = {} + for name, package in database.items(): + db_content[name] = package_to_dict(package) + json.dump(db_content, db, indent=4) + + database = {} + with open(db_file) as db: + db_content = json.load(db) + for key in db_content: + package = package_from_dict(key, db_content[key]) + database[key] = package + return PackageDatabase(database, on_save) + + def commit(self): + """ Save database content to file. """ + + if self._on_save: + self._on_save(self._database) diff --git a/sonic_package_manager/dockerapi.py b/sonic_package_manager/dockerapi.py new file mode 100644 index 000000000000..926600d0bc19 --- /dev/null +++ b/sonic_package_manager/dockerapi.py @@ -0,0 +1,226 @@ +#!/usr/bin/evn python + +""" Module provides Docker interface. """ + +import contextlib +import io +import tarfile +import re +from typing import Optional + +from sonic_package_manager.logger import log +from sonic_package_manager.progress import ProgressManager + + +def is_digest(ref: str): + return ref.startswith('sha256:') + + +def bytes_to_mb(bytes): + return bytes / 1024 / 1024 + + +def get_id(line): + return line['id'] + + +def get_status(line): + return line['status'] + + +def get_progress(line): + progress = line['progressDetail'] + current = bytes_to_mb(progress['current']) + total = bytes_to_mb(progress['total']) + return current, total + + +def process_progress(progress_manager, line): + try: + status = get_status(line) + id = get_id(line) + current, total = get_progress(line) + + if id not in progress_manager: + progress_manager.new(id, + total=total, + unit='Mb', + desc=f'{status} {id}') + pbar = progress_manager.get(id) + + # Complete status + if 'complete' in status: + pbar.desc = f'{status} {id}' + pbar.update(pbar.total) + return + + # Status changed + if status not in pbar.desc: + pbar.desc = f'{status} {id}' + pbar.total = total + pbar.count = 0 + + pbar.update(current - pbar.count) + except KeyError: + # not a progress line + return + + +def get_repository_from_image(image): + """ Returns the first RepoTag repository + found in image. """ + + repotags = image.attrs['RepoTags'] + for repotag in repotags: + repository, tag = repotag.split(':') + return repository + + +class DockerApi: + """ DockerApi provides a set of methods - + wrappers around docker client methods """ + + def __init__(self, + client, + progress_manager: Optional[ProgressManager] = None): + self.client = client + self.progress_manager = progress_manager + + def pull(self, repository: str, + reference: Optional[str] = None): + """ Docker 'pull' command. 
Args: + repository: repository to pull + reference: tag or digest + """ + + log.debug(f'pulling image from {repository} reference={reference}') + + api = self.client.api + progress_manager = self.progress_manager + + digest = None + + with progress_manager or contextlib.nullcontext(): + for line in api.pull(repository, + reference, + stream=True, + decode=True): + log.debug(f'pull status: {line}') + + status = get_status(line) + + # Record pulled digest + digest_match = re.match(r'Digest: (?P<sha>.*)', status) + if digest_match: + digest = digest_match.groupdict()['sha'] + + if progress_manager: + process_progress(progress_manager, line) + + log.debug(f'Digest: {digest}') + log.debug(f'image from {repository} reference={reference} pulled successfully') + + return self.get_image(f'{repository}@{digest}') + + def load(self, imgpath: str): + """ Docker 'load' command. + Args: + + """ + + log.debug(f'loading image from {imgpath}') + + api = self.client.api + progress_manager = self.progress_manager + + imageid = None + repotag = None + + with progress_manager or contextlib.nullcontext(): + with open(imgpath, 'rb') as imagefile: + for line in api.load_image(imagefile, quiet=False): + log.debug(f'pull status: {line}') + + if progress_manager: + process_progress(progress_manager, line) + + if 'stream' not in line: + continue + + stream = line['stream'] + repotag_match = re.match(r'Loaded image: (?P<repotag>.*)\n', stream) + if repotag_match: + repotag = repotag_match.groupdict()['repotag'] + imageid_match = re.match(r'Loaded image ID: sha256:(?P<id>.*)\n', stream) + if imageid_match: + imageid = imageid_match.groupdict()['id'] + + imagename = repotag if repotag else imageid + log.debug(f'Loaded image {imagename}') + + return self.get_image(imagename) + + def rmi(self, image: str, **kwargs): + """ Docker 'rmi -f' command. """ + + log.debug(f'removing image {image} kwargs={kwargs}') + + self.client.images.remove(image, **kwargs) + + log.debug(f'image {image} removed successfully') + + def tag(self, image: str, repotag: str, **kwargs): + """ Docker 'tag' command """ + + log.debug(f'tagging image {image} {repotag} kwargs={kwargs}') + + img = self.client.images.get(image) + img.tag(repotag, **kwargs) + + log.debug(f'image {image} tagged {repotag} successfully') + + def rm(self, container: str, **kwargs): + """ Docker 'rm' command. """ + + self.client.containers.get(container).remove(**kwargs) + log.debug(f'removed container {container}') + + def ps(self, **kwargs): + """ Docker 'ps' command. """ + + return self.client.containers.list(**kwargs) + + def labels(self, image: str): + """ Returns a list of labels associated with image. """ + + log.debug(f'inspecting image labels {image}') + + labels = self.client.images.get(image).labels + + log.debug(f'image {image} labels successfully: {labels}') + return labels + + def get_image(self, name: str): + return self.client.images.get(name) + + def extract(self, image, src_path: str, dst_path: str): + """ Copy src_path from the docker image to host dst_path.
""" + + buf = bytes() + + container = self.client.containers.create(image) + try: + bits, _ = container.get_archive(src_path) + for chunk in bits: + buf += chunk + finally: + container.remove(force=True) + + with tarfile.open(fileobj=io.BytesIO(buf)) as tar: + for member in tar: + if dst_path.endswith('/'): + tar.extract(member, dst_path) + else: + member.name = dst_path + tar.extract(member, dst_path) diff --git a/sonic_package_manager/errors.py b/sonic_package_manager/errors.py new file mode 100644 index 000000000000..17279c52c4ca --- /dev/null +++ b/sonic_package_manager/errors.py @@ -0,0 +1,146 @@ +#!/usr/bin/env python + +""" SONiC Package Manager exceptions are defined in this module. """ + +from dataclasses import dataclass +from typing import Optional + +from sonic_package_manager.constraint import PackageConstraint, VersionConstraint +from sonic_package_manager.version import Version + + +class PackageManagerError(Exception): + """ Base class for exceptions generated by SONiC package manager """ + + pass + + +class ManifestError(Exception): + """ Class for manifest validate failures. """ + + pass + + +class MetadataError(Exception): + """ Class for metadata failures. """ + + pass + + +@dataclass +class PackageNotFoundError(PackageManagerError): + """ Repository not found in repository database exception """ + + name: str + + def __str__(self): + return f'Package {self.name} is not found in packages database' + + +@dataclass +class PackageAlreadyExistsError(PackageManagerError): + """ Package already exists in the packages database exception. """ + + name: str + + def __str__(self): + return f'Package {self.name} already exists in packages database' + + +class PackageInstallationError(PackageManagerError): + """ Exception for package installation error. """ + + pass + + +class PackageUninstallationError(PackageManagerError): + """ Exception for package installation error. """ + + pass + + +class PackageUpgradeError(PackageManagerError): + """ Exception for package upgrade error. """ + + pass + + +@dataclass +class PackageSonicRequirementError(PackageInstallationError): + """ Exception for installation errors, when SONiC version requirement is not met. """ + + name: str + component: str + constraint: PackageConstraint + installed_ver: Optional[Version] = None + + def __str__(self): + if self.installed_ver is not None: + return (f'Package {self.name} requires base OS component {self.component} version {self.constraint} ' + f'while the installed version is {self.installed_ver}') + return (f'Package {self.name} requires base OS component {self.component} version {self.constraint} ' + f'but it is not present int base OS image') + + +@dataclass +class PackageDependencyError(PackageInstallationError): + """ Exception class for installation errors related to missing dependency. """ + + name: str + constraint: PackageConstraint + installed_ver: Optional[Version] = None + + def __str__(self): + if self.installed_ver: + return (f'Package {self.name} requires {self.constraint} ' + f'but version {self.installed_ver} is installed') + return f'Package {self.name} requires {self.constraint} but it is not installed' + + +@dataclass +class PackageComponentDependencyError(PackageInstallationError): + """ Exception class for installation error caused by component + version dependency. 
""" + + name: str + dependency: str + component: str + constraint: VersionConstraint + installed_ver: Optional[Version] = None + + def __str__(self): + if self.installed_ver: + return (f'Package {self.name} requires {self.component} {self.constraint} ' + f'in package {self.dependency} but version {self.installed_ver} is installed') + return (f'Package {self.name} requires {self.component} {self.constraint} ' + f'in package {self.dependency} but it is not installed') + + +@dataclass +class PackageConflictError(PackageInstallationError): + """ Exception class for installation errors related to missing dependency. """ + + name: str + constraint: PackageConstraint + installed_ver: Version + + def __str__(self): + return (f'Package {self.name} conflicts with {self.constraint} but ' + f'version {self.installed_ver} is installed') + + +@dataclass +class PackageComponentConflictError(PackageInstallationError): + """ Exception class for installation error caused by component + version conflict. """ + + name: str + dependency: str + component: str + constraint: VersionConstraint + installed_ver: Version + + def __str__(self): + return (f'Package {self.name} conflicts with {self.component} {self.constraint} ' + f'in package {self.dependency} but version {self.installed_ver} is installed') + diff --git a/sonic_package_manager/logger.py b/sonic_package_manager/logger.py new file mode 100644 index 000000000000..3d5e06d35f20 --- /dev/null +++ b/sonic_package_manager/logger.py @@ -0,0 +1,29 @@ +#!/usr/bin/env python + +""" Logger for sonic-package-manager. """ + +import logging.handlers + +import click_log + + +class Formatter(click_log.ColorFormatter): + """ Click logging formatter. """ + + colors = { + 'error': dict(fg='red'), + 'exception': dict(fg='red'), + 'critical': dict(fg='red'), + 'debug': dict(fg='blue', bold=True), + 'warning': dict(fg='yellow'), + } + + +log = logging.getLogger("sonic-package-manager") +log.setLevel(logging.INFO) + +click_handler = click_log.ClickHandler() +click_handler.formatter = Formatter() + +log.addHandler(click_handler) +log.addHandler(logging.handlers.SysLogHandler()) diff --git a/sonic_package_manager/main.py b/sonic_package_manager/main.py new file mode 100644 index 000000000000..c0589ae5b5f0 --- /dev/null +++ b/sonic_package_manager/main.py @@ -0,0 +1,460 @@ +#!/usr/bin/env python + +import functools +import json +import os +import sys +import typing + +import click +import click_log +import tabulate +from natsort import natsorted + +from sonic_package_manager.database import PackageEntry, PackageDatabase +from sonic_package_manager.errors import PackageManagerError +from sonic_package_manager.logger import log +from sonic_package_manager.manager import PackageManager + +BULLET_UC = '\u2022' + + +def exit_cli(*args, **kwargs): + """ Print a message and exit with rc 1. """ + + click.secho(*args, **kwargs) + sys.exit(1) + + +def show_help(ctx): + """ Show help message and exit process successfully. """ + + click.echo(ctx.get_help()) + ctx.exit(0) + + +def root_privileges_required(func: typing.Callable) -> typing.Callable: + """ Decorates a function, so that the function is invoked + only if the user is root. """ + + @functools.wraps(func) + def wrapped_function(*args, **kwargs): + """ Wrapper around func. """ + + if os.geteuid() != 0: + exit_cli('Root privileges required for this operation', fg='red') + + return func(*args, **kwargs) + + wrapped_function.__doc__ += '\n\n NOTE: This command requires elevated (root) privileges to run.' 
+ + return wrapped_function + + +def add_options(options): + """ Decorator to append options from + input list to command. """ + + def _add_options(func): + for option in reversed(options): + func = option(func) + return func + + return _add_options + + +class MutuallyExclusiveOption(click.Option): + """ This options type is extended with 'mutually_exclusive' + parameter which makes CLI to check if several options are now + used together in single command. """ + + def __init__(self, *args, **kwargs): + self.mutually_exclusive = set(kwargs.pop('mutually_exclusive', [])) + help_string = kwargs.get('help', '') + if self.mutually_exclusive: + ex_str = ', '.join(self.mutually_exclusive) + kwargs['help'] = f'{help_string} ' \ + f'NOTE: This argument is mutually ' \ + f'exclusive with arguments: [{ex_str}].' + super().__init__(*args, **kwargs) + + def handle_parse_result(self, ctx, opts, args): + if self.name in opts and opts[self.name] is not None: + for opt_name in self.mutually_exclusive.intersection(opts): + if opts[opt_name] is None: + continue + + raise click.UsageError(f'Illegal usage: {self.name} is mutually ' + f'exclusive with arguments ' + f'{", ".join(self.mutually_exclusive)}.') + + return super().handle_parse_result(ctx, opts, args) + + +PACKAGE_SOURCE_OPTIONS = [ + click.option('--from-repository', + help='Fetch package directly from image registry repository.', + cls=MutuallyExclusiveOption, + mutually_exclusive=['from_tarball', 'package_expr']), + click.option('--from-tarball', + type=click.Path(exists=True, + readable=True, + file_okay=True, + dir_okay=False), + help='Fetch package from saved image tarball.', + cls=MutuallyExclusiveOption, + mutually_exclusive=['from_repository', 'package_expr']), + click.argument('package-expr', + type=str, + required=False) +] + + +PACKAGE_COMMON_INSTALL_OPTIONS = [ + click.option('--skip-host-plugins', + is_flag=True, + help='Do not install host OS plugins provided by the package (CLI, etc). ' + 'NOTE: In case when package host OS plugins are set as mandatory in ' + 'package manifest this option will fail the installation.') +] + + +PACKAGE_COMMON_OPERATION_OPTIONS = [ + click.option('-f', '--force', + is_flag=True, + help='Force operation by ignoring package dependency tree and package manifest validation failures.'), + click.option('-y', '--yes', + is_flag=True, + help='Automatically answer yes on prompts.'), + click_log.simple_verbosity_option(log, help='Either CRITICAL, ERROR, WARNING, INFO or DEBUG. Default is INFO.'), +] + + +def get_package_status(package: PackageEntry): + """ Returns the installation status message for package. """ + + if package.built_in: + return 'Built-In' + elif package.installed: + return 'Installed' + else: + return 'Not Installed' + + +@click.group() +@click.pass_context +def cli(ctx): + """ SONiC Package Manager """ + + ctx.obj = PackageManager.get_manager() + + +@cli.group() +@click.pass_context +def repository(ctx): + """ Repository management commands. """ + + pass + + +@cli.group() +@click.pass_context +def show(ctx): + """ Package manager show commands. """ + + pass + + +@show.group() +@click.pass_context +def package(ctx): + """ Package show commands. """ + + pass + + +@cli.command() +@click.pass_context +def list(ctx): + """ List available packages. 
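For illustration, `MutuallyExclusiveOption` extends `click.Option` so that passing two conflicting options in one invocation raises a `UsageError`, and `add_options` lets a shared option list (such as `PACKAGE_SOURCE_OPTIONS`) be reused across commands. A minimal sketch of reusing both helpers; the command and option names below are hypothetical:

```python
import click

from sonic_package_manager.main import MutuallyExclusiveOption, add_options

DEMO_OPTIONS = [
    click.option('--from-a', cls=MutuallyExclusiveOption,
                 mutually_exclusive=['from_b'],
                 help='Take input from source A.'),
    click.option('--from-b', cls=MutuallyExclusiveOption,
                 mutually_exclusive=['from_a'],
                 help='Take input from source B.'),
]


@click.command()
@add_options(DEMO_OPTIONS)
def demo(from_a, from_b):
    click.echo(f'a={from_a} b={from_b}')

# "demo --from-a x --from-b y" aborts with a UsageError similar to:
# "Illegal usage: from_a is mutually exclusive with arguments from_b."
```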
""" + + table_header = ['Name', 'Repository', 'Description', 'Version', 'Status'] + table_body = [] + + manager: PackageManager = ctx.obj + + try: + for package in natsorted(manager.database): + repository = package.repository or 'N/A' + version = package.version or 'N/A' + description = package.description or 'N/A' + status = get_package_status(package) + + table_body.append([ + package.name, + repository, + description, + version, + status + ]) + + click.echo(tabulate.tabulate(table_body, table_header)) + except PackageManagerError as err: + exit_cli(f'Failed to list repositories: {err}', fg='red') + + +@package.command() +@add_options(PACKAGE_SOURCE_OPTIONS) +@click.pass_context +def manifest(ctx, + package_expr, + from_repository, + from_tarball): + """ Show package manifest. """ + + manager: PackageManager = ctx.obj + + try: + source = manager.get_package_source(package_expr, + from_repository, + from_tarball) + package = source.get_package() + click.echo(json.dumps(package.manifest.unmarshal(), indent=4)) + except Exception as err: + exit_cli(f'Failed to print manifest: {err}', fg='red') + + +@package.command() +@click.argument('name') +@click.option('--all', is_flag=True, help='Show all available tags in repository.') +@click.option('--plain', is_flag=True, help='Plain output.') +@click.pass_context +def versions(ctx, name, all, plain): + """ Show available versions. """ + + try: + manager: PackageManager = ctx.obj + versions = manager.get_package_available_versions(name, all) + for version in versions: + if not plain: + click.secho(f'{BULLET_UC} ', bold=True, fg='green', nl=False) + click.secho(f'{version}') + except Exception as err: + exit_cli(f'Failed to get package versions for {name}: {err}', fg='red') + + +@package.command() +@add_options(PACKAGE_SOURCE_OPTIONS) +@click.pass_context +def changelog(ctx, + package_expr, + from_repository, + from_tarball): + """ Show package changelog. """ + + manager: PackageManager = ctx.obj + + try: + source = manager.get_package_source(package_expr, + from_repository, + from_tarball) + package = source.get_package() + changelog = package.manifest['package']['changelog'] + + if not changelog: + raise PackageManagerError(f'No changelog for package {package.name}') + + for version, entry in changelog.items(): + author = entry.get('author') or 'N/A' + email = entry.get('email') or 'N/A' + changes = entry.get('changes') or [] + date = entry.get('date') or 'N/A' + click.secho(f'{version}:\n', fg='green', bold=True) + for line in changes: + click.secho(f' {BULLET_UC} {line}', bold=True) + click.secho(f'\n {author} ' + f'({email}) {date}', fg='green', bold=True) + click.secho('') + + except Exception as err: + exit_cli(f'Failed to print package changelog: {err}', fg='red') + + +@repository.command() +@click.argument('name', type=str) +@click.argument('repository', type=str) +@click.option('--default-reference', type=str, help='Default installation reference. Can be a tag or sha256 digest in repository.') +@click.option('--description', type=str, help='Optional package entry description.') +@click.pass_context +@root_privileges_required +def add(ctx, name, repository, default_reference, description): + """ Add a new repository to database. 
""" + + manager: PackageManager = ctx.obj + + try: + manager.add_repository(name, + repository, + description=description, + default_reference=default_reference) + except Exception as err: + exit_cli(f'Failed to add repository {name}: {err}', fg='red') + + +@repository.command() +@click.argument("name") +@click.pass_context +@root_privileges_required +def remove(ctx, name): + """ Remove repository from database. """ + + manager: PackageManager = ctx.obj + + try: + manager.remove_repository(name) + except Exception as err: + exit_cli(f'Failed to remove repository {name}: {err}', fg='red') + + +@cli.command() +@click.option('--enable', + is_flag=True, + default=None, + help='Set the default state of the feature to enabled ' + 'and enable feature right after installation. ' + 'NOTE: user needs to execute "config save -y" to make ' + 'this setting persistent.') +@click.option('--set-owner', + type=click.Choice(['local', 'kube']), + default=None, + help='Default owner configuration setting for a feature.') +@click.option('--allow-downgrade', + is_flag=True, + default=None, + help='Allow package downgrade. By default an attempt to downgrade the package ' + 'will result in a failure since downgrade might not be supported by the package, ' + 'thus requires explicit request from the user.') +@add_options(PACKAGE_SOURCE_OPTIONS) +@add_options(PACKAGE_COMMON_OPERATION_OPTIONS) +@add_options(PACKAGE_COMMON_INSTALL_OPTIONS) +@click.pass_context +@root_privileges_required +def install(ctx, + package_expr, + from_repository, + from_tarball, + force, + yes, + enable, + set_owner, + skip_host_plugins, + allow_downgrade): + """ Install/Upgrade package using [PACKAGE_EXPR] in format "[=|@]". + + The repository to pull the package from is resolved by lookup in package database, + thus the package has to be added via "sonic-package-manager repository add" command. + + In case when [PACKAGE_EXPR] is a package name "" this command will install or upgrade + to a version referenced by "default-reference" in package database. """ + + manager: PackageManager = ctx.obj + + package_source = package_expr or from_repository or from_tarball + if not package_source: + exit_cli(f'Package source is not specified', fg='red') + + if not yes and not force: + click.confirm(f'{package_source} is going to be installed, ' + f'continue?', abort=True, show_default=True) + + install_opts = { + 'force': force, + 'skip_host_plugins': skip_host_plugins, + } + if enable is not None: + install_opts['enable'] = enable + if set_owner is not None: + install_opts['default_owner'] = set_owner + if allow_downgrade is not None: + install_opts['allow_downgrade'] = allow_downgrade + + try: + manager.install(package_expr, + from_repository, + from_tarball, + **install_opts) + except Exception as err: + exit_cli(f'Failed to install {package_source}: {err}', fg='red') + except KeyboardInterrupt: + exit_cli(f'Operation canceled by user', fg='red') + + +@cli.command() +@add_options(PACKAGE_COMMON_OPERATION_OPTIONS) +@add_options(PACKAGE_COMMON_INSTALL_OPTIONS) +@click.argument('name') +@click.pass_context +@root_privileges_required +def reset(ctx, name, force, yes, skip_host_plugins): + """ Reset package to the default version. 
""" + + manager: PackageManager = ctx.obj + + if not yes and not force: + click.confirm(f'Package {name} is going to be reset to default version, ' + f'continue?', abort=True, show_default=True) + + try: + manager.reset(name, force, skip_host_plugins) + except Exception as err: + exit_cli(f'Failed to reset package {name}: {err}', fg='red') + except KeyboardInterrupt: + exit_cli(f'Operation canceled by user', fg='red') + + +@cli.command() +@add_options(PACKAGE_COMMON_OPERATION_OPTIONS) +@click.argument('name') +@click.pass_context +@root_privileges_required +def uninstall(ctx, name, force, yes): + """ Uninstall package. """ + + manager: PackageManager = ctx.obj + + if not yes and not force: + click.confirm(f'Package {name} is going to be uninstalled, ' + f'continue?', abort=True, show_default=True) + + try: + manager.uninstall(name, force) + except Exception as err: + exit_cli(f'Failed to uninstall package {name}: {err}', fg='red') + except KeyboardInterrupt: + exit_cli(f'Operation canceled by user', fg='red') + + +@cli.command() +@add_options(PACKAGE_COMMON_OPERATION_OPTIONS) +@click.option('--dockerd-socket', type=click.Path()) +@click.argument('database', type=click.Path()) +@click.pass_context +@root_privileges_required +def migrate(ctx, database, force, yes, dockerd_socket): + """ Migrate packages from the given database file. """ + + manager: PackageManager = ctx.obj + + if not yes and not force: + click.confirm('Continue with package migration?', abort=True, show_default=True) + + try: + manager.migrate_packages(PackageDatabase.from_file(database), dockerd_socket) + except Exception as err: + exit_cli(f'Failed to migrate packages {err}', fg='red') + except KeyboardInterrupt: + exit_cli(f'Operation canceled by user', fg='red') + + +if __name__ == "__main__": + cli() diff --git a/sonic_package_manager/manager.py b/sonic_package_manager/manager.py new file mode 100644 index 000000000000..ba437534edad --- /dev/null +++ b/sonic_package_manager/manager.py @@ -0,0 +1,931 @@ +#!/usr/bin/env python + +import contextlib +import functools +import os +import pkgutil +import tempfile +from inspect import signature +from typing import Any, Iterable, Callable, Dict, Optional + +import docker +import filelock +from sonic_py_common import device_info + +from sonic_package_manager import utils +from sonic_package_manager.constraint import ( + VersionConstraint, + PackageConstraint +) +from sonic_package_manager.database import ( + PACKAGE_MANAGER_LOCK_FILE, + PackageDatabase +) +from sonic_package_manager.dockerapi import DockerApi +from sonic_package_manager.errors import ( + PackageManagerError, + PackageDependencyError, + PackageComponentDependencyError, + PackageConflictError, + PackageComponentConflictError, + PackageInstallationError, + PackageSonicRequirementError, + PackageUninstallationError, + PackageUpgradeError +) +from sonic_package_manager.logger import log +from sonic_package_manager.metadata import MetadataResolver +from sonic_package_manager.package import Package +from sonic_package_manager.progress import ProgressManager +from sonic_package_manager.reference import PackageReference +from sonic_package_manager.registry import RegistryResolver +from sonic_package_manager.service_creator.creator import ( + ServiceCreator, + run_command +) +from sonic_package_manager.service_creator.feature import FeatureRegistry +from sonic_package_manager.service_creator.sonic_db import SonicDB +from sonic_package_manager.service_creator.utils import in_chroot +from sonic_package_manager.source import 
( + PackageSource, + LocalSource, + RegistrySource, + TarballSource +) +from sonic_package_manager.utils import DockerReference +from sonic_package_manager.version import ( + Version, + VersionRange, + version_to_tag, + tag_to_version +) + + +@contextlib.contextmanager +def failure_ignore(ignore: bool): + """ Ignores failures based on parameter passed. """ + + try: + yield + except Exception as err: + if ignore: + log.warning(f'ignoring error {err}') + else: + raise + + +def under_lock(func: Callable) -> Callable: + """ Execute operations under lock. """ + + @functools.wraps(func) + def wrapped_function(*args, **kwargs): + self = args[0] + with self.lock: + return func(*args, **kwargs) + + return wrapped_function + + +def opt_check(func: Callable) -> Callable: + """ Check kwargs for function. """ + + @functools.wraps(func) + def wrapped_function(*args, **kwargs): + sig = signature(func) + unsupported_opts = [opt for opt in kwargs if opt not in sig.parameters] + if unsupported_opts: + raise PackageManagerError( + f'Unsupported options {unsupported_opts} for {func.__name__}' + ) + return func(*args, **kwargs) + + return wrapped_function + + +def rollback(func, *args, **kwargs): + """ Used in rollback callbacks to ignore failure + but proceed with rollback. Error will be printed + but not fail the whole procedure of rollback. """ + + @functools.wraps(func) + def wrapper(): + try: + func(*args, **kwargs) + except Exception as err: + log.error(f'failed in rollback: {err}') + + return wrapper + + +def package_constraint_to_reference(constraint: PackageConstraint) -> PackageReference: + package_name, version_constraint = constraint.name, constraint.constraint + # Allow only specific version for now. + # Later we can improve package manager to support + # installing packages using expressions like 'package>1.0.0' + if version_constraint == VersionRange(): # empty range means any version + return PackageReference(package_name, None) + if not isinstance(version_constraint, Version): + raise PackageManagerError(f'Can only install specific version. ' + f'Use only following expression "{package_name}=" ' + f'to install specific version') + return PackageReference(package_name, version_to_tag(version_constraint)) + + +def parse_reference_expression(expression): + try: + return package_constraint_to_reference(PackageConstraint.parse(expression)) + except ValueError: + # if we failed to parse the expression as constraint expression + # we will try to parse it as reference + return PackageReference.parse(expression) + + +def validate_package_base_os_constraints(package: Package, sonic_version_info: Dict[str, str]): + """ Verify that all dependencies on base OS components are met. + Args: + package: Package to check constraints for. + sonic_version_info: SONiC components version information. + Raises: + PackageSonicRequirementError: in case dependency is not satisfied. + """ + + base_os_constraints = package.manifest['package']['base-os'].components + for component, constraint in base_os_constraints.items(): + if component not in sonic_version_info: + raise PackageSonicRequirementError(package.name, component, constraint) + + version = Version.parse(sonic_version_info[component]) + + if not constraint.allows_all(version): + raise PackageSonicRequirementError(package.name, component, constraint, version) + + +def validate_package_tree(packages: Dict[str, Package]): + """ Verify that all dependencies are met in all packages passed to this function. 
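For illustration, three small helpers drive most of the control flow in manager.py: `failure_ignore()` downgrades validation errors to warnings when `--force` is used, `under_lock` serializes operations on the package manager file lock, and `opt_check` rejects keyword arguments the target method does not accept. A toy sketch of the first two; the `Demo` class and lock path are hypothetical:

```python
import filelock

from sonic_package_manager.manager import failure_ignore, under_lock


class Demo:
    def __init__(self):
        self.lock = filelock.FileLock('/tmp/demo.lock', timeout=0)

    @under_lock
    def risky(self, force: bool):
        with failure_ignore(force):
            raise RuntimeError('validation failed')
        # Reached only when force=True: the error above is logged as a warning.


Demo().risky(force=True)   # proceeds past the failed check
Demo().risky(force=False)  # re-raises RuntimeError
```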
+ Args: + packages: list of packages to check + Raises: + PackageDependencyError: if dependency is missing + PackageConflictError: if there is a conflict between packages + """ + + for name, package in packages.items(): + log.debug(f'checking dependencies for {name}') + for dependency in package.manifest['package']['depends']: + dependency_package = packages.get(dependency.name) + if dependency_package is None: + raise PackageDependencyError(package.name, dependency) + + installed_version = dependency_package.version + log.debug(f'dependency package is installed {dependency.name}: {installed_version}') + if not dependency.constraint.allows_all(installed_version): + raise PackageDependencyError(package.name, dependency, installed_version) + + dependency_components = dependency.components + if not dependency_components: + dependency_components = {} + for component, version in package.components.items(): + implicit_constraint = VersionConstraint.parse(f'^{version.major}.{version.minor}.0') + dependency_components[component] = implicit_constraint + + for component, constraint in dependency_components.items(): + if component not in dependency_package.components: + raise PackageComponentDependencyError(package.name, dependency, + component, constraint) + + component_version = dependency_package.components[component] + log.debug(f'dependency package {dependency.name}: ' + f'component {component} version is {component_version}') + + if not constraint.allows_all(component_version): + raise PackageComponentDependencyError(package.name, dependency, component, + constraint, component_version) + + log.debug(f'checking conflicts for {name}') + for conflict in package.manifest['package']['breaks']: + conflicting_package = packages.get(conflict.name) + if conflicting_package is None: + continue + + installed_version = conflicting_package.version + log.debug(f'conflicting package is installed {conflict.name}: {installed_version}') + if conflict.constraint.allows_all(installed_version): + raise PackageConflictError(package.name, conflict, installed_version) + + for component, constraint in conflicting_package.components.items(): + if component not in conflicting_package.components: + continue + + component_version = conflicting_package.components[component] + log.debug(f'conflicting package {dependency.name}: ' + f'component {component} version is {component_version}') + + if constraint.allows_all(component_version): + raise PackageComponentConflictError(package.name, dependency, component, + constraint, component_version) + + +def validate_package_cli_can_be_skipped(package: Package, skip: bool): + """ Checks whether package CLI installation can be skipped. + + Args: + package: Package to validate + skip: Whether to skip installing CLI + + Raises: + PackageManagerError + + """ + + if package.manifest['cli']['mandatory'] and skip: + raise PackageManagerError(f'CLI is mandatory for package {package.name} ' + f'but it was requested to be not installed') + elif skip: + log.warning(f'Package {package.name} CLI plugin will not be installed') + + +class PackageManager: + """ SONiC Package Manager. This class provides public API + for sonic_package_manager python library. It has functionality + for installing, uninstalling, updating SONiC packages as well as + retrieving information about the packages from different sources. 
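For illustration, when a dependency entry does not pin component versions explicitly, `validate_package_tree` derives an implicit caret constraint `^<major>.<minor>.0` from the depending package's own component versions, so only compatible component releases are accepted. A short sketch of what such a constraint allows, using the same constraint and version helpers; the version numbers are illustrative:

```python
from sonic_package_manager.constraint import VersionConstraint
from sonic_package_manager.version import Version

# Implicit constraint derived from a component pinned at 1.2.x
constraint = VersionConstraint.parse('^1.2.0')

print(constraint.allows_all(Version.parse('1.5.3')))   # True  - within >=1.2.0,<2.0.0
print(constraint.allows_all(Version.parse('2.0.0')))   # False - breaking major version change
```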
""" + + def __init__(self, + docker_api: DockerApi, + registry_resolver: RegistryResolver, + database: PackageDatabase, + metadata_resolver: MetadataResolver, + service_creator: ServiceCreator, + device_information: Any, + lock: filelock.FileLock): + """ Initialize PackageManager. """ + + self.lock = lock + self.docker = docker_api + self.registry_resolver = registry_resolver + self.database = database + self.metadata_resolver = metadata_resolver + self.service_creator = service_creator + self.feature_registry = service_creator.feature_registry + self.is_multi_npu = device_information.is_multi_npu() + self.num_npus = device_information.get_num_npus() + self.version_info = device_information.get_sonic_version_info() + + @under_lock + def add_repository(self, *args, **kwargs): + """ Add repository to package database + and commit database content. + + Args: + args: Arguments to pass to PackageDatabase.add_package + kwargs: Keyword arguments to pass to PackageDatabase.add_package + """ + + self.database.add_package(*args, **kwargs) + self.database.commit() + + @under_lock + def remove_repository(self, name: str): + """ Remove repository from package database + and commit database content. + + Args: + name: package name + """ + + self.database.remove_package(name) + self.database.commit() + + @under_lock + def install(self, + expression: Optional[str] = None, + repotag: Optional[str] = None, + tarball: Optional[str] = None, + **kwargs): + """ Install/Upgrade SONiC Package from either an expression + representing the package and its version, repository and tag or + digest in same format as "docker pulL" accepts or an image tarball path. + + Args: + expression: SONiC Package reference expression + repotag: Install/Upgrade from REPO[:TAG][@DIGEST] + tarball: Install/Upgrade from tarball, path to tarball file + kwargs: Install/Upgrade options for self.install_from_source + Raises: + PackageManagerError + """ + + source = self.get_package_source(expression, repotag, tarball) + package = source.get_package() + + if self.is_installed(package.name): + self.upgrade_from_source(source, **kwargs) + else: + self.install_from_source(source, **kwargs) + + @under_lock + @opt_check + def install_from_source(self, + source: PackageSource, + force=False, + enable=False, + default_owner='local', + skip_host_plugins=False): + """ Install SONiC Package from source represented by PackageSource. + This method contains the logic of package installation. + + Args: + source: SONiC Package source. + force: Force the installation. + enable: If True the installed feature package will be enabled. + default_owner: Owner of the installed package. + skip_host_plugins: Skip CLI plugin installation. + Raises: + PackageManagerError + """ + + package = source.get_package() + name = package.name + + with failure_ignore(force): + if self.is_installed(name): + raise PackageInstallationError(f'{name} is already installed') + + version = package.manifest['package']['version'] + feature_state = 'enabled' if enable else 'disabled' + installed_packages = self._get_installed_packages_and(package) + + with failure_ignore(force): + validate_package_base_os_constraints(package, self.version_info) + validate_package_tree(installed_packages) + validate_package_cli_can_be_skipped(package, skip_host_plugins) + + # After all checks are passed we proceed to actual installation + + # When installing package from a tarball or directly from registry + # package name may not be in database. 
+ if not self.database.has_package(package.name): + self.database.add_package(package.name, package.repository) + + try: + with contextlib.ExitStack() as exits: + source.install(package) + exits.callback(rollback(source.uninstall, package)) + + self.service_creator.create(package, state=feature_state, owner=default_owner) + exits.callback(rollback(self.service_creator.remove, package)) + + if not skip_host_plugins: + self._install_cli_plugins(package) + exits.callback(rollback(self._uninstall_cli_plugins, package)) + + exits.pop_all() + except Exception as err: + raise PackageInstallationError(f'Failed to install {package.name}: {err}') + except KeyboardInterrupt: + raise + + package.entry.installed = True + package.entry.version = version + self.database.update_package(package.entry) + self.database.commit() + + @under_lock + @opt_check + def uninstall(self, name: str, force=False): + """ Uninstall SONiC Package referenced by name. The uninstallation + can be forced if force argument is True. + + Args: + name: SONiC Package name. + force: Force the installation. + Raises: + PackageManagerError + """ + + with failure_ignore(force): + if not self.is_installed(name): + raise PackageUninstallationError(f'{name} is not installed') + + package = self.get_installed_package(name) + service_name = package.manifest['service']['name'] + + with failure_ignore(force): + if self.feature_registry.is_feature_enabled(service_name): + raise PackageUninstallationError( + f'{service_name} is enabled. Disable the feature first') + + if package.built_in: + raise PackageUninstallationError( + f'Cannot uninstall built-in package {package.name}') + + installed_packages = self._get_installed_packages_except(package) + + with failure_ignore(force): + validate_package_tree(installed_packages) + + # After all checks are passed we proceed to actual uninstallation + + try: + self._uninstall_cli_plugins(package) + self.service_creator.remove(package) + + # Clean containers based on this image + containers = self.docker.ps(filters={'ancestor': package.image_id}, + all=True) + for container in containers: + self.docker.rm(container.id, force=True) + + self.docker.rmi(package.image_id, force=True) + package.entry.image_id = None + except Exception as err: + raise PackageUninstallationError( + f'Failed to uninstall {package.name}: {err}' + ) + + package.entry.installed = False + package.entry.version = None + self.database.update_package(package.entry) + self.database.commit() + + @under_lock + @opt_check + def upgrade_from_source(self, + source: PackageSource, + force=False, + skip_host_plugins=False, + allow_downgrade=False): + """ Upgrade SONiC Package to a version the package reference + expression specifies. Can force the upgrade if force parameter + is True. Force can allow a package downgrade. + + Args: + source: SONiC Package source + force: Force the upgrade. + skip_host_plugins: Skip host OS plugins installation. + allow_downgrade: Flag to allow package downgrade. 
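For illustration, installation is made transactional with `contextlib.ExitStack`: after each successful step a rollback callback is registered, and only when every step succeeds does `pop_all()` discard the callbacks; on any exception the stack unwinds and undoes the completed steps (each wrapped in `rollback()` so a failing undo does not stop the others). The general shape of that pattern, independent of the package-specific steps:

```python
import contextlib


def step(name):   # stand-in for "load image", "create service", ...
    print(f'doing {name}')


def undo(name):   # stand-in for the corresponding rollback action
    print(f'undoing {name}')


def transactional_install():
    with contextlib.ExitStack() as exits:
        step('load image')
        exits.callback(undo, 'load image')

        step('create service')
        exits.callback(undo, 'create service')

        step('install cli plugins')
        exits.callback(undo, 'install cli plugins')

        # Nothing raised: keep the result and drop all registered rollbacks.
        exits.pop_all()
```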
+ Raises: + PackageManagerError + """ + + new_package = source.get_package() + name = new_package.name + + with failure_ignore(force): + if not self.is_installed(name): + raise PackageUpgradeError(f'{name} is not installed') + + old_package = self.get_installed_package(name) + + if old_package.built_in: + raise PackageUpgradeError( + f'Cannot upgrade built-in package {old_package.name}' + ) + + old_feature = old_package.manifest['service']['name'] + new_feature = new_package.manifest['service']['name'] + old_version = old_package.manifest['package']['version'] + new_version = new_package.manifest['package']['version'] + + with failure_ignore(force): + if old_version == new_version: + raise PackageUpgradeError(f'{new_version} is already installed') + + # TODO: Not all packages might support downgrade. + # We put a check here but we understand that for some packages + # the downgrade might be safe to do. There can be a variable in manifest + # describing package downgrade ability or downgrade-able versions. + if new_version < old_version and not allow_downgrade: + raise PackageUpgradeError( + f'Request to downgrade from {old_version} to {new_version}. ' + f'Downgrade might be not supported by the package' + ) + + # remove currently installed package from the list + installed_packages = self._get_installed_packages_and(new_package) + + with failure_ignore(force): + validate_package_base_os_constraints(new_package, self.version_info) + validate_package_tree(installed_packages) + validate_package_cli_can_be_skipped(new_package, skip_host_plugins) + + # After all checks are passed we proceed to actual upgrade + + try: + with contextlib.ExitStack() as exits: + self._uninstall_cli_plugins(old_package) + exits.callback(rollback(self._install_cli_plugins, old_package)) + + source.install(new_package) + exits.callback(rollback(source.uninstall, new_package)) + + if self.feature_registry.is_feature_enabled(old_feature): + self._systemctl_action(old_package, 'stop') + exits.callback(rollback(self._systemctl_action, + old_package, 'start')) + + self.service_creator.remove(old_package, deregister_feature=False) + exits.callback(rollback(self.service_creator.create, + old_package, register_feature=False)) + + # Clean containers based on the old image + containers = self.docker.ps(filters={'ancestor': old_package.image_id}, + all=True) + for container in containers: + self.docker.rm(container.id, force=True) + + self.service_creator.create(new_package, register_feature=False) + exits.callback(rollback(self.service_creator.remove, new_package, + register_feature=False)) + + if self.feature_registry.is_feature_enabled(new_feature): + self._systemctl_action(new_package, 'start') + exits.callback(rollback(self._systemctl_action, + new_package, 'stop')) + + if not skip_host_plugins: + self._install_cli_plugins(new_package) + exits.callback(rollback(self._uninstall_cli_plugin, old_package)) + + self.docker.rmi(old_package.image_id, force=True) + + exits.pop_all() + except Exception as err: + raise PackageUpgradeError(f'Failed to upgrade {new_package.name}: {err}') + except KeyboardInterrupt: + raise + + new_package_entry = new_package.entry + new_package_entry.installed = True + new_package_entry.version = new_version + self.database.update_package(new_package_entry) + self.database.commit() + + @under_lock + @opt_check + def reset(self, name: str, force: bool = False, skip_host_plugins: bool = False): + """ Reset package to defaults version + + Args: + name: SONiC Package name. + force: Force the installation. 
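For illustration, upgrades compare the manifest versions directly; because `Version` objects are ordered, a plain `<` comparison is enough to detect a downgrade, which is refused unless `--allow-downgrade` is given (or `reset` is used, which always allows it). A small sketch of that guard with illustrative versions:

```python
from sonic_package_manager.version import Version

old_version = Version.parse('1.5.0')
new_version = Version.parse('1.4.2')

if new_version < old_version:
    # Mirrors the check in upgrade_from_source
    print(f'Request to downgrade from {old_version} to {new_version}: '
          f'refusing unless allow_downgrade is set')
```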
+ skip_host_plugins: Skip host plugins installation. + Raises: + PackageManagerError + """ + + with failure_ignore(force): + if not self.is_installed(name): + raise PackageManagerError(f'{name} is not installed') + + package = self.get_installed_package(name) + default_reference = package.entry.default_reference + if default_reference is None: + raise PackageManagerError(f'package {name} has no default reference') + + package_ref = PackageReference(name, default_reference) + source = self.get_package_source(package_ref=package_ref) + self.upgrade_from_source(source, force=force, + allow_downgrade=True, + skip_host_plugins=skip_host_plugins) + + @under_lock + def migrate_packages(self, + old_package_database: PackageDatabase, + dockerd_sock: Optional[str] = None): + """ + Migrate packages from old database. This function can do a comparison between + current database and the database passed in as argument. If the package is + missing in the current database it will be added. If the package is installed + in the passed database and in the current it is not installed it will be + installed with a passed database package version. If the package is installed + in the passed database and it is installed in the current database but with + older version the package will be upgraded to the never version. If the package + is installed in the passed database and in the current it is installed but with + never version - no actions are taken. If dockerd_sock parameter is passed, the + migration process will use loaded images from docker library of the currently + installed image. + + Args: + old_package_database: SONiC Package Database to migrate packages from. + dockerd_sock: Path to dockerd socket. + Raises: + PackageManagerError + """ + + self._migrate_package_database(old_package_database) + + def migrate_package(old_package_entry, + new_package_entry): + """ Migrate package routine + + Args: + old_package_entry: Entry in old package database. + new_package_entry: Entry in new package database. + """ + + name = new_package_entry.name + version = new_package_entry.version + + if dockerd_sock: + # dockerd_sock is defined, so use docked_sock to connect to + # dockerd and fetch package image from it. + log.info(f'installing {name} from old docker library') + docker_api = DockerApi(docker.DockerClient(base_url=f'unix://{dockerd_sock}')) + + image = docker_api.get_image(old_package_entry.image_id) + + with tempfile.NamedTemporaryFile('wb') as file: + for chunk in image.save(named=True): + file.write(chunk) + + self.install(tarball=file.name) + else: + log.info(f'installing {name} version {version}') + + self.install(f'{name}={version}') + + # TODO: Topological sort packages by their dependencies first. 
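For illustration, during a SONiC-to-SONiC upgrade the new image can call `migrate_packages` with the old image's package database and, optionally, the old dockerd socket so that already-loaded images are reused instead of pulled again. A sketch of that call; both paths below are illustrative, the real ones are supplied by sonic-installer:

```python
from sonic_package_manager.database import PackageDatabase
from sonic_package_manager.manager import PackageManager

manager = PackageManager.get_manager()

# Package database shipped in the previously installed image (illustrative path).
old_db = PackageDatabase.from_file('/tmp/old-image/packages.json')

# Reuse images already present in the old image's docker library (illustrative socket path).
manager.migrate_packages(old_db, dockerd_sock='/tmp/old-image/docker.sock')
```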
+ for old_package in old_package_database: + if not old_package.installed or old_package.built_in: + continue + + log.info(f'migrating package {old_package.name}') + + new_package = self.database.get_package(old_package.name) + + if new_package.installed: + if old_package.version > new_package.version: + log.info(f'{old_package.name} package version is greater ' + f'then installed in new image: ' + f'{old_package.version} > {new_package.version}') + log.info(f'upgrading {new_package.name} to {old_package.version}') + new_package.version = old_package.version + migrate_package(old_package, new_package) + else: + log.info(f'skipping {new_package.name} as installed version is newer') + elif new_package.default_reference is not None: + new_package_ref = PackageReference(new_package.name, new_package.default_reference) + package_source = self.get_package_source(package_ref=new_package_ref) + package = package_source.get_package() + new_package_default_version = package.manifest['package']['version'] + if old_package.version > new_package_default_version: + log.info(f'{old_package.name} package version is lower ' + f'then the default in new image: ' + f'{old_package.version} > {new_package_default_version}') + new_package.version = old_package.version + migrate_package(old_package, new_package) + else: + self.install(f'{new_package.name}={new_package_default_version}') + else: + # No default version and package is not installed. + # Migrate old package same version. + new_package.version = old_package.version + migrate_package(old_package, new_package) + + self.database.commit() + + def get_installed_package(self, name: str) -> Package: + """ Get installed package by name. + + Args: + name: package name. + Returns: + Package object. + """ + + package_entry = self.database.get_package(name) + source = LocalSource(package_entry, + self.database, + self.docker, + self.metadata_resolver) + return source.get_package() + + def get_package_source(self, + package_expression: Optional[str] = None, + repository_reference: Optional[str] = None, + tarboll_path: Optional[str] = None, + package_ref: Optional[PackageReference] = None): + """ Returns PackageSource object based on input source. + + Args: + package_expression: SONiC Package expression string + repository_reference: Install from REPO[:TAG][@DIGEST] + tarboll_path: Install from image tarball + package_ref: Package reference object + Returns: + SONiC Package object. + Raises: + ValueError if no source specified. + """ + + if package_expression: + ref = parse_reference_expression(package_expression) + return self.get_package_source(package_ref=ref) + elif repository_reference: + repo_ref = DockerReference.parse(repository_reference) + repository = repo_ref['name'] + reference = repo_ref['tag'] or repo_ref['digest'] + reference = reference or 'latest' + return RegistrySource(repository, + reference, + self.database, + self.docker, + self.metadata_resolver) + elif tarboll_path: + return TarballSource(tarboll_path, + self.database, + self.docker, + self.metadata_resolver) + elif package_ref: + package_entry = self.database.get_package(package_ref.name) + + # Determine the reference if not specified. + # If package is installed assume the installed + # one is requested, otherwise look for default + # reference defined for this package. In case package + # does not have a default reference raise an error. 
+ if package_ref.reference is None: + if package_entry.installed: + return LocalSource(package_entry, + self.database, + self.docker, + self.metadata_resolver) + if package_entry.default_reference is not None: + package_ref.reference = package_entry.default_reference + else: + raise PackageManagerError(f'No default reference tag. ' + f'Please specify the version or tag explicitly') + + return RegistrySource(package_entry.repository, + package_ref.reference, + self.database, + self.docker, + self.metadata_resolver) + else: + raise ValueError('No package source provided') + + def get_package_available_versions(self, + name: str, + all: bool = False) -> Iterable: + """ Returns a list of available versions for package. + + Args: + name: Package name. + all: If set to True will return all tags including + those which do not follow semantic versioning. + Returns: + List of versions + """ + package_info = self.database.get_package(name) + registry = self.registry_resolver.get_registry_for(package_info.repository) + available_tags = registry.tags(package_info.repository) + + def is_semantic_ver_tag(tag: str) -> bool: + try: + tag_to_version(tag) + return True + except ValueError: + pass + return False + + if all: + return available_tags + + return map(tag_to_version, filter(is_semantic_ver_tag, available_tags)) + + def is_installed(self, name: str) -> bool: + """ Returns boolean whether a package called name is installed. + + Args: + name: Package name. + Returns: + True if package is installed, False otherwise. + """ + + if not self.database.has_package(name): + return False + package_info = self.database.get_package(name) + return package_info.installed + + def get_installed_packages(self) -> Dict[str, Package]: + """ Returns a dictionary of installed packages where + keys are package names and values are package objects. + + Returns: + Installed packages dictionary. + """ + + return { + entry.name: self.get_installed_package(entry.name) + for entry in self.database if entry.installed + } + + def _migrate_package_database(self, old_package_database: PackageDatabase): + """ Performs part of package migration process. + For every package in old_package_database that is not listed in current + database add a corresponding entry to current database. """ + + for package in old_package_database: + if not self.database.has_package(package.name): + self.database.add_package(package.name, + package.repository, + package.description, + package.default_reference) + + def _get_installed_packages_and(self, package: Package) -> Dict[str, Package]: + """ Returns a dictionary of installed packages with their names as keys + adding a package provided in the argument. """ + + packages = self.get_installed_packages() + packages[package.name] = package + return packages + + def _get_installed_packages_except(self, package: Package) -> Dict[str, Package]: + """ Returns a dictionary of installed packages with their names as keys + removing a package provided in the argument. """ + + packages = self.get_installed_packages() + packages.pop(package.name) + return packages + + # TODO: Replace with "config feature" command. + # The problem with current "config feature" command + # is that it is asynchronous, thus can't be used + # for package upgrade purposes where we need to wait + # till service stops before upgrading docker image. 
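For illustration, `get_package_source` is the single entry point that turns any of the accepted inputs (a package expression, a `REPO[:TAG][@DIGEST]` reference, an image tarball, or an already-resolved `PackageReference`) into a `PackageSource`, from which the `Package` and its manifest are built. A sketch of the three user-facing variants, passing the arguments positionally as the CLI does; names and paths are illustrative:

```python
from sonic_package_manager.manager import PackageManager

manager = PackageManager.get_manager()

# 1. Package expression resolved through the package database
src = manager.get_package_source('cpu-report=1.0.0', None, None)

# 2. Direct registry reference, bypassing the database repository field
src = manager.get_package_source(None, 'registry.example.com/cpu-report:1.0.0', None)

# 3. Saved image tarball
src = manager.get_package_source(None, None, '/tmp/cpu-report.gz')

package = src.get_package()
print(package.name, package.manifest['package']['version'])
```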
+ # It would be really handy if we could just call + # something like: "config feature state --wait" + # instead of operating on systemd service since + # this is basically a duplicated code from "hostcfgd". + def _systemctl_action(self, package: Package, action: str): + """ Execute systemctl action for a service supporting + multi-asic services. """ + + name = package.manifest['service']['name'] + host_service = package.manifest['service']['host-service'] + asic_service = package.manifest['service']['asic-service'] + single_instance = host_service or (asic_service and not self.is_multi_npu) + multi_instance = asic_service and self.is_multi_npu + + if in_chroot(): + return + + if single_instance: + run_command(f'systemctl {action} {name}') + if multi_instance: + for npu in range(self.num_npus): + run_command(f'systemctl {action} {name}@{npu}') + + @staticmethod + def _get_cli_plugin_name(package: Package): + return utils.make_python_identifier(package.name) + '.py' + + @classmethod + def _get_cli_plugin_path(cls, package: Package, command): + pkg_loader = pkgutil.get_loader(f'{command}.plugins') + if pkg_loader is None: + raise PackageManagerError(f'Failed to get plugins path for {command} CLI') + plugins_pkg_path = os.path.dirname(pkg_loader.path) + return os.path.join(plugins_pkg_path, cls._get_cli_plugin_name(package)) + + def _install_cli_plugins(self, package: Package): + for command in ('show', 'config', 'clear'): + self._install_cli_plugin(package, command) + + def _uninstall_cli_plugins(self, package: Package): + for command in ('show', 'config', 'clear'): + self._uninstall_cli_plugin(package, command) + + def _install_cli_plugin(self, package: Package, command: str): + image_plugin_path = package.manifest['cli'][command] + if not image_plugin_path: + return + host_plugin_path = self._get_cli_plugin_path(package, command) + self.docker.extract(package.entry.image_id, image_plugin_path, host_plugin_path) + + def _uninstall_cli_plugin(self, package: Package, command: str): + image_plugin_path = package.manifest['cli'][command] + if not image_plugin_path: + return + host_plugin_path = self._get_cli_plugin_path(package, command) + if os.path.exists(host_plugin_path): + os.remove(host_plugin_path) + + @staticmethod + def get_manager() -> 'PackageManager': + """ Creates and returns PackageManager instance. + + Returns: + PackageManager + """ + + docker_api = DockerApi(docker.from_env()) + registry_resolver = RegistryResolver() + return PackageManager(DockerApi(docker.from_env(), ProgressManager()), + registry_resolver, + PackageDatabase.from_file(), + MetadataResolver(docker_api, registry_resolver), + ServiceCreator(FeatureRegistry(SonicDB), SonicDB), + device_info, + filelock.FileLock(PACKAGE_MANAGER_LOCK_FILE, timeout=0)) diff --git a/sonic_package_manager/manifest.py b/sonic_package_manager/manifest.py new file mode 100644 index 000000000000..b58a0d10f055 --- /dev/null +++ b/sonic_package_manager/manifest.py @@ -0,0 +1,210 @@ +#!/usr/bin/env python + +from abc import ABC +from dataclasses import dataclass +from typing import Optional, List, Dict, Any + +from sonic_package_manager.constraint import ( + ComponentConstraints, + PackageConstraint +) +from sonic_package_manager.errors import ManifestError +from sonic_package_manager.version import Version + + +class ManifestSchema: + """ ManifestSchema class describes and provides marshalling + and unmarshalling methods. + """ + + class Marshaller: + """ Base class for marshaling and un-marshaling. 
""" + + def marshal(self, value): + """ Validates and returns a valid manifest dictionary. + + Args: + value: input value to validate. + Returns: valid manifest node. + """ + + raise NotImplementedError + + def unmarshal(self, value): + """ Un-marshals the manifest to a dictionary. + + Args: + value: input value to validate. + Returns: valid manifest node. + """ + + raise NotImplementedError + + @dataclass + class ParsedMarshaller(Marshaller): + """ Marshaller used on types which support class method "parse" """ + + type: Any + + def marshal(self, value): + try: + return self.type.parse(value) + except ValueError as err: + raise ManifestError(f'Failed to marshal {value}: {err}') + + def unmarshal(self, value): + try: + if hasattr(value, 'deparse'): + return value.deparse() + return str(value) + except Exception as err: + raise ManifestError(f'Failed to unmarshal {value}: {err}') + + @dataclass + class DefaultMarshaller(Marshaller): + """ Default marshaller that validates if the given + value is instance of given type. """ + + type: type + + def marshal(self, value): + if not isinstance(value, self.type): + raise ManifestError(f'{value} is not of type {self.type.__name__}') + return value + + def unmarshal(self, value): + return value + + @dataclass + class ManifestNode(Marshaller, ABC): + """ + Base class for any manifest object. + + Attrs: + key: String representing the key for this object. + """ + + key: str + + @dataclass + class ManifestRoot(ManifestNode): + items: List + + def marshal(self, value: Optional[dict]): + result = {} + if value is None: + value = {} + + for item in self.items: + next_value = value.get(item.key) + result[item.key] = item.marshal(next_value) + return result + + def unmarshal(self, value): + return_value = {} + for item in self.items: + return_value[item.key] = item.unmarshal(value[item.key]) + return return_value + + @dataclass + class ManifestField(ManifestNode): + type: Any + default: Optional[Any] = None + + def marshal(self, value): + if value is None: + if self.default is not None: + return self.default + raise ManifestError(f'{self.key} is a required field but it is missing') + try: + return_value = self.type.marshal(value) + except Exception as err: + raise ManifestError(f'Failed to marshal {self.key}: {err}') + return return_value + + def unmarshal(self, value): + return self.type.unmarshal(value) + + @dataclass + class ManifestArray(ManifestNode): + type: Any + + def marshal(self, value): + if value is None: + return [] + + return_value = [] + try: + for item in value: + return_value.append(self.type.marshal(item)) + except Exception as err: + raise ManifestError(f'Failed to convert {self.key}={value} to array: {err}') + + return return_value + + def unmarshal(self, value): + return [self.type.unmarshal(item) for item in value] + + # TODO: add description for each field + SCHEMA = ManifestRoot('root', [ + ManifestField('version', ParsedMarshaller(Version), Version(1, 0, 0)), + ManifestRoot('package', [ + ManifestField('version', ParsedMarshaller(Version)), + ManifestField('name', DefaultMarshaller(str)), + ManifestField('description', DefaultMarshaller(str), ''), + ManifestField('base-os', ParsedMarshaller(ComponentConstraints), ComponentConstraints()), + ManifestArray('depends', ParsedMarshaller(PackageConstraint)), + ManifestArray('breaks', ParsedMarshaller(PackageConstraint)), + ManifestField('init-cfg', DefaultMarshaller(dict), dict()), + ManifestField('changelog', DefaultMarshaller(dict), dict()), + ManifestField('debug-dump', 
DefaultMarshaller(str), ''), + ]), + ManifestRoot('service', [ + ManifestField('name', DefaultMarshaller(str)), + ManifestArray('requires', DefaultMarshaller(str)), + ManifestArray('requisite', DefaultMarshaller(str)), + ManifestArray('wanted-by', DefaultMarshaller(str)), + ManifestArray('after', DefaultMarshaller(str)), + ManifestArray('before', DefaultMarshaller(str)), + ManifestArray('dependent', DefaultMarshaller(str)), + ManifestArray('dependent-of', DefaultMarshaller(str)), + ManifestField('post-start-action', DefaultMarshaller(str), ''), + ManifestField('pre-shutdown-action', DefaultMarshaller(str), ''), + ManifestField('asic-service', DefaultMarshaller(bool), False), + ManifestField('host-service', DefaultMarshaller(bool), True), + ManifestField('delayed', DefaultMarshaller(bool), False), + ]), + ManifestRoot('container', [ + ManifestField('privileged', DefaultMarshaller(bool), False), + ManifestArray('volumes', DefaultMarshaller(str)), + ManifestArray('mounts', ManifestRoot('mounts', [ + ManifestField('source', DefaultMarshaller(str)), + ManifestField('target', DefaultMarshaller(str)), + ManifestField('type', DefaultMarshaller(str)), + ])), + ManifestField('environment', DefaultMarshaller(dict), dict()), + ManifestArray('tmpfs', DefaultMarshaller(str)), + ]), + ManifestArray('processes', ManifestRoot('processes', [ + ManifestField('name', DefaultMarshaller(str)), + ])), + ManifestRoot('cli', [ + ManifestField('mandatory', DefaultMarshaller(bool), False), + ManifestField('show', DefaultMarshaller(str), ''), + ManifestField('config', DefaultMarshaller(str), ''), + ManifestField('clear', DefaultMarshaller(str), '') + ]) + ]) + + +class Manifest(dict): + """ Manifest object. """ + + SCHEMA = ManifestSchema.SCHEMA + + @classmethod + def marshal(cls, input_dict: dict): + return Manifest(cls.SCHEMA.marshal(input_dict)) + + def unmarshal(self) -> Dict: + return self.SCHEMA.unmarshal(self) diff --git a/sonic_package_manager/metadata.py b/sonic_package_manager/metadata.py new file mode 100644 index 000000000000..7f7c25ceafbc --- /dev/null +++ b/sonic_package_manager/metadata.py @@ -0,0 +1,185 @@ +#!/usr/bin/env python + +from dataclasses import dataclass, field + +import json +import tarfile +from typing import Dict + +from sonic_package_manager.errors import MetadataError +from sonic_package_manager.manifest import Manifest +from sonic_package_manager.version import Version + + +def deep_update(dst: Dict, src: Dict) -> Dict: + """ Deep update dst dictionary with src dictionary. + + Args: + dst: Dictionary to update + src: Dictionary to update with + + Returns: + New merged dictionary. + """ + + for key, value in src.items(): + if isinstance(value, dict): + node = dst.setdefault(key, {}) + deep_update(node, value) + else: + dst[key] = value + return dst + + +def translate_plain_to_tree(plain: Dict[str, str], sep='.') -> Dict: + """ Convert plain key/value dictionary into + a tree by spliting the key with '.' + + Args: + plain: Dictionary to convert into tree-like structure. 
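For illustration, `Manifest.marshal` validates an input dictionary against the schema above: required fields (package name and version, service name) must be present, everything else falls back to the declared defaults, and typed fields such as versions and constraints are parsed into their Python objects. A minimal valid manifest for a hypothetical `cpu-report` package:

```python
from sonic_package_manager.manifest import Manifest

manifest = Manifest.marshal({
    'package': {
        'name': 'cpu-report',
        'version': '1.0.0',
    },
    'service': {
        'name': 'cpu-report',
    },
})

print(manifest['package']['version'])       # parsed into a Version object
print(manifest['service']['host-service'])  # True  (schema default)
print(manifest['cli']['mandatory'])         # False (schema default)

# And back to a plain, JSON-serializable dictionary:
plain = manifest.unmarshal()
```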
+ Keys in this dictionary have to be in a format: + "[key0].+", e.g: "com.azure.sonic" that + will be converted into tree like struct: + + { + "com": { + "azure": { + "sonic": {} + } + } + } + sep: Seperator string + + Returns: + Tree like structure + + """ + + res = {} + for key, value in plain.items(): + if sep not in key: + res[key] = value + continue + namespace, key = key.split(sep, 1) + res.setdefault(namespace, {}) + deep_update(res[namespace], translate_plain_to_tree({key: value})) + return res + + +@dataclass +class Metadata: + """ Package metadata object that can be retrieved from + OCI image manifest. """ + + manifest: Manifest + components: Dict[str, Version] = field(default_factory=dict) + + +class MetadataResolver: + """ Resolve metadata for package from different sources. """ + + def __init__(self, docker, registry_resolver): + self.docker = docker + self.registry_resolver = registry_resolver + + def from_local(self, image: str) -> Metadata: + """ Reads manifest from locally installed docker image. + + Args: + image: Docker image ID + Returns: + Metadata + Raises: + MetadataError + """ + + labels = self.docker.labels(image) + if labels is None: + raise MetadataError('No manifest found in image labels') + + return self.from_labels(labels) + + def from_registry(self, + repository: str, + reference: str) -> Metadata: + """ Reads manifest from remote registry. + + Args: + repository: Repository to pull image from + reference: Reference, either tag or digest + Returns: + Metadata + Raises: + MetadataError + """ + + registry = self.registry_resolver.get_registry_for(repository) + + manifest = registry.manifest(repository, reference) + digest = manifest['config']['digest'] + + blob = registry.blobs(repository, digest) + labels = blob['config']['Labels'] + if labels is None: + raise MetadataError('No manifest found in image labels') + + return self.from_labels(labels) + + def from_tarball(self, image_path: str) -> Metadata: + """ Reads manifest image tarball. + Args: + image_path: Path to image tarball. + Returns: + Manifest + Raises: + MetadataError + """ + + with tarfile.open(image_path) as image: + manifest = json.loads(image.extractfile('manifest.json').read()) + + blob = manifest[0]['Config'] + image_config = json.loads(image.extractfile(blob).read()) + labels = image_config['config']['Labels'] + if labels is None: + raise MetadataError('No manifest found in image labels') + + return self.from_labels(labels) + + @classmethod + def from_labels(cls, labels: Dict[str, str]) -> Metadata: + """ Get manifest from image labels. 
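For illustration, package metadata travels as flat Docker image labels under the `com.azure.sonic` prefix; `translate_plain_to_tree` rebuilds the dotted keys into a nested dictionary, and `from_labels` then pulls the manifest JSON and component versions out of it. A small sketch with a minimal, illustrative label set:

```python
from sonic_package_manager.metadata import translate_plain_to_tree

labels = {
    'com.azure.sonic.manifest': '{"package": {"name": "cpu-report", "version": "1.0.0"}, '
                                '"service": {"name": "cpu-report"}}',
    'com.azure.sonic.versions.libswsscommon': '1.0.0',
}

tree = translate_plain_to_tree(labels)
sonic_metadata = tree['com']['azure']['sonic']

print(sonic_metadata['versions'])   # {'libswsscommon': '1.0.0'}
print(sonic_metadata['manifest'])   # the manifest JSON string, ready for json.loads()
```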
+ + Args: + labels: key, value string pairs + Returns: + Metadata + Raises: + MetadataError + """ + + metadata_dict = translate_plain_to_tree(labels) + try: + sonic_metadata = metadata_dict['com']['azure']['sonic'] + except KeyError: + raise MetadataError('No metadata found in image labels') + + try: + manifest_string = sonic_metadata['manifest'] + except KeyError: + raise MetadataError('No manifest found in image labels') + + try: + manifest_dict = json.loads(manifest_string) + except (ValueError, TypeError) as err: + raise MetadataError(f'Failed to parse manifest JSON: {err}') + + components = {} + if 'versions' in sonic_metadata: + for component, version in sonic_metadata['versions'].items(): + try: + components[component] = Version.parse(version) + except ValueError as err: + raise MetadataError(f'Failed to parse component version: {err}') + + return Metadata(Manifest.marshal(manifest_dict), components) diff --git a/sonic_package_manager/package.py b/sonic_package_manager/package.py new file mode 100644 index 000000000000..2928f17392f7 --- /dev/null +++ b/sonic_package_manager/package.py @@ -0,0 +1,53 @@ +#!/usr/bin/env python + +from dataclasses import dataclass + +from sonic_package_manager.database import PackageEntry +from sonic_package_manager.metadata import Metadata + + +@dataclass +class Package: + """ Package class is a representation of Package. + + Attributes: + entry: Package entry in package database + metadata: Metadata object for this package + manifest: Manifest for this package + components: Components versions for this package + name: Name of the package from package database + repository: Default repository to pull this package from + image_id: Docker image ID of the installed package; + It is set to None if package is not installed. + installed: Boolean flag whether package is installed or not. + build_in: Boolean flag whether package is built in or not. + + """ + + entry: PackageEntry + metadata: Metadata + + @property + def name(self): return self.entry.name + + @property + def repository(self): return self.entry.repository + + @property + def image_id(self): return self.entry.image_id + + @property + def installed(self): return self.entry.installed + + @property + def built_in(self): return self.entry.built_in + + @property + def version(self): return self.entry.version + + @property + def manifest(self): return self.metadata.manifest + + @property + def components(self): return self.metadata.components + diff --git a/sonic_package_manager/progress.py b/sonic_package_manager/progress.py new file mode 100644 index 000000000000..5258ebab9803 --- /dev/null +++ b/sonic_package_manager/progress.py @@ -0,0 +1,52 @@ +#!/usr/bin/env python + +import enlighten + +BAR_FMT = '{desc}{desc_pad}{percentage:3.0f}%|{bar}| {count:{len_total}.2f}/{total:.2f}{unit_pad}{unit} ' + \ + '[{elapsed}<{eta}, {rate:.2f}{unit_pad}{unit}/s]' + +COUNTER_FMT = '{desc}{desc_pad}{count:.1f} {unit}{unit_pad}' + \ + '[{elapsed}, {rate:.2f}{unit_pad}{unit}/s]{fill}' + + +class ProgressManager: + """ ProgressManager is used for creating multiple progress bars + which nicely interact with logging and prints. """ + + def __init__(self): + self.manager = enlighten.get_manager() + self.pbars = {} + + def __enter__(self): + return self.manager.__enter__() + + def __exit__(self, exc_type, exc_val, exc_tb): + return self.manager.__exit__(exc_type, exc_val, exc_tb) + + def new(self, id: str, *args, **kwargs): + """ Creates new progress bar with id. 
+ Args: + id: progress bar identifier + *args: pass arguments for progress bar creation + **kwargs: pass keyword arguments for progress bar creation. + """ + + if 'bar_format' not in kwargs: + kwargs['bar_format'] = BAR_FMT + if 'counter_format' not in kwargs: + kwargs['counter_format'] = COUNTER_FMT + + self.pbars[id] = self.manager.counter(*args, **kwargs) + + def get(self, id: str): + """ Returns progress bar by id. + Args: + id: progress bar identifier + Returns: + Progress bar. + """ + + return self.pbars[id] + + def __contains__(self, id): + return id in self.pbars diff --git a/sonic_package_manager/reference.py b/sonic_package_manager/reference.py new file mode 100644 index 000000000000..9c4d8e825c6a --- /dev/null +++ b/sonic_package_manager/reference.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python + +import re +from dataclasses import dataclass +from typing import Optional + + +@dataclass +class PackageReference: + """ PackageReference is a package version constraint. """ + + name: str + reference: Optional[str] = None + + def __str__(self): + return f'{self.name} {self.reference}' + + @staticmethod + def parse(expression: str) -> 'PackageReference': + REQUIREMENT_SPECIFIER_RE = \ + r'(?P[A-Za-z0-9_-]+)(?P@(?P.*))' + + match = re.match(REQUIREMENT_SPECIFIER_RE, expression) + if match is None: + raise ValueError(f'Invalid reference specifier {expression}') + groupdict = match.groupdict() + name = groupdict.get('name') + reference = groupdict.get('reference') + + return PackageReference(name, reference) diff --git a/sonic_package_manager/registry.py b/sonic_package_manager/registry.py new file mode 100644 index 000000000000..8a09d9136e98 --- /dev/null +++ b/sonic_package_manager/registry.py @@ -0,0 +1,157 @@ +#!/usr/bin/env python + +import json +from dataclasses import dataclass +from typing import List, Dict + +import requests +import www_authenticate +from docker_image import reference +from prettyprinter import pformat + +from sonic_package_manager.logger import log +from sonic_package_manager.utils import DockerReference + + +class AuthenticationServiceError(Exception): + """ Exception class for errors related to authentication. """ + + pass + + +class AuthenticationService: + """ AuthenticationService provides an authentication tokens. """ + + @staticmethod + def get_token(realm, service, scope) -> str: + """ Retrieve an authentication token. + + Args: + realm: Realm: url to request token. + service: service to request token for. + scope: scope to requests token for. + Returns: + token value as a string. + """ + + log.debug(f'getting authentication token: realm={realm} service={service} scope={scope}') + + response = requests.get(f'{realm}?scope={scope}&service={service}') + if response.status_code != requests.codes.ok: + raise AuthenticationServiceError(f'Failed to retrieve token') + + content = json.loads(response.content) + token = content['token'] + expires_in = content['expires_in'] + + log.debug(f'authentication token for realm={realm} service={service} scope={scope}: ' + f'token={token} expires_in={expires_in}') + + return token + + +@dataclass +class RegistryApiError(Exception): + """ Class for registry related errors. """ + + msg: str + response: requests.Response + + def __str__(self): + code = self.response.status_code + content = self.response.content.decode() + try: + content = json.loads(content) + except ValueError: + pass + return f'{self.msg}: code: {code} details: {pformat(content)}' + + +class Registry: + """ Provides a Docker registry interface. 
""" + + MIME_DOCKER_MANIFEST = 'application/vnd.docker.distribution.manifest.v2+json' + + def __init__(self, host: str): + self.url = host + + @staticmethod + def _execute_get_request(url, headers): + response = requests.get(url, headers=headers) + if response.status_code == requests.codes.unauthorized: + # Get authentication details from headers + # Registry should tell how to authenticate + www_authenticate_details = response.headers['Www-Authenticate'] + log.debug(f'unauthorized: retrieving authentication details ' + f'from response headers {www_authenticate_details}') + bearer = www_authenticate.parse(www_authenticate_details)['bearer'] + token = AuthenticationService.get_token(**bearer) + headers['Authorization'] = f'Bearer {token}' + # Repeat request + response = requests.get(url, headers=headers) + return response + + def _get_base_url(self, repository: str): + return f'{self.url}/v2/{repository}' + + def tags(self, repository: str) -> List[str]: + log.debug(f'getting tags for {repository}') + + _, repository = reference.Reference.split_docker_domain(repository) + headers = {'Accept': 'application/json'} + url = f'{self._get_base_url(repository)}/tags/list' + response = self._execute_get_request(url, headers) + if response.status_code != requests.codes.ok: + raise RegistryApiError(f'Failed to retrieve tags from {repository}', response) + + content = json.loads(response.content) + log.debug(f'tags list api response: f{content}') + + return content['tags'] + + def manifest(self, repository: str, ref: str) -> Dict: + log.debug(f'getting manifest for {repository}:{ref}') + + _, repository = reference.Reference.split_docker_domain(repository) + headers = {'Accept': self.MIME_DOCKER_MANIFEST} + url = f'{self._get_base_url(repository)}/manifests/{ref}' + response = self._execute_get_request(url, headers) + + if response.status_code != requests.codes.ok: + raise RegistryApiError(f'Failed to retrieve manifest for {repository}:{ref}', response) + + content = json.loads(response.content) + log.debug(f'manifest content for {repository}:{ref}: {content}') + + return content + + def blobs(self, repository: str, digest: str): + log.debug(f'retrieving blob for {repository}:{digest}') + + _, repository = reference.Reference.split_docker_domain(repository) + headers = {'Accept': self.MIME_DOCKER_MANIFEST} + url = f'{self._get_base_url(repository)}/blobs/{digest}' + response = self._execute_get_request(url, headers) + if response.status_code != requests.codes.ok: + raise RegistryApiError(f'Failed to retrieve blobs for {repository}:{digest}', response) + content = json.loads(response.content) + + log.debug(f'retrieved blob for {repository}:{digest}: {content}') + return content + + +class RegistryResolver: + """ Returns a registry object based on the input repository reference + string. 
""" + + DockerHubRegistry = Registry('https://index.docker.io') + + def __init__(self): + pass + + def get_registry_for(self, ref: str) -> Registry: + domain, _ = DockerReference.split_docker_domain(ref) + if domain == reference.DEFAULT_DOMAIN: + return self.DockerHubRegistry + # TODO: support insecure registries + return Registry(f'https://{domain}') diff --git a/sonic_package_manager/service_creator/__init__.py b/sonic_package_manager/service_creator/__init__.py new file mode 100644 index 000000000000..e2af81ceb59b --- /dev/null +++ b/sonic_package_manager/service_creator/__init__.py @@ -0,0 +1,3 @@ +#!/usr/bin/env python + +ETC_SONIC_PATH = '/etc/sonic' diff --git a/sonic_package_manager/service_creator/creator.py b/sonic_package_manager/service_creator/creator.py new file mode 100644 index 000000000000..54b9315bee0d --- /dev/null +++ b/sonic_package_manager/service_creator/creator.py @@ -0,0 +1,342 @@ +#!/usr/bin/env python + +import contextlib +import os +import stat +import subprocess +from typing import Dict + +import jinja2 as jinja2 +from prettyprinter import pformat + +from sonic_package_manager.logger import log +from sonic_package_manager.package import Package +from sonic_package_manager.service_creator import ETC_SONIC_PATH +from sonic_package_manager.service_creator.feature import FeatureRegistry +from sonic_package_manager.service_creator.utils import in_chroot + +SERVICE_FILE_TEMPLATE = 'sonic.service.j2' +TIMER_UNIT_TEMPLATE = 'timer.unit.j2' + +SYSTEMD_LOCATION = '/usr/lib/systemd/system' + +SERVICE_MGMT_SCRIPT_TEMPLATE = 'service_mgmt.sh.j2' +SERVICE_MGMT_SCRIPT_LOCATION = '/usr/local/bin' + +DOCKER_CTL_SCRIPT_TEMPLATE = 'docker_image_ctl.j2' +DOCKER_CTL_SCRIPT_LOCATION = '/usr/bin' + +DEBUG_DUMP_SCRIPT_TEMPLATE = 'dump.sh.j2' +DEBUG_DUMP_SCRIPT_LOCATION = '/usr/local/bin/debug-dump/' + +TEMPLATES_PATH = '/usr/share/sonic/templates' + + +class ServiceCreatorError(Exception): + pass + + +def render_template(in_template: str, + outfile: str, + render_ctx: Dict, + executable: bool = False): + """ Template renderer helper routine. + Args: + in_template: Input file with template content + outfile: Output file to render template to + render_ctx: Dictionary used to generate jinja2 template + executable: Set executable bit on rendered file + """ + + log.debug(f'Rendering {in_template} to {outfile} with {pformat(render_ctx)}') + + with open(in_template, 'r') as instream: + template = jinja2.Template(instream.read()) + + with open(outfile, 'w') as outstream: + outstream.write(template.render(**render_ctx)) + + if executable: + set_executable_bit(outfile) + + +def get_tmpl_path(template_name: str) -> str: + """ Returns a path to a template. + Args: + template_name: Template file name. + """ + + return os.path.join(TEMPLATES_PATH, template_name) + + +def set_executable_bit(filepath): + """ Sets +x on filepath. """ + + st = os.stat(filepath) + os.chmod(filepath, st.st_mode | stat.S_IEXEC) + + +def run_command(command: str): + """ Run arbitrary bash command. + Args: + command: String command to execute as bash script + Raises: + PackageManagerError: Raised when the command return code + is not 0. + """ + + log.debug(f'running command: {command}') + + proc = subprocess.Popen(command, + shell=True, + executable='/bin/bash', + stdout=subprocess.PIPE) + (out, _) = proc.communicate() + if proc.returncode != 0: + raise ServiceCreatorError(f'Failed to execute "{command}"') + + +class ServiceCreator: + """ Creates and registers services in SONiC based on the package + manifest. 
""" + + def __init__(self, feature_registry: FeatureRegistry, sonic_db): + self.feature_registry = feature_registry + self.sonic_db = sonic_db + + def create(self, + package: Package, + register_feature=True, + state='enabled', + owner='local'): + try: + self.generate_container_mgmt(package) + self.generate_service_mgmt(package) + self.update_dependent_list_file(package) + self.generate_systemd_service(package) + self.generate_dump_script(package) + + self.set_initial_config(package) + + self.post_operation_hook() + + if register_feature: + self.feature_registry.register(package.manifest, + state, owner) + except (Exception, KeyboardInterrupt): + self.remove(package, register_feature) + raise + + def remove(self, package: Package, deregister_feature=True): + name = package.manifest['service']['name'] + + def remove_file(path): + if os.path.exists(path): + os.remove(path) + log.info(f'removed {path}') + + remove_file(os.path.join(SYSTEMD_LOCATION, f'{name}.service')) + remove_file(os.path.join(SYSTEMD_LOCATION, f'{name}@.service')) + remove_file(os.path.join(SERVICE_MGMT_SCRIPT_LOCATION, f'{name}.sh')) + remove_file(os.path.join(DOCKER_CTL_SCRIPT_LOCATION, f'{name}.sh')) + remove_file(os.path.join(DEBUG_DUMP_SCRIPT_LOCATION, f'{name}')) + + self.update_dependent_list_file(package, remove=True) + + self.post_operation_hook() + + if deregister_feature: + self.feature_registry.deregister(package.manifest['service']['name']) + self.remove_config(package) + + def post_operation_hook(self): + if not in_chroot(): + run_command('systemctl daemon-reload') + + def generate_container_mgmt(self, package: Package): + image_id = package.image_id + name = package.manifest['service']['name'] + container_spec = package.manifest['container'] + script_path = os.path.join(DOCKER_CTL_SCRIPT_LOCATION, f'{name}.sh') + script_template = get_tmpl_path(DOCKER_CTL_SCRIPT_TEMPLATE) + run_opt = [] + + if container_spec['privileged']: + run_opt.append('--privileged') + + run_opt.append('-t') + + for volume in container_spec['volumes']: + run_opt.append(f'-v {volume}') + + for mount in container_spec['mounts']: + mount_type, source, target = mount['type'], mount['source'], mount['target'] + run_opt.append(f'--mount type={mount_type},source={source},target={target}') + + for tmpfs_mount in container_spec['tmpfs']: + run_opt.append(f'--tmpfs {tmpfs_mount}') + + for env_name, value in container_spec['environment'].items(): + run_opt.append(f'-e {env_name}={value}') + + run_opt = ' '.join(run_opt) + render_ctx = { + 'docker_container_name': name, + 'docker_image_id': image_id, + 'docker_image_run_opt': run_opt, + } + render_template(script_template, script_path, render_ctx, executable=True) + log.info(f'generated {script_path}') + + def generate_service_mgmt(self, package: Package): + name = package.manifest['service']['name'] + multi_instance_services = self.feature_registry.get_multi_instance_features() + script_path = os.path.join(SERVICE_MGMT_SCRIPT_LOCATION, f'{name}.sh') + scrip_template = get_tmpl_path(SERVICE_MGMT_SCRIPT_TEMPLATE) + render_ctx = { + 'source': get_tmpl_path(SERVICE_MGMT_SCRIPT_TEMPLATE), + 'manifest': package.manifest.unmarshal(), + 'multi_instance_services': multi_instance_services, + } + render_template(scrip_template, script_path, render_ctx, executable=True) + log.info(f'generated {script_path}') + + def generate_systemd_service(self, package: Package): + name = package.manifest['service']['name'] + multi_instance_services = self.feature_registry.get_multi_instance_features() + + template = 
get_tmpl_path(SERVICE_FILE_TEMPLATE) + template_vars = { + 'source': get_tmpl_path(SERVICE_FILE_TEMPLATE), + 'manifest': package.manifest.unmarshal(), + 'multi_instance': False, + 'multi_instance_services': multi_instance_services, + } + output_file = os.path.join(SYSTEMD_LOCATION, f'{name}.service') + render_template(template, output_file, template_vars) + log.info(f'generated {output_file}') + + if package.manifest['service']['asic-service']: + output_file = os.path.join(SYSTEMD_LOCATION, f'{name}@.service') + template_vars['multi_instance'] = True + render_template(template, output_file, template_vars) + log.info(f'generated {output_file}') + + if package.manifest['service']['delayed']: + template_vars = { + 'source': get_tmpl_path(TIMER_UNIT_TEMPLATE), + 'manifest': package.manifest.unmarshal(), + 'multi_instance': False, + } + output_file = os.path.join(SYSTEMD_LOCATION, f'{name}.timer') + template = os.path.join(TEMPLATES_PATH, TIMER_UNIT_TEMPLATE) + render_template(template, output_file, template_vars) + log.info(f'generated {output_file}') + + if package.manifest['service']['asic-service']: + output_file = os.path.join(SYSTEMD_LOCATION, f'{name}@.timer') + template_vars['multi_instance'] = True + render_template(template, output_file, template_vars) + log.info(f'generated {output_file}') + + def update_dependent_list_file(self, package: Package, remove=False): + name = package.manifest['service']['name'] + dependent_of = package.manifest['service']['dependent-of'] + host_service = package.manifest['service']['host-service'] + asic_service = package.manifest['service']['asic-service'] + + def update_dependent(service, name, multi_inst): + if multi_inst: + filename = f'{service}_multi_inst_dependent' + else: + filename = f'{service}_dependent' + + filepath = os.path.join(ETC_SONIC_PATH, filename) + + dependent_services = set() + if os.path.exists(filepath): + with open(filepath) as fp: + dependent_services.update({line.strip() for line in fp.readlines()}) + if remove: + with contextlib.suppress(KeyError): + dependent_services.remove(name) + else: + dependent_services.add(name) + with open(filepath, 'w') as fp: + fp.write('\n'.join(dependent_services)) + + for service in dependent_of: + if host_service: + update_dependent(service, name, multi_inst=False) + if asic_service: + update_dependent(service, name, multi_inst=True) + + def generate_dump_script(self, package): + name = package.manifest['service']['name'] + + if not package.manifest['package']['debug-dump']: + return + + if not os.path.exists(DEBUG_DUMP_SCRIPT_LOCATION): + os.mkdir(DEBUG_DUMP_SCRIPT_LOCATION) + + scrip_template = os.path.join(TEMPLATES_PATH, DEBUG_DUMP_SCRIPT_TEMPLATE) + script_path = os.path.join(DEBUG_DUMP_SCRIPT_LOCATION, f'{name}') + render_ctx = { + 'source': get_tmpl_path(SERVICE_MGMT_SCRIPT_TEMPLATE), + 'manifest': package.manifest.unmarshal(), + } + render_template(scrip_template, script_path, render_ctx, executable=True) + log.info(f'generated {script_path}') + + def get_tables(self, table_name): + tables = [] + + running_table = self.sonic_db.running_table(table_name) + if running_table is not None: + tables.append(running_table) + + persistent_table = self.sonic_db.persistent_table(table_name) + if persistent_table is not None: + tables.append(persistent_table) + + initial_table = self.sonic_db.initial_table(table_name) + if initial_table is not None: + tables.append(initial_table) + + return tables + + def set_initial_config(self, package): + init_cfg = package.manifest['package']['init-cfg'] + + for 
tablename, content in init_cfg.items(): + if not isinstance(content, dict): + continue + + tables = self.get_tables(tablename) + + for key in content: + for table in tables: + cfg = content[key] + exists, old_fvs = table.get(key) + if exists: + cfg.update(old_fvs) + fvs = list(cfg.items()) + table.set(key, fvs) + + def remove_config(self, package): + # Remove configuration based on init-cfg tables, so having + # init-cfg even with tables without keys might be a good idea. + # TODO: init-cfg should be validated with yang model + # TODO: remove config from tables known to yang model + init_cfg = package.manifest['package']['init-cfg'] + + for tablename, content in init_cfg.items(): + if not isinstance(content, dict): + continue + + tables = self.get_tables(tablename) + + for key in content: + for table in tables: + table._del(key) diff --git a/sonic_package_manager/service_creator/feature.py b/sonic_package_manager/service_creator/feature.py new file mode 100644 index 000000000000..4df06384d2f4 --- /dev/null +++ b/sonic_package_manager/service_creator/feature.py @@ -0,0 +1,108 @@ +#!/usr/bin/env python + +""" This module implements new feature registration/de-registration in SONiC system. """ + +from typing import Dict, Type + +from sonic_package_manager.manifest import Manifest +from sonic_package_manager.service_creator.sonic_db import SonicDB + +FEATURE = 'FEATURE' +DEFAULT_FEATURE_CONFIG = { + 'state': 'disabled', + 'auto_restart': 'enabled', + 'high_mem_alert': 'disabled', + 'set_owner': 'local' +} + + +class FeatureRegistry: + """ FeatureRegistry class provides an interface to + register/de-register new feature persistently. """ + + def __init__(self, sonic_db: Type[SonicDB]): + self._sonic_db = sonic_db + + def register(self, + manifest: Manifest, + state: str = 'disabled', + owner: str = 'local'): + name = manifest['service']['name'] + for table in self._get_tables(): + cfg_entries = self.get_default_feature_entries(state, owner) + non_cfg_entries = self.get_non_configurable_feature_entries(manifest) + + exists, current_cfg = table.get(name) + + new_cfg = cfg_entries.copy() + # Override configurable entries with CONFIG DB data. + new_cfg = {**new_cfg, **dict(current_cfg)} + # Override CONFIG DB data with non configurable entries. + new_cfg = {**new_cfg, **non_cfg_entries} + + table.set(name, list(new_cfg.items())) + + def deregister(self, name: str): + for table in self._get_tables(): + table._del(name) + + def is_feature_enabled(self, name: str) -> bool: + """ Returns whether the feature is current enabled + or not. Accesses running CONFIG DB. If no running CONFIG_DB + table is found in tables returns False. """ + + running_db_table = self._sonic_db.running_table(FEATURE) + if running_db_table is None: + return False + + exists, cfg = running_db_table.get(name) + if not exists: + return False + cfg = dict(cfg) + return cfg.get('state').lower() == 'enabled' + + def get_multi_instance_features(self): + res = [] + init_db_table = self._sonic_db.initial_table(FEATURE) + for feature in init_db_table.keys(): + exists, cfg = init_db_table.get(feature) + assert exists + cfg = dict(cfg) + asic_flag = str(cfg.get('has_per_asic_scope', 'False')) + if asic_flag.lower() == 'true': + res.append(feature) + return res + + @staticmethod + def get_default_feature_entries(state=None, owner=None) -> Dict[str, str]: + """ Get configurable feature table entries: + e.g. 'state', 'auto_restart', etc. 
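+
+        Example (an illustrative sketch of the returned mapping):
+
+            get_default_feature_entries(state='enabled')
+            # {'state': 'enabled', 'auto_restart': 'enabled',
+            #  'high_mem_alert': 'disabled', 'set_owner': 'local'}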
""" + + cfg = DEFAULT_FEATURE_CONFIG.copy() + if state: + cfg['state'] = state + if owner: + cfg['set_owner'] = owner + return cfg + + @staticmethod + def get_non_configurable_feature_entries(manifest) -> Dict[str, str]: + """ Get non-configurable feature table entries: e.g. 'has_timer' """ + + return { + 'has_per_asic_scope': str(manifest['service']['asic-service']), + 'has_global_scope': str(manifest['service']['host-service']), + 'has_timer': str(manifest['service']['delayed']), + } + + def _get_tables(self): + tables = [] + running = self._sonic_db.running_table(FEATURE) + if running is not None: # it's Ok if there is no database container running + tables.append(running) + persistent = self._sonic_db.persistent_table(FEATURE) + if persistent is not None: # it's Ok if there is no config_db.json + tables.append(persistent) + tables.append(self._sonic_db.initial_table(FEATURE)) # init_cfg.json is must + + return tables diff --git a/sonic_package_manager/service_creator/sonic_db.py b/sonic_package_manager/service_creator/sonic_db.py new file mode 100644 index 000000000000..a064c60c4a53 --- /dev/null +++ b/sonic_package_manager/service_creator/sonic_db.py @@ -0,0 +1,98 @@ +#!/usr/bin/env python + +import contextlib +import json +import os + +from swsscommon import swsscommon + +from sonic_package_manager.service_creator import ETC_SONIC_PATH +from sonic_package_manager.service_creator.utils import in_chroot + +CONFIG_DB = 'CONFIG_DB' +CONFIG_DB_JSON = os.path.join(ETC_SONIC_PATH, 'config_db.json') +INIT_CFG_JSON = os.path.join(ETC_SONIC_PATH, 'init_cfg.json') + + +class FileDbTable: + """ swsscommon.Table adapter for persistent DBs. """ + + def __init__(self, file, table): + self._file = file + self._table = table + + def keys(self): + with open(self._file) as stream: + config = json.load(stream) + return config.get(self._table, {}).keys() + + def get(self, key): + with open(self._file) as stream: + config = json.load(stream) + + table = config.get(self._table, {}) + exists = key in table + fvs_dict = table.get(key, {}) + fvs = list(fvs_dict.items()) + return exists, fvs + + def set(self, key, fvs): + with open(self._file) as stream: + config = json.load(stream) + + table = config.setdefault(self._table, {}) + table.update({key: dict(fvs)}) + + with open(self._file, 'w') as stream: + json.dump(config, stream, indent=4) + + def _del(self, key): + with open(self._file) as stream: + config = json.load(stream) + + with contextlib.suppress(KeyError): + config[self._table].pop(key) + + with open(self._file, 'w') as stream: + json.dump(config, stream, indent=4) + + +class SonicDB: + """ Store different DB access objects for + running DB and also for persistent and initial + configs. """ + + _running = None + + @classmethod + def running_table(cls, table): + """ Returns running DB table. """ + + # In chroot we can connect to a running + # DB via TCP socket, we should ignore this case. + if in_chroot(): + return None + + if cls._running is None: + try: + cls._running = swsscommon.DBConnector(CONFIG_DB, 0) + except RuntimeError: + # Failed to connect to DB. + return None + + return swsscommon.Table(cls._running, table) + + @classmethod + def persistent_table(cls, table): + """ Returns persistent DB table. """ + + if not os.path.exists(CONFIG_DB_JSON): + return None + + return FileDbTable(CONFIG_DB_JSON, table) + + @classmethod + def initial_table(cls, table): + """ Returns initial DB table. 
""" + + return FileDbTable(INIT_CFG_JSON, table) diff --git a/sonic_package_manager/service_creator/utils.py b/sonic_package_manager/service_creator/utils.py new file mode 100644 index 000000000000..cdeeb17abb22 --- /dev/null +++ b/sonic_package_manager/service_creator/utils.py @@ -0,0 +1,17 @@ +#!/usr/bin/env python + +import os + + +def in_chroot() -> bool: + """ Verify if we are running in chroot or not + by comparing root / mount point device id and inode + with init process - /proc/1/root mount point device + id and inode. If those match we are not chroot-ed + otherwise we are. """ + + root_stat = os.stat('/') + init_root_stat = os.stat('/proc/1/root') + + return (root_stat.st_dev, root_stat.st_ino) != \ + (init_root_stat.st_dev, init_root_stat.st_ino) diff --git a/sonic_package_manager/source.py b/sonic_package_manager/source.py new file mode 100644 index 000000000000..c179e0b3ee19 --- /dev/null +++ b/sonic_package_manager/source.py @@ -0,0 +1,183 @@ +#!/usr/bin/env python3 + +from sonic_package_manager.database import PackageDatabase, PackageEntry +from sonic_package_manager.dockerapi import DockerApi, get_repository_from_image +from sonic_package_manager.metadata import Metadata, MetadataResolver +from sonic_package_manager.package import Package + + +class PackageSource(object): + """ PackageSource abstracts the way manifest is read + and image is retrieved based on different image sources. + (i.e from registry, from tarball or locally installed) """ + + def __init__(self, + database: PackageDatabase, + docker: DockerApi, + metadata_resolver: MetadataResolver): + self.database = database + self.docker = docker + self.metadata_resolver = metadata_resolver + + def get_metadata(self) -> Metadata: + """ Returns package manifest. + Child class has to implement this method. + + Returns: + Metadata + """ + raise NotImplementedError + + def install_image(self, package: Package): + """ Install image based on package source. + Child class has to implement this method. + + Args: + package: SONiC Package + Returns: + Docker Image object. + """ + + raise NotImplementedError + + def install(self, package: Package): + """ Install image based on package source, + record installation infromation in PackageEntry.. + + Args: + package: SONiC Package + """ + + image = self.install_image(package) + package.entry.image_id = image.id + # if no repository is defined for this package + # get repository from image + if not package.repository: + package.entry.repository = get_repository_from_image(image) + + def uninstall(self, package: Package): + """ Uninstall image. + + Args: + package: SONiC Package + """ + + self.docker.rmi(package.image_id) + package.entry.image_id = None + + def get_package(self) -> Package: + """ Returns SONiC Package based on manifest. + + Returns: + SONiC Package + """ + + metadata = self.get_metadata() + manifest = metadata.manifest + + name = manifest['package']['name'] + description = manifest['package']['description'] + + # Will be resolved in install() method. + # When installing from tarball we don't know yet + # the repository for this package. + repository = None + + if self.database.has_package(name): + # inherit package database info + package_entry = self.database.get_package(name) + else: + package_entry = PackageEntry(name, repository, + description=description) + + return Package( + package_entry, + metadata + ) + + +class TarballSource(PackageSource): + """ TarballSource implements PackageSource + for locally existing image saved as tarball. 
""" + + def __init__(self, + tarball_path: str, + database: PackageDatabase, + docker: DockerApi, + metadata_resolver: MetadataResolver): + super().__init__(database, + docker, + metadata_resolver) + self.tarball_path = tarball_path + + def get_metadata(self) -> Metadata: + """ Returns manifest read from tarball. """ + + return self.metadata_resolver.from_tarball(self.tarball_path) + + def install_image(self, package: Package): + """ Installs image from local tarball source. """ + + return self.docker.load(self.tarball_path) + + +class RegistrySource(PackageSource): + """ RegistrySource implements PackageSource + for packages that are pulled from registry. """ + + def __init__(self, + repository: str, + reference: str, + database: PackageDatabase, + docker: DockerApi, + metadata_resolver: MetadataResolver): + super().__init__(database, + docker, + metadata_resolver) + self.repository = repository + self.reference = reference + + def get_metadata(self) -> Metadata: + """ Returns manifest read from registry. """ + + return self.metadata_resolver.from_registry(self.repository, + self.reference) + + def install_image(self, package: Package): + """ Installs image from registry. """ + + image_id = self.docker.pull(self.repository, self.reference) + if not package.entry.default_reference: + package.entry.default_reference = self.reference + return image_id + + +class LocalSource(PackageSource): + """ LocalSource accesses local docker library to retrieve manifest + but does not implement installation of the image. """ + + def __init__(self, + entry: PackageEntry, + database: PackageDatabase, + docker: DockerApi, + metadata_resolver: MetadataResolver): + super().__init__(database, + docker, + metadata_resolver) + self.entry = entry + + def get_metadata(self) -> Metadata: + """ Returns manifest read from locally installed Docker. """ + + image = self.entry.image_id + + if self.entry.built_in: + # Built-in (installed not via sonic-package-manager) + # won't have image_id in database. Using their + # repository name as image. + image = f'{self.entry.repository}:latest' + + return self.metadata_resolver.from_local(image) + + def get_package(self) -> Package: + return Package(self.entry, self.get_metadata()) diff --git a/sonic_package_manager/utils.py b/sonic_package_manager/utils.py new file mode 100644 index 000000000000..410947dd246f --- /dev/null +++ b/sonic_package_manager/utils.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python + +import keyword +import re + +from docker_image.reference import Reference + +DockerReference = Reference + + +def make_python_identifier(string): + """ + Takes an arbitrary string and creates a valid Python identifier. 
+ + Identifiers must follow the convention outlined here: + https://docs.python.org/2/reference/lexical_analysis.html#identifiers + """ + + # create a working copy (and make it lowercase, while we're at it) + s = string.lower() + + # remove leading and trailing whitespace + s = s.strip() + + # Make spaces into underscores + s = re.sub('[\\s\\t\\n]+', '_', s) + + # Remove invalid characters + s = re.sub('[^0-9a-zA-Z_]', '', s) + + # Remove leading characters until we find a letter or underscore + s = re.sub('^[^a-zA-Z_]+', '', s) + + # Check that the string is not a python identifier + while s in keyword.kwlist: + if re.match(".*?_\d+$", s): + i = re.match(".*?_(\d+)$", s).groups()[0] + s = s.strip('_'+i) + '_'+str(int(i)+1) + else: + s += '_1' + + return s diff --git a/sonic_package_manager/version.py b/sonic_package_manager/version.py new file mode 100644 index 000000000000..e5a5623d3bd9 --- /dev/null +++ b/sonic_package_manager/version.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python + +""" Version and helpers routines. """ + +import semver + +Version = semver.Version +VersionRange = semver.VersionRange + + +def version_to_tag(ver: Version) -> str: + """ Converts the version to Docker compliant tag string. """ + + return str(ver).replace('+', '_') + + +def tag_to_version(tag: str) -> Version: + """ Converts the version to Docker compliant tag string. """ + + try: + return Version.parse(tag.replace('_', '+')) + except ValueError as err: + raise ValueError(f'Failed to convert {tag} to version string: {err}') diff --git a/tests/sonic_package_manager/conftest.py b/tests/sonic_package_manager/conftest.py new file mode 100644 index 000000000000..cee997596c64 --- /dev/null +++ b/tests/sonic_package_manager/conftest.py @@ -0,0 +1,377 @@ +#!/usr/bin/env python + +from dataclasses import dataclass +from unittest import mock +from unittest.mock import Mock, MagicMock + +import pytest +from docker_image.reference import Reference + +from sonic_package_manager.database import PackageDatabase, PackageEntry +from sonic_package_manager.manager import DockerApi, PackageManager +from sonic_package_manager.manifest import Manifest +from sonic_package_manager.metadata import Metadata, MetadataResolver +from sonic_package_manager.registry import RegistryResolver +from sonic_package_manager.version import Version +from sonic_package_manager.service_creator.creator import * + + +@pytest.fixture +def mock_docker_api(): + docker = MagicMock(DockerApi) + + @dataclass + class Image: + id: str + + @property + def attrs(self): + return {'RepoTags': [self.id]} + + def pull(repo, ref): + return Image(f'{repo}:{ref}') + + def load(filename): + return Image(filename) + + docker.pull = MagicMock(side_effect=pull) + docker.load = MagicMock(side_effect=load) + + yield docker + + +@pytest.fixture +def mock_registry_resolver(): + yield Mock(RegistryResolver) + + +@pytest.fixture +def mock_metadata_resolver(): + yield Mock(MetadataResolver) + + +@pytest.fixture +def mock_feature_registry(): + yield MagicMock() + + +@pytest.fixture +def mock_service_creator(): + yield Mock() + + +@pytest.fixture +def mock_sonic_db(): + yield Mock() + + +@pytest.fixture +def fake_metadata_resolver(): + class FakeMetadataResolver: + def __init__(self): + self.metadata_store = {} + self.add('docker-database', 'latest', 'database', '1.0.0') + self.add('docker-orchagent', 'latest', 'swss', '1.0.0', + components={ + 'libswsscommon': Version.parse('1.0.0'), + 'libsairedis': Version.parse('1.0.0') + } + ) + self.add('Azure/docker-test', '1.6.0', 
'test-package', '1.6.0') + self.add('Azure/docker-test-2', '1.5.0', 'test-package-2', '1.5.0') + self.add('Azure/docker-test-2', '2.0.0', 'test-package-2', '2.0.0') + self.add('Azure/docker-test-3', 'latest', 'test-package-3', '1.6.0') + self.add('Azure/docker-test-3', '1.5.0', 'test-package-3', '1.5.0') + self.add('Azure/docker-test-3', '1.6.0', 'test-package-3', '1.6.0') + self.add('Azure/docker-test-4', '1.5.0', 'test-package-4', '1.5.0') + self.add('Azure/docker-test-5', '1.5.0', 'test-package-5', '1.5.0') + self.add('Azure/docker-test-5', '1.9.0', 'test-package-5', '1.9.0') + self.add('Azure/docker-test-6', '1.5.0', 'test-package-6', '1.5.0') + self.add('Azure/docker-test-6', '1.9.0', 'test-package-6', '1.9.0') + self.add('Azure/docker-test-6', '2.0.0', 'test-package-6', '2.0.0') + self.add('Azure/docker-test-6', 'latest', 'test-package-6', '1.5.0') + + def from_registry(self, repository: str, reference: str): + manifest = Manifest.marshal(self.metadata_store[repository][reference]['manifest']) + components = self.metadata_store[repository][reference]['components'] + return Metadata(manifest, components) + + def from_local(self, image: str): + ref = Reference.parse(image) + manifest = Manifest.marshal(self.metadata_store[ref['name']][ref['tag']]['manifest']) + components = self.metadata_store[ref['name']][ref['tag']]['components'] + return Metadata(manifest, components) + + def from_tarball(self, filepath: str) -> Manifest: + path, ref = filepath.split(':') + manifest = Manifest.marshal(self.metadata_store[path][ref]['manifest']) + components = self.metadata_store[path][ref]['components'] + return Metadata(manifest, components) + + def add(self, repo, reference, name, version, components=None): + repo_dict = self.metadata_store.setdefault(repo, {}) + repo_dict[reference] = { + 'manifest': { + 'package': { + 'version': version, + 'name': name, + 'base-os': {}, + }, + 'service': { + 'name': name, + } + }, + 'components': components or {}, + } + + yield FakeMetadataResolver() + + +@pytest.fixture +def fake_device_info(): + class FakeDeviceInfo: + def __init__(self): + self.multi_npu = True + self.num_npus = 1 + self.version_info = { + 'libswsscommon': '1.0.0', + } + + def is_multi_npu(self): + return self.multi_npu + + def get_num_npus(self): + return self.num_npus + + def get_sonic_version_info(self): + return self.version_info + + yield FakeDeviceInfo() + + +def add_package(content, metadata_resolver, repository, reference, **kwargs): + metadata = metadata_resolver.from_registry(repository, reference) + name = metadata.manifest['package']['name'] + version = metadata.manifest['package']['version'] + installed = kwargs.get('installed', False) + built_in = kwargs.get('built-in', False) + + if installed and not built_in and 'image_id' not in kwargs: + kwargs['image_id'] = f'{repository}:{reference}' + + if installed and 'version' not in kwargs: + kwargs['version'] = version + + content[name] = PackageEntry(name, repository, **kwargs) + + +@pytest.fixture +def fake_db(fake_metadata_resolver): + content = {} + + add_package( + content, + fake_metadata_resolver, + 'docker-database', + 'latest', + description='SONiC database service', + default_reference='1.0.0', + installed=True, + built_in=True + ) + add_package( + content, + fake_metadata_resolver, + 'docker-orchagent', + 'latest', + description='SONiC switch state service', + default_reference='1.0.0', + installed=True, + built_in=True + ) + add_package( + content, + fake_metadata_resolver, + 'Azure/docker-test', + '1.6.0', + 
description='SONiC Package Manager Test Package', + default_reference='1.6.0', + installed=False, + built_in=False + ) + add_package( + content, + fake_metadata_resolver, + 'Azure/docker-test-2', + '1.5.0', + description='SONiC Package Manager Test Package #2', + default_reference='1.5.0', + installed=False, + built_in=False + ) + add_package( + content, + fake_metadata_resolver, + 'Azure/docker-test-3', + '1.5.0', + description='SONiC Package Manager Test Package #3', + default_reference='1.5.0', + installed=True, + built_in=False + ) + add_package( + content, + fake_metadata_resolver, + 'Azure/docker-test-5', + '1.9.0', + description='SONiC Package Manager Test Package #5', + default_reference='1.9.0', + installed=False, + built_in=False + ) + add_package( + content, + fake_metadata_resolver, + 'Azure/docker-test-6', + '1.5.0', + description='SONiC Package Manager Test Package #6', + default_reference='1.5.0', + installed=False, + built_in=False + ) + + yield PackageDatabase(content) + + +@pytest.fixture +def fake_db_for_migration(fake_metadata_resolver): + content = {} + add_package( + content, + fake_metadata_resolver, + 'docker-database', + 'latest', + description='SONiC database service', + default_reference='1.0.0', + installed=True, + built_in=True + ) + add_package( + content, + fake_metadata_resolver, + 'docker-orchagent', + 'latest', + description='SONiC switch state service', + default_reference='1.0.0', + installed=True, + built_in=True + ) + add_package( + content, + fake_metadata_resolver, + 'Azure/docker-test', + '1.6.0', + description='SONiC Package Manager Test Package', + default_reference='1.6.0', + installed=False, + built_in=False + ) + add_package( + content, + fake_metadata_resolver, + 'Azure/docker-test-2', + '2.0.0', + description='SONiC Package Manager Test Package #2', + default_reference='2.0.0', + installed=False, + built_in=False + ) + add_package( + content, + fake_metadata_resolver, + 'Azure/docker-test-3', + '1.6.0', + description='SONiC Package Manager Test Package #3', + default_reference='1.6.0', + installed=True, + built_in=False + ) + add_package( + content, + fake_metadata_resolver, + 'Azure/docker-test-4', + '1.5.0', + description='SONiC Package Manager Test Package #4', + default_reference='1.5.0', + installed=True, + built_in=False + ) + add_package( + content, + fake_metadata_resolver, + 'Azure/docker-test-5', + '1.5.0', + description='SONiC Package Manager Test Package #5', + default_reference='1.5.0', + installed=True, + built_in=False + ) + add_package( + content, + fake_metadata_resolver, + 'Azure/docker-test-6', + '2.0.0', + description='SONiC Package Manager Test Package #6', + default_reference='2.0.0', + installed=True, + built_in=False + ) + + yield PackageDatabase(content) + + +@pytest.fixture() +def sonic_fs(fs): + fs.create_file('/proc/1/root') + fs.create_dir(ETC_SONIC_PATH) + fs.create_dir(SYSTEMD_LOCATION) + fs.create_dir(DOCKER_CTL_SCRIPT_LOCATION) + fs.create_dir(SERVICE_MGMT_SCRIPT_LOCATION) + fs.create_file(os.path.join(TEMPLATES_PATH, SERVICE_FILE_TEMPLATE)) + fs.create_file(os.path.join(TEMPLATES_PATH, TIMER_UNIT_TEMPLATE)) + fs.create_file(os.path.join(TEMPLATES_PATH, SERVICE_MGMT_SCRIPT_TEMPLATE)) + fs.create_file(os.path.join(TEMPLATES_PATH, DOCKER_CTL_SCRIPT_TEMPLATE)) + fs.create_file(os.path.join(TEMPLATES_PATH, DEBUG_DUMP_SCRIPT_TEMPLATE)) + yield fs + + +@pytest.fixture(autouse=True) +def patch_pkgutil(): + with mock.patch('pkgutil.get_loader'): + yield + + +@pytest.fixture +def package_manager(mock_docker_api, + 
mock_registry_resolver, + mock_service_creator, + fake_metadata_resolver, + fake_db, + fake_device_info): + yield PackageManager(mock_docker_api, mock_registry_resolver, + fake_db, fake_metadata_resolver, + mock_service_creator, + fake_device_info, + MagicMock()) + + +@pytest.fixture +def anything(): + """ Fixture that returns Any object that can be used in + assert_called_*_with to match any object passed. """ + + class Any: + def __eq__(self, other): + return True + + yield Any() diff --git a/tests/sonic_package_manager/test_cli.py b/tests/sonic_package_manager/test_cli.py new file mode 100644 index 000000000000..695d8cba58b8 --- /dev/null +++ b/tests/sonic_package_manager/test_cli.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python + +from click.testing import CliRunner + +from sonic_package_manager import main + + +def test_show_changelog(package_manager, fake_metadata_resolver): + """ Test case for "sonic-package-manager package show changelog [NAME]" """ + + runner = CliRunner() + changelog = { + "1.0.0": { + "changes": ["Initial release"], + "author": "Stepan Blyshchak", + "email": "stepanb@nvidia.com", + "date": "Mon, 25 May 2020 12:24:30 +0300" + }, + "1.1.0": { + "changes": [ + "Added functionality", + "Bug fixes" + ], + "author": "Stepan Blyshchak", + "email": "stepanb@nvidia.com", + "date": "Fri, 23 Oct 2020 12:26:08 +0300" + } + } + manifest = fake_metadata_resolver.metadata_store['Azure/docker-test']['1.6.0']['manifest'] + manifest['package']['changelog'] = changelog + + expected_output = """\ +1.0.0: + + • Initial release + + Stepan Blyshchak (stepanb@nvidia.com) Mon, 25 May 2020 12:24:30 +0300 + +1.1.0: + + • Added functionality + • Bug fixes + + Stepan Blyshchak (stepanb@nvidia.com) Fri, 23 Oct 2020 12:26:08 +0300 + +""" + + result = runner.invoke(main.show.commands['package'].commands['changelog'], + ['test-package'], obj=package_manager) + + assert result.exit_code == 0 + assert result.output == expected_output + + +def test_show_changelog_no_changelog(package_manager): + """ Test case for "sonic-package-manager package show changelog [NAME]" + when there is no changelog provided by package. 
""" + + runner = CliRunner() + result = runner.invoke(main.show.commands['package'].commands['changelog'], ['test-package'], obj=package_manager) + + assert result.exit_code == 1 + assert result.output == 'Failed to print package changelog: No changelog for package test-package\n' diff --git a/tests/sonic_package_manager/test_constraint.py b/tests/sonic_package_manager/test_constraint.py new file mode 100644 index 000000000000..1b34a301d299 --- /dev/null +++ b/tests/sonic_package_manager/test_constraint.py @@ -0,0 +1,76 @@ +#!/usr/bin/env python + +from sonic_package_manager import version +from sonic_package_manager.constraint import PackageConstraint +from sonic_package_manager.version import Version, VersionRange + + +def test_constraint(): + package_constraint = PackageConstraint.parse('swss>1.0.0') + assert package_constraint.name == 'swss' + assert not package_constraint.constraint.allows(Version.parse('0.9.1')) + assert package_constraint.constraint.allows(Version.parse('1.1.1')) + + +def test_constraint_range(): + package_constraint = PackageConstraint.parse('swss^1.2.0') + assert package_constraint.name == 'swss' + assert not package_constraint.constraint.allows(Version.parse('1.1.1')) + assert package_constraint.constraint.allows(Version.parse('1.2.5')) + assert not package_constraint.constraint.allows(Version.parse('2.0.1')) + + +def test_constraint_strict(): + package_constraint = PackageConstraint.parse('swss==1.2.0') + assert package_constraint.name == 'swss' + assert not package_constraint.constraint.allows(Version.parse('1.1.1')) + assert package_constraint.constraint.allows(Version.parse('1.2.0')) + + +def test_constraint_match(): + package_constraint = PackageConstraint.parse('swss==1.2*.*') + assert package_constraint.name == 'swss' + assert not package_constraint.constraint.allows(Version.parse('1.1.1')) + assert package_constraint.constraint.allows(Version.parse('1.2.0')) + + +def test_constraint_multiple(): + package_constraint = PackageConstraint.parse('swss>1.2.0,<3.0.0,!=2.2.2') + assert package_constraint.name == 'swss' + assert not package_constraint.constraint.allows(Version.parse('2.2.2')) + assert not package_constraint.constraint.allows(Version.parse('3.2.0')) + assert not package_constraint.constraint.allows(Version.parse('0.2.0')) + assert package_constraint.constraint.allows(Version.parse('2.2.3')) + assert package_constraint.constraint.allows(Version.parse('1.2.3')) + + +def test_constraint_only_name(): + package_constraint = PackageConstraint.parse('swss') + assert package_constraint.name == 'swss' + assert package_constraint.constraint == VersionRange() + + +def test_constraint_from_dict(): + package_constraint = PackageConstraint.parse({ + 'name': 'swss', + 'version': '^1.0.0', + 'components': { + 'libswsscommon': '^1.1.0', + }, + }) + assert package_constraint.name == 'swss' + assert package_constraint.constraint.allows(Version.parse('1.0.0')) + assert not package_constraint.constraint.allows(Version.parse('2.0.0')) + assert package_constraint.components['libswsscommon'].allows(Version.parse('1.2.0')) + assert not package_constraint.components['libswsscommon'].allows(Version.parse('1.0.0')) + assert not package_constraint.components['libswsscommon'].allows(Version.parse('2.0.0')) + + +def test_version_to_tag(): + assert version.version_to_tag(Version.parse('1.0.0-rc0')) == '1.0.0-rc0' + assert version.version_to_tag(Version.parse('1.0.0-rc0+152')) == '1.0.0-rc0_152' + + +def test_tag_to_version(): + assert str(version.tag_to_version('1.0.0-rc0_152')) 
== '1.0.0-rc0+152' + assert str(version.tag_to_version('1.0.0-rc0')) == '1.0.0-rc0' diff --git a/tests/sonic_package_manager/test_database.py b/tests/sonic_package_manager/test_database.py new file mode 100644 index 000000000000..1c565d6f4ce5 --- /dev/null +++ b/tests/sonic_package_manager/test_database.py @@ -0,0 +1,89 @@ +#!/usr/bin/env python + +import pytest + +from sonic_package_manager.database import PackageEntry +from sonic_package_manager.errors import ( + PackageNotFoundError, + PackageAlreadyExistsError, + PackageManagerError +) +from sonic_package_manager.version import Version + + +def test_database_get_package(fake_db): + swss_package = fake_db.get_package('swss') + assert swss_package.installed + assert swss_package.built_in + assert swss_package.repository == 'docker-orchagent' + assert swss_package.default_reference == '1.0.0' + assert swss_package.version == Version(1, 0, 0) + + +def test_database_get_package_not_builtin(fake_db): + test_package = fake_db.get_package('test-package') + assert not test_package.installed + assert not test_package.built_in + assert test_package.repository == 'Azure/docker-test' + assert test_package.default_reference == '1.6.0' + assert test_package.version is None + + +def test_database_get_package_not_existing(fake_db): + with pytest.raises(PackageNotFoundError): + fake_db.get_package('abc') + + +def test_database_add_package(fake_db): + fake_db.add_package('test-package-99', 'Azure/docker-test-99') + test_package = fake_db.get_package('test-package-99') + assert not test_package.installed + assert not test_package.built_in + assert test_package.repository == 'Azure/docker-test-99' + assert test_package.default_reference is None + assert test_package.version is None + + +def test_database_add_package_existing(fake_db): + with pytest.raises(PackageAlreadyExistsError): + fake_db.add_package('swss', 'Azure/docker-orchagent') + + +def test_database_update_package(fake_db): + test_package = fake_db.get_package('test-package-2') + test_package.installed = True + test_package.version = Version(1, 2, 3) + fake_db.update_package(test_package) + test_package = fake_db.get_package('test-package-2') + assert test_package.installed + assert test_package.version == Version(1, 2, 3) + + +def test_database_update_package_non_existing(fake_db): + test_package = PackageEntry('abc', 'abc') + with pytest.raises(PackageNotFoundError): + fake_db.update_package(test_package) + + +def test_database_remove_package(fake_db): + fake_db.remove_package('test-package') + assert not fake_db.has_package('test-package') + + +def test_database_remove_package_non_existing(fake_db): + with pytest.raises(PackageNotFoundError): + fake_db.remove_package('non-existing-package') + + +def test_database_remove_package_installed(fake_db): + with pytest.raises(PackageManagerError, + match='Package test-package-3 is installed, ' + 'uninstall it first'): + fake_db.remove_package('test-package-3') + + +def test_database_remove_package_built_in(fake_db): + with pytest.raises(PackageManagerError, + match='Package swss is built-in, ' + 'cannot remove it'): + fake_db.remove_package('swss') diff --git a/tests/sonic_package_manager/test_manager.py b/tests/sonic_package_manager/test_manager.py new file mode 100644 index 000000000000..c7eb1ca7ac45 --- /dev/null +++ b/tests/sonic_package_manager/test_manager.py @@ -0,0 +1,322 @@ +#!/usr/bin/env python + +from unittest.mock import Mock, call + +import pytest + +from sonic_package_manager.errors import * +from sonic_package_manager.version import 
Version + + +def test_installation_not_installed(package_manager): + package_manager.install('test-package') + package = package_manager.get_installed_package('test-package') + assert package.installed + assert package.entry.default_reference == '1.6.0' + + +def test_installation_already_installed(package_manager): + package_manager.install('test-package') + with pytest.raises(PackageManagerError, + match='1.6.0 is already installed'): + package_manager.install('test-package') + + +def test_installation_dependencies(package_manager, fake_metadata_resolver, mock_docker_api): + manifest = fake_metadata_resolver.metadata_store['Azure/docker-test']['1.6.0']['manifest'] + manifest['package']['depends'] = ['swss^2.0.0'] + with pytest.raises(PackageInstallationError, + match='Package test-package requires swss>=2.0.0,<3.0.0 ' + 'but version 1.0.0 is installed'): + package_manager.install('test-package') + + +def test_installation_dependencies_missing_package(package_manager, fake_metadata_resolver): + manifest = fake_metadata_resolver.metadata_store['Azure/docker-test']['1.6.0']['manifest'] + manifest['package']['depends'] = ['missing-package>=1.0.0'] + with pytest.raises(PackageInstallationError, + match='Package test-package requires ' + 'missing-package>=1.0.0 but it is not installed'): + package_manager.install('test-package') + + +def test_installation_dependencies_satisfied(package_manager, fake_metadata_resolver): + manifest = fake_metadata_resolver.metadata_store['Azure/docker-test']['1.6.0']['manifest'] + manifest['package']['depends'] = ['database>=1.0.0', 'swss>=1.0.0'] + package_manager.install('test-package') + + +def test_installation_components_dependencies_satisfied(package_manager, fake_metadata_resolver): + metadata = fake_metadata_resolver.metadata_store['Azure/docker-test']['1.6.0'] + manifest = metadata['manifest'] + metadata['components'] = { + 'libswsscommon': Version.parse('1.1.0') + } + manifest['package']['depends'] = [ + { + 'name': 'swss', + 'version': '>=1.0.0', + 'components': { + 'libswsscommon': '^1.0.0', + }, + }, + ] + package_manager.install('test-package') + + +def test_installation_components_dependencies_not_satisfied(package_manager, fake_metadata_resolver): + metadata = fake_metadata_resolver.metadata_store['Azure/docker-test']['1.6.0'] + manifest = metadata['manifest'] + metadata['components'] = { + 'libswsscommon': Version.parse('1.1.0') + } + manifest['package']['depends'] = [ + { + 'name': 'swss', + 'version': '>=1.0.0', + 'components': { + 'libswsscommon': '^1.1.0', + }, + }, + ] + with pytest.raises(PackageInstallationError, + match='Package test-package requires libswsscommon >=1.1.0,<2.0.0 ' + 'in package swss>=1.0.0 but version 1.0.0 is installed'): + package_manager.install('test-package') + + +def test_installation_components_dependencies_implicit(package_manager, fake_metadata_resolver): + metadata = fake_metadata_resolver.metadata_store['Azure/docker-test']['1.6.0'] + manifest = metadata['manifest'] + metadata['components'] = { + 'libswsscommon': Version.parse('2.1.0') + } + manifest['package']['depends'] = [ + { + 'name': 'swss', + 'version': '>=1.0.0', + }, + ] + with pytest.raises(PackageInstallationError, + match='Package test-package requires libswsscommon >=2.1.0,<3.0.0 ' + 'in package swss>=1.0.0 but version 1.0.0 is installed'): + package_manager.install('test-package') + + +def test_installation_components_dependencies_explicitely_allowed(package_manager, fake_metadata_resolver): + metadata = 
fake_metadata_resolver.metadata_store['Azure/docker-test']['1.6.0'] + manifest = metadata['manifest'] + metadata['components'] = { + 'libswsscommon': Version.parse('2.1.0') + } + manifest['package']['depends'] = [ + { + 'name': 'swss', + 'version': '>=1.0.0', + 'components': { + 'libswsscommon': '>=1.0.0,<3.0.0' + } + }, + ] + package_manager.install('test-package') + + +def test_installation_breaks(package_manager, fake_metadata_resolver): + manifest = fake_metadata_resolver.metadata_store['Azure/docker-test']['1.6.0']['manifest'] + manifest['package']['breaks'] = ['swss^1.0.0'] + with pytest.raises(PackageInstallationError, + match='Package test-package conflicts with ' + 'swss>=1.0.0,<2.0.0 but version 1.0.0 is installed'): + package_manager.install('test-package') + + +def test_installation_breaks_missing_package(package_manager, fake_metadata_resolver): + manifest = fake_metadata_resolver.metadata_store['Azure/docker-test']['1.6.0']['manifest'] + manifest['package']['breaks'] = ['missing-package^1.0.0'] + package_manager.install('test-package') + + +def test_installation_breaks_not_installed_package(package_manager, fake_metadata_resolver): + manifest = fake_metadata_resolver.metadata_store['Azure/docker-test']['1.6.0']['manifest'] + manifest['package']['breaks'] = ['test-package-2^1.0.0'] + package_manager.install('test-package') + + +def test_installation_base_os_constraint(package_manager, fake_metadata_resolver): + manifest = fake_metadata_resolver.metadata_store['Azure/docker-test']['1.6.0']['manifest'] + manifest['package']['base-os']['libswsscommon'] = '>=2.0.0' + with pytest.raises(PackageSonicRequirementError, + match='Package test-package requires base OS component libswsscommon ' + 'version >=2.0.0 while the installed version is 1.0.0'): + package_manager.install('test-package') + + +def test_installation_base_os_constraint_satisfied(package_manager, fake_metadata_resolver): + manifest = fake_metadata_resolver.metadata_store['Azure/docker-test']['1.6.0']['manifest'] + manifest['package']['base-os']['libswsscommon'] = '>=1.0.0' + package_manager.install('test-package') + + +def test_installation_cli_plugin(package_manager, fake_metadata_resolver, anything): + manifest = fake_metadata_resolver.metadata_store['Azure/docker-test']['1.6.0']['manifest'] + manifest['cli']= {'show': '/cli/plugin.py'} + package_manager._install_cli_plugins = Mock() + package_manager.install('test-package') + package_manager._install_cli_plugins.assert_called_once_with(anything) + + +def test_installation_cli_plugin_skipped(package_manager, fake_metadata_resolver, anything): + manifest = fake_metadata_resolver.metadata_store['Azure/docker-test']['1.6.0']['manifest'] + manifest['cli']= {'show': '/cli/plugin.py'} + package_manager._install_cli_plugins = Mock() + package_manager.install('test-package', skip_host_plugins=True) + package_manager._install_cli_plugins.assert_not_called() + + +def test_installation_cli_plugin_is_mandatory_but_skipped(package_manager, fake_metadata_resolver): + manifest = fake_metadata_resolver.metadata_store['Azure/docker-test']['1.6.0']['manifest'] + manifest['cli']= {'mandatory': True} + with pytest.raises(PackageManagerError, + match='CLI is mandatory for package test-package but ' + 'it was requested to be not installed'): + package_manager.install('test-package', skip_host_plugins=True) + + +def test_installation(package_manager, mock_docker_api, anything): + package_manager.install('test-package') + mock_docker_api.pull.assert_called_once_with('Azure/docker-test', 
'1.6.0') + + +def test_installation_using_reference(package_manager, + fake_metadata_resolver, + mock_docker_api, + anything): + ref = 'sha256:9780f6d83e45878749497a6297ed9906c19ee0cc48cc88dc63827564bb8768fd' + metadata = fake_metadata_resolver.metadata_store['Azure/docker-test']['1.6.0'] + fake_metadata_resolver.metadata_store['Azure/docker-test'][ref] = metadata + + package_manager.install(f'test-package@{ref}') + mock_docker_api.pull.assert_called_once_with('Azure/docker-test', f'{ref}') + + +def test_manager_installation_tag(package_manager, + mock_docker_api, + anything): + package_manager.install(f'test-package=1.6.0') + mock_docker_api.pull.assert_called_once_with('Azure/docker-test', '1.6.0') + + +def test_installation_from_file(package_manager, mock_docker_api, sonic_fs): + sonic_fs.create_file('Azure/docker-test:1.6.0') + package_manager.install(tarball='Azure/docker-test:1.6.0') + mock_docker_api.load.assert_called_once_with('Azure/docker-test:1.6.0') + + +def test_installation_from_registry(package_manager, mock_docker_api): + package_manager.install(repotag='Azure/docker-test:1.6.0') + mock_docker_api.pull.assert_called_once_with('Azure/docker-test', '1.6.0') + + +def test_installation_from_registry_using_digest(package_manager, mock_docker_api, fake_metadata_resolver): + ref = 'sha256:9780f6d83e45878749497a6297ed9906c19ee0cc48cc88dc63827564bb8768fd' + metadata = fake_metadata_resolver.metadata_store['Azure/docker-test']['1.6.0'] + fake_metadata_resolver.metadata_store['Azure/docker-test'][ref] = metadata + + ref = 'sha256:9780f6d83e45878749497a6297ed9906c19ee0cc48cc88dc63827564bb8768fd' + package_manager.install(repotag=f'Azure/docker-test@{ref}') + mock_docker_api.pull.assert_called_once_with('Azure/docker-test', ref) + + +def test_installation_from_file_known_package(package_manager, fake_db, sonic_fs): + repository = fake_db.get_package('test-package').repository + sonic_fs.create_file('Azure/docker-test:1.6.0') + package_manager.install(tarball='Azure/docker-test:1.6.0') + # locally installed package does not override already known package repository + assert repository == fake_db.get_package('test-package').repository + + +def test_installation_from_file_unknown_package(package_manager, fake_db, sonic_fs): + assert not fake_db.has_package('test-package-4') + sonic_fs.create_file('Azure/docker-test-4:1.5.0') + package_manager.install(tarball='Azure/docker-test-4:1.5.0') + assert fake_db.has_package('test-package-4') + + +def test_upgrade_from_file_known_package(package_manager, fake_db, sonic_fs): + repository = fake_db.get_package('test-package-6').repository + # install older version from repository + package_manager.install('test-package-6=1.5.0') + # upgrade from file + sonic_fs.create_file('Azure/docker-test-6:2.0.0') + package_manager.install(tarball='Azure/docker-test-6:2.0.0') + # locally installed package does not override already known package repository + assert repository == fake_db.get_package('test-package-6').repository + + +def test_installation_non_default_owner(package_manager, anything, mock_service_creator): + package_manager.install('test-package', default_owner='kube') + mock_service_creator.create.assert_called_once_with(anything, state='disabled', owner='kube') + + +def test_installation_enabled(package_manager, anything, mock_service_creator): + package_manager.install('test-package', enable=True) + mock_service_creator.create.assert_called_once_with(anything, state='enabled', owner='local') + + +def test_installation_fault(package_manager, 
+                            mock_docker_api, mock_service_creator):
+    # make service creation fail
+    mock_service_creator.create = Mock(side_effect=Exception('Failed to create service'))
+    # 'rmi' is called on rollback
+    mock_docker_api.rmi = Mock(side_effect=Exception('Failed to remove image'))
+    # assert that the rollback does not hide the original failure.
+    with pytest.raises(Exception, match='Failed to create service'):
+        package_manager.install('test-package')
+    mock_docker_api.rmi.assert_called_once()
+
+
+def test_manager_installation_version_range(package_manager):
+    with pytest.raises(PackageManagerError,
+                       match='Can only install specific version. '
+                             'Use only following expression "test-package=" '
+                             'to install specific version'):
+        package_manager.install('test-package>=1.6.0')
+
+
+def test_manager_upgrade(package_manager, sonic_fs):
+    package_manager.install('test-package-6=1.5.0')
+    package = package_manager.get_installed_package('test-package-6')
+
+    package_manager.install('test-package-6=2.0.0')
+    upgraded_package = package_manager.get_installed_package('test-package-6')
+    assert upgraded_package.entry.version == Version(2, 0, 0)
+    assert upgraded_package.entry.default_reference == package.entry.default_reference
+
+
+def test_manager_package_reset(package_manager, sonic_fs):
+    package_manager.install('test-package-6=1.5.0')
+    package_manager.install('test-package-6=2.0.0')
+
+    package_manager.reset('test-package-6')
+    upgraded_package = package_manager.get_installed_package('test-package-6')
+    assert upgraded_package.entry.version == Version(1, 5, 0)
+
+
+def test_manager_migration(package_manager, fake_db_for_migration):
+    package_manager.install = Mock()
+    package_manager.migrate_packages(fake_db_for_migration)
+
+    package_manager.install.assert_has_calls([
+        # test-package-3 was installed, but a newer version is installed
+        # in fake_db_for_migration; assert that migration upgrades it
+        call('test-package-3=1.6.0'),
+        # test-package-4 was not present in the DB at all, but it is present and installed in
+        # fake_db_for_migration; assert that it is going to be installed.
+        call('test-package-4=1.5.0'),
+        # test-package-5 1.5.0 was installed in fake_db_for_migration but the default
+        # in current db is 1.9.0, assert that migration will install the newer version.
+        call('test-package-5=1.9.0'),
+        # test-package-6 2.0.0 was installed in fake_db_for_migration but the default
+        # in current db is 1.5.0, assert that migration will install the newer version.
+        call('test-package-6=2.0.0')],
+        any_order=True
+    )
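
The comments in test_manager_migration above capture the rule migrate_packages is expected to follow: for every package, install the newer of the version recorded in the database being migrated and the default version in the current database. A minimal, self-contained sketch of that selection rule (illustrative only; pick_migration_version and the tuple-style versions below are not part of the sonic-package-manager API):

    def pick_migration_version(version_in_old_db, version_in_current_db):
        """Return the version migration should install: the newer of the two."""
        return max(version_in_old_db, version_in_current_db)

    # mirrors the test-package-5 and test-package-6 cases asserted above
    assert pick_migration_version((1, 5, 0), (1, 9, 0)) == (1, 9, 0)
    assert pick_migration_version((2, 0, 0), (1, 5, 0)) == (2, 0, 0)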
diff --git a/tests/sonic_package_manager/test_manifest.py b/tests/sonic_package_manager/test_manifest.py
new file mode 100644
index 000000000000..efdcc558ab8b
--- /dev/null
+++ b/tests/sonic_package_manager/test_manifest.py
@@ -0,0 +1,74 @@
+#!/usr/bin/env python
+
+import pytest
+
+from sonic_package_manager.constraint import ComponentConstraints
+from sonic_package_manager.manifest import Manifest, ManifestError
+from sonic_package_manager.version import VersionRange
+
+
+def test_manifest_v1_defaults():
+    manifest = Manifest.marshal({'package': {'name': 'test',
+                                             'version': '1.0.0'},
+                                 'service': {'name': 'test'}})
+    assert manifest['package']['depends'] == []
+    assert manifest['package']['breaks'] == []
+    assert manifest['package']['base-os'] == ComponentConstraints()
+    assert not manifest['service']['asic-service']
+    assert manifest['service']['host-service']
+
+
+def test_manifest_v1_invalid_version():
+    with pytest.raises(ManifestError):
+        Manifest.marshal({'package': {'version': 'abc', 'name': 'test'},
+                          'service': {'name': 'test'}})
+
+
+def test_manifest_v1_invalid_package_constraint():
+    with pytest.raises(ManifestError):
+        Manifest.marshal({'package': {'name': 'test', 'version': '1.0.0',
+                                      'depends': ['swss>a']},
+                          'service': {'name': 'test'}})
+
+
+def test_manifest_v1_service_spec():
+    manifest = Manifest.marshal({'package': {'name': 'test',
+                                             'version': '1.0.0'},
+                                 'service': {'name': 'test', 'asic-service': True}})
+    assert manifest['service']['asic-service']
+
+
+def test_manifest_v1_mounts():
+    manifest = Manifest.marshal({'version': '1.0.0', 'package': {'name': 'test',
+                                                                 'version': '1.0.0'},
+                                 'service': {'name': 'cpu-report'},
+                                 'container': {'privileged': True,
+                                               'mounts': [{'source': 'a', 'target': 'b', 'type': 'bind'}]}})
+    assert manifest['container']['mounts'][0]['source'] == 'a'
+    assert manifest['container']['mounts'][0]['target'] == 'b'
+    assert manifest['container']['mounts'][0]['type'] == 'bind'
+
+
+def test_manifest_v1_mounts_invalid():
+    with pytest.raises(ManifestError):
+        Manifest.marshal({'version': '1.0.0', 'package': {'name': 'test', 'version': '1.0.0'},
+                          'service': {'name': 'cpu-report'},
+                          'container': {'privileged': True,
+                                        'mounts': [{'not-source': 'a', 'target': 'b', 'type': 'bind'}]}})
+
+
+def test_manifest_v1_unmarshal():
+    manifest_json_input = {'package': {'name': 'test', 'version': '1.0.0',
+                                       'depends': [
+                                           {
+                                               'name': 'swss',
+                                               'version': '>1.0.0',
+                                               'components': {},
+                                           }
+                                       ]},
+                           'service': {'name': 'test'}}
+    manifest = Manifest.marshal(manifest_json_input)
+    manifest_json = manifest.unmarshal()
+    for key, section in manifest_json_input.items():
+        for field, value in section.items():
+            assert manifest_json[key][field] == value
diff --git a/tests/sonic_package_manager/test_metadata.py b/tests/sonic_package_manager/test_metadata.py
new file mode 100644
index 000000000000..aee2f49428f5
--- /dev/null
+++ b/tests/sonic_package_manager/test_metadata.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python
+
+import contextlib
+from unittest.mock import Mock, MagicMock
+
+from sonic_package_manager.database import PackageEntry
+from sonic_package_manager.errors import MetadataError
+from sonic_package_manager.metadata import MetadataResolver
+from sonic_package_manager.version import Version
+
+
+def test_metadata_resolver_local(mock_registry_resolver, mock_docker_api):
+    metadata_resolver = MetadataResolver(mock_docker_api, mock_registry_resolver)
+    # it raises exception because mock manifest is not a valid manifest
+    # but this is not a test objective, so just suppress the error.
+    with contextlib.suppress(MetadataError):
+        metadata_resolver.from_local('image')
+    mock_docker_api.labels.assert_called_once()
+
+
+def test_metadata_resolver_remote(mock_registry_resolver, mock_docker_api):
+    metadata_resolver = MetadataResolver(mock_docker_api, mock_registry_resolver)
+    mock_registry = MagicMock()
+    mock_registry.manifest = MagicMock(return_value={'config': {'digest': 'some-digest'}})
+
+    def return_mock_registry(repository):
+        return mock_registry
+
+    mock_registry_resolver.get_registry_for = Mock(side_effect=return_mock_registry)
+    # it raises exception because mock manifest is not a valid manifest
+    # but this is not a test objective, so just suppress the error.
+    with contextlib.suppress(MetadataError):
+        metadata_resolver.from_registry('test-repository', '1.2.0')
+    mock_registry_resolver.get_registry_for.assert_called_once_with('test-repository')
+    mock_registry.manifest.assert_called_once_with('test-repository', '1.2.0')
+    mock_registry.blobs.assert_called_once_with('test-repository', 'some-digest')
+    mock_docker_api.labels.assert_not_called()
diff --git a/tests/sonic_package_manager/test_reference.py b/tests/sonic_package_manager/test_reference.py
new file mode 100644
index 000000000000..043b66ddd5d7
--- /dev/null
+++ b/tests/sonic_package_manager/test_reference.py
@@ -0,0 +1,18 @@
+#!/usr/bin/env python
+
+import pytest
+
+from sonic_package_manager.reference import PackageReference
+
+
+def test_reference():
+    package_constraint = PackageReference.parse(
+        'swss@sha256:9780f6d83e45878749497a6297ed9906c19ee0cc48cc88dc63827564bb8768fd'
+    )
+    assert package_constraint.name == 'swss'
+    assert package_constraint.reference == 'sha256:9780f6d83e45878749497a6297ed9906c19ee0cc48cc88dc63827564bb8768fd'
+
+
+def test_reference_invalid():
+    with pytest.raises(ValueError):
+        PackageReference.parse('swssfdsf')
diff --git a/tests/sonic_package_manager/test_registry.py b/tests/sonic_package_manager/test_registry.py
new file mode 100644
index 000000000000..0d82499df3d8
--- /dev/null
+++ b/tests/sonic_package_manager/test_registry.py
@@ -0,0 +1,15 @@
+#!/usr/bin/env python
+
+from sonic_package_manager.registry import RegistryResolver
+
+
+def test_get_registry_for():
+    resolver = RegistryResolver()
+    registry = resolver.get_registry_for('debian')
+    assert registry is resolver.DockerHubRegistry
+    registry = resolver.get_registry_for('Azure/sonic')
+    assert registry is resolver.DockerHubRegistry
+    registry = resolver.get_registry_for('registry-server:5000/docker')
+    assert registry.url == 'https://registry-server:5000'
+    registry = resolver.get_registry_for('registry-server.com/docker')
+    assert registry.url == 'https://registry-server.com'
diff --git a/tests/sonic_package_manager/test_service_creator.py b/tests/sonic_package_manager/test_service_creator.py
new file mode 100644
index 000000000000..fec8de600cc7
--- /dev/null
+++ b/tests/sonic_package_manager/test_service_creator.py
@@ -0,0 +1,171 @@
+#!/usr/bin/env python
+
+import os
+from unittest.mock import Mock, MagicMock
+
+import pytest
+
+from sonic_package_manager.database import PackageEntry
+from sonic_package_manager.manifest import Manifest
+from sonic_package_manager.metadata import Metadata
+from sonic_package_manager.package import Package
+from sonic_package_manager.service_creator.creator import *
+from sonic_package_manager.service_creator.feature import FeatureRegistry
+
+
+@pytest.fixture
+def manifest():
+    return Manifest.marshal({
+        'package': {
+            'name': 'test',
+            'version': '1.0.0',
+        },
+        'service': {
+            'name': 'test',
+            'requires': ['database'],
+            'after': ['database', 'swss', 'syncd'],
+            'before': ['ntp-config'],
+            'dependent-of': ['swss'],
+            'asic-service': False,
+            'host-service': True,
+        },
+        'container': {
+            'privileged': True,
+            'volumes': [
+                '/etc/sonic:/etc/sonic:ro'
+            ]
+        }
+    })
+
+
+def test_service_creator(sonic_fs, manifest, mock_feature_registry, mock_sonic_db):
+    creator = ServiceCreator(mock_feature_registry, mock_sonic_db)
+    entry = PackageEntry('test', 'azure/sonic-test')
+    package = Package(entry, Metadata(manifest))
+    creator.create(package)
+
+    assert sonic_fs.exists(os.path.join(ETC_SONIC_PATH, 'swss_dependent'))
+    assert sonic_fs.exists(os.path.join(DOCKER_CTL_SCRIPT_LOCATION, 'test.sh'))
+    assert sonic_fs.exists(os.path.join(SERVICE_MGMT_SCRIPT_LOCATION, 'test.sh'))
+    assert sonic_fs.exists(os.path.join(SYSTEMD_LOCATION, 'test.service'))
+
+
+def test_service_creator_with_timer_unit(sonic_fs, manifest, mock_feature_registry, mock_sonic_db):
+    creator = ServiceCreator(mock_feature_registry, mock_sonic_db)
+    entry = PackageEntry('test', 'azure/sonic-test')
+    package = Package(entry, Metadata(manifest))
+    creator.create(package)
+
+    assert not sonic_fs.exists(os.path.join(SYSTEMD_LOCATION, 'test.timer'))
+
+    manifest['service']['delayed'] = True
+    package = Package(entry, Metadata(manifest))
+    creator.create(package)
+
+    assert sonic_fs.exists(os.path.join(SYSTEMD_LOCATION, 'test.timer'))
+
+
+def test_service_creator_with_debug_dump(sonic_fs, manifest, mock_feature_registry, mock_sonic_db):
+    creator = ServiceCreator(mock_feature_registry, mock_sonic_db)
+    entry = PackageEntry('test', 'azure/sonic-test')
+    package = Package(entry, Metadata(manifest))
+    creator.create(package)
+
+    assert not sonic_fs.exists(os.path.join(DEBUG_DUMP_SCRIPT_LOCATION, 'test'))
+
+    manifest['package']['debug-dump'] = '/some/command'
+    package = Package(entry, Metadata(manifest))
+    creator.create(package)
+
+    assert sonic_fs.exists(os.path.join(DEBUG_DUMP_SCRIPT_LOCATION, 'test'))
+
+
+def test_service_creator_initial_config(sonic_fs, manifest, mock_feature_registry, mock_sonic_db):
+    mock_table = Mock()
+    mock_table.get = Mock(return_value=(True, (('field_2', 'original_value_2'),)))
+    mock_sonic_db.initial_table = Mock(return_value=mock_table)
+    mock_sonic_db.persistent_table = Mock(return_value=mock_table)
+    mock_sonic_db.running_table = Mock(return_value=mock_table)
+
+    creator = ServiceCreator(mock_feature_registry, mock_sonic_db)
+
+    entry = PackageEntry('test', 'azure/sonic-test')
+    package = Package(entry, Metadata(manifest))
+    creator.create(package)
+
+    assert not sonic_fs.exists(os.path.join(DEBUG_DUMP_SCRIPT_LOCATION, 'test'))
+
+    manifest['package']['init-cfg'] = {
+        'TABLE_A': {
+            'key_a': {
+                'field_1': 'value_1',
+                'field_2': 'value_2'
+            },
+        },
+    }
+    package = Package(entry, Metadata(manifest))
+
+    creator.create(package)
+    mock_table.set.assert_called_with('key_a', [('field_1', 'value_1'),
+                                                ('field_2', 'original_value_2')])
+
+    creator.remove(package)
+    mock_table._del.assert_called_with('key_a')
+
+
+def test_feature_registration(mock_sonic_db, manifest):
+    mock_feature_table = Mock()
+    mock_feature_table.get = Mock(return_value=(False, ()))
+    mock_sonic_db.initial_table = Mock(return_value=mock_feature_table)
+    mock_sonic_db.persistent_table = Mock(return_value=mock_feature_table)
+    mock_sonic_db.running_table = Mock(return_value=mock_feature_table)
+    feature_registry = FeatureRegistry(mock_sonic_db)
+    feature_registry.register(manifest)
+    mock_feature_table.set.assert_called_with('test', [
+        ('state', 'disabled'),
+        ('auto_restart', 'enabled'),
+        ('high_mem_alert', 'disabled'),
+        ('set_owner', 'local'),
+        ('has_per_asic_scope', 'False'),
+        ('has_global_scope', 'True'),
+        ('has_timer', 'False'),
+    ])
+
+
+def test_feature_registration_with_timer(mock_sonic_db, manifest):
+    manifest['service']['delayed'] = True
+    mock_feature_table = Mock()
+    mock_feature_table.get = Mock(return_value=(False, ()))
+    mock_sonic_db.initial_table = Mock(return_value=mock_feature_table)
+    mock_sonic_db.persistent_table = Mock(return_value=mock_feature_table)
+    mock_sonic_db.running_table = Mock(return_value=mock_feature_table)
+    feature_registry = FeatureRegistry(mock_sonic_db)
+    feature_registry.register(manifest)
+    mock_feature_table.set.assert_called_with('test', [
+        ('state', 'disabled'),
+        ('auto_restart', 'enabled'),
+        ('high_mem_alert', 'disabled'),
+        ('set_owner', 'local'),
+        ('has_per_asic_scope', 'False'),
+        ('has_global_scope', 'True'),
+        ('has_timer', 'True'),
+    ])
+
+
+def test_feature_registration_with_non_default_owner(mock_sonic_db, manifest):
+    mock_feature_table = Mock()
+    mock_feature_table.get = Mock(return_value=(False, ()))
+    mock_sonic_db.initial_table = Mock(return_value=mock_feature_table)
+    mock_sonic_db.persistent_table = Mock(return_value=mock_feature_table)
+    mock_sonic_db.running_table = Mock(return_value=mock_feature_table)
+    feature_registry = FeatureRegistry(mock_sonic_db)
+    feature_registry.register(manifest, owner='kube')
+    mock_feature_table.set.assert_called_with('test', [
+        ('state', 'disabled'),
+        ('auto_restart', 'enabled'),
+        ('high_mem_alert', 'disabled'),
+        ('set_owner', 'kube'),
+        ('has_per_asic_scope', 'False'),
+        ('has_global_scope', 'True'),
+        ('has_timer', 'False'),
+    ])
diff --git a/tests/sonic_package_manager/test_utils.py b/tests/sonic_package_manager/test_utils.py
new file mode 100644
index 000000000000..c4d8b158408c
--- /dev/null
+++ b/tests/sonic_package_manager/test_utils.py
@@ -0,0 +1,8 @@
+#!/usr/bin/env python
+
+from sonic_package_manager import utils
+
+
+def test_make_python_identifier():
+    assert utils.make_python_identifier('-some-package name').isidentifier()
+    assert utils.make_python_identifier('01 leading digit').isidentifier()
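
test_utils.py above only checks that make_python_identifier returns a valid Python identifier; the helper itself lives in sonic_package_manager/utils.py and is not shown in this excerpt. A minimal standard-library sketch that would satisfy these assertions (an illustration, not the actual implementation):

    import keyword
    import re


    def make_python_identifier(string):
        """Best-effort conversion of an arbitrary string into a valid Python identifier."""
        # replace invalid characters with underscores and prefix an underscore
        # when the result would otherwise start with a digit
        candidate = re.sub(r'\W|^(?=\d)', '_', string.strip())
        # avoid colliding with reserved words
        if keyword.iskeyword(candidate):
            candidate += '_'
        return candidate


    assert make_python_identifier('-some-package name').isidentifier()
    assert make_python_identifier('01 leading digit').isidentifier()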