From 012e3be697a7eef25e18c64f70b434393541e12d Mon Sep 17 00:00:00 2001
From: crayon <873217631@qq.com>
Date: Fri, 23 Dec 2022 15:20:01 +0800
Subject: [PATCH] =?UTF-8?q?feature:=20=E6=8F=90=E4=BE=9B=202.0=20Agent=20&?=
 =?UTF-8?q?=20Proxy=20=E5=AE=89=E8=A3=85=E5=8C=85=E5=88=B6=E4=BD=9C?=
 =?UTF-8?q?=E8=83=BD=E5=8A=9B=20(closed=20#1176)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../agent/artifact_builder/__init__.py        |  10 +
 apps/backend/agent/artifact_builder/agent.py  |  38 ++
 apps/backend/agent/artifact_builder/base.py   | 450 ++++++++++++++++++
 apps/backend/agent/artifact_builder/proxy.py  |  99 ++++
 apps/backend/agent/tools.py                   |   5 +-
 .../components/collections/agent_new/base.py  |  18 +-
 .../agent_new/run_upgrade_command.py          |  22 +-
 apps/backend/exceptions.py                    |   9 +-
 .../management/commands/init_agents.py        |  82 ++++
 .../commands/init_official_plugins.py         |  97 ++--
 apps/backend/plugin/views.py                  |   4 +-
 .../config_context/context_helper.py          |  16 +-
 .../tests/agent/artifact_builder/__init__.py  |  10 +
 .../agent/artifact_builder/test_agent.py      |  59 +++
 .../artifact_builder/test_manage_commands.py  |  67 +++
 .../agent/artifact_builder/test_proxy.py      |  22 +
 apps/backend/tests/agent/utils.py             | 263 ++++++++++
 .../agent_new/test_run_upgrade_command.py     |  11 +-
 .../commands/test_copy_file_to_nginx.py       |   4 +-
 .../tests/plugin/test_manage_commands.py      |   5 +-
 .../plugin/views/test_plugin_production.py    |   5 +-
 apps/mock_data/utils.py                       |  64 +++
 apps/node_man/constants.py                    |  56 +++
 apps/node_man/models.py                       |   6 +-
 apps/utils/files.py                           |  12 +
 config/default.py                             |   1 +
 env/__init__.py                               |   4 +-
 official_plugin/gse_agent/.gitkeep            |   0
 official_plugin/gse_proxy/.gitkeep            |   0
 .../kubernetes/helm/bk-nodeman/Chart.lock     |  14 +-
 .../kubernetes/helm/bk-nodeman/Chart.yaml     |   8 +-
 .../kubernetes/helm/bk-nodeman/README.md      | 206 ++++----
 .../helm/bk-nodeman/templates/NOTES.txt       |  18 +-
 .../helm/bk-nodeman/templates/_helpers.tpl    |  34 ++
 .../templates/backend-api/deployment.yaml     |  10 +-
 .../backend-celery/baworker-deployment.yaml   |  10 +-
 .../backend-celery/bworker-deployment.yaml    |  10 +-
 .../celery-beat-deployment.yaml               |  10 +-
 .../common-pworker-deployment.yaml            |  10 +-
 .../common-worker-deployment.yaml             |  10 +-
 .../backend-celery/dworker-deployment.yaml    |  10 +-
 .../backend-celery/paworker-deployment.yaml   |  10 +-
 .../backend-celery/psworker-deployment.yaml   |  10 +-
 .../backend-celery/pworker-deployment.yaml    |  10 +-
 .../templates/configmaps/env-configmap.yaml   |   1 +
 .../configmaps/gse-cert-configmap.yaml        |  37 ++
 .../migrate-jobs/db-migrate-job.yaml          |   8 +-
 .../migrate-jobs/file-sync-migrate-job.yaml   |   8 +-
 .../templates/saas-api/deployment.yaml        |  10 +-
 .../sync-host-deployment.yaml                 |  10 +-
 .../sync-host-re-deployment.yaml              |  10 +-
 .../sync-process-deployment.yaml              |  10 +-
 .../sync-watch-deployment.yaml                |  10 +-
 .../bk-nodeman/templates/web/deployment.yaml  |  24 +-
 .../kubernetes/helm/bk-nodeman/values.yaml    |  38 ++
 .../images/family_bucket/Dockerfile           |   1 +
 56 files changed, 1654 insertions(+), 332 deletions(-)
 create mode 100644 apps/backend/agent/artifact_builder/__init__.py
 create mode 100644 apps/backend/agent/artifact_builder/agent.py
 create mode 100644 apps/backend/agent/artifact_builder/base.py
 create mode 100644 apps/backend/agent/artifact_builder/proxy.py
 create mode 100644 apps/backend/management/commands/init_agents.py
 create mode 100644 apps/backend/tests/agent/artifact_builder/__init__.py
 create mode 100644 apps/backend/tests/agent/artifact_builder/test_agent.py
 create mode 100644 apps/backend/tests/agent/artifact_builder/test_manage_commands.py
 create mode 100644 apps/backend/tests/agent/artifact_builder/test_proxy.py
 create mode 100644 apps/backend/tests/agent/utils.py
 create mode 100644 official_plugin/gse_agent/.gitkeep
 create mode 100644 official_plugin/gse_proxy/.gitkeep
 create mode 100644 support-files/kubernetes/helm/bk-nodeman/templates/configmaps/gse-cert-configmap.yaml

diff --git a/apps/backend/agent/artifact_builder/__init__.py b/apps/backend/agent/artifact_builder/__init__.py
new file mode 100644
index 000000000..29ed269e0
--- /dev/null
+++ b/apps/backend/agent/artifact_builder/__init__.py
@@ -0,0 +1,10 @@
+# -*- coding: utf-8 -*-
+"""
+TencentBlueKing is pleased to support the open source community by making 蓝鲸智云-节点管理(BlueKing-BK-NODEMAN) available.
+Copyright (C) 2017-2022 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at https://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
diff --git a/apps/backend/agent/artifact_builder/agent.py b/apps/backend/agent/artifact_builder/agent.py
new file mode 100644
index 000000000..0d598df4a
--- /dev/null
+++ b/apps/backend/agent/artifact_builder/agent.py
@@ -0,0 +1,38 @@
+# -*- coding: utf-8 -*-
+"""
+TencentBlueKing is pleased to support the open source community by making 蓝鲸智云-节点管理(BlueKing-BK-NODEMAN) available.
+Copyright (C) 2017-2022 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at https://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+import logging
+import os
+import tarfile
+import typing
+
+from apps.node_man import constants
+
+from . import base
+
+logger = logging.getLogger("app")
+
+
+class AgentArtifactBuilder(base.BaseArtifactBuilder):
+
+    NAME = constants.GsePackageCode.AGENT.value
+    PKG_DIR = constants.GsePackageDir.AGENT.value
+    CERT_FILENAMES = [constants.GseCert.CA.value, constants.GseCert.AGENT_CERT.value, constants.GseCert.AGENT_KEY.value]
+
+    def extract_initial_artifact(self, initial_artifact_local_path: str, extract_dir: str):
+        with tarfile.open(name=initial_artifact_local_path) as tf:
+            tf.extractall(path=extract_dir)
+        extract_dir: str = os.path.join(extract_dir, self.BASE_PKG_DIR)
+        self._inject_dependencies(extract_dir)
+        return extract_dir
+
+    def _get_support_files_info(self, extract_dir: str) -> typing.Dict[str, typing.Any]:
+        # Agent 包管理实现
+        pass
diff --git a/apps/backend/agent/artifact_builder/base.py b/apps/backend/agent/artifact_builder/base.py
new file mode 100644
index 000000000..b4fc3f553
--- /dev/null
+++ b/apps/backend/agent/artifact_builder/base.py
@@ -0,0 +1,450 @@
+# -*- coding: utf-8 -*-
+"""
+TencentBlueKing is pleased to support the open source community by making 蓝鲸智云-节点管理(BlueKing-BK-NODEMAN) available.
+Copyright (C) 2017-2022 THL A29 Limited, a Tencent company. All rights reserved.
+Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
+You may obtain a copy of the License at https://opensource.org/licenses/MIT
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+"""
+
+import abc
+import logging
+import os
+import shutil
+import tarfile
+import typing
+
+from django.conf import settings
+from django.utils.translation import ugettext_lazy as _
+
+from apps.backend import exceptions
+from apps.core.files.storage import get_storage
+from apps.node_man import constants
+from apps.utils import cache, files
+
+logger = logging.getLogger("app")
+
+
+class BaseArtifactBuilder(abc.ABC):
+
+    # 最终的制品名称
+    NAME: str = None
+    # 安装包的打包根路径
+    PKG_DIR: str = None
+    # 安装包打包前的归档路径模版
+    PKG_PATH_DIR_TEMPL: str = "agent_{os}_{cpu_arch}"
+    # 原始制品的打包根路径
+    BASE_PKG_DIR: str = "gse"
+    # 证书文件名列表
+    CERT_FILENAMES: typing.List[str] = None
+
+    # 制品存储的根路径
+    BASE_STORAGE_DIR: str = "agent"
+
+    def __init__(
+        self,
+        initial_artifact_path: str,
+        cert_path: typing.Optional[str] = None,
+        download_path: typing.Optional[str] = None,
+        overwrite_version: typing.Optional[str] = None,
+    ):
+        """
+        :param initial_artifact_path: 原始制品所在路径
+        :param cert_path: 证书目录
+        :param download_path: 归档路径
+        :param overwrite_version: 版本号,用于覆盖原始制品内的版本信息
+        """
+        self.initial_artifact_path = initial_artifact_path
+        self.cert_path = cert_path or settings.GSE_CERT_PATH
+        self.download_path = download_path or settings.DOWNLOAD_PATH
+        self.overwrite_version = overwrite_version
+
+        # 原始制品名称
+        self.initial_artifact_filename = os.path.basename(initial_artifact_path)
+        # 已申请的临时目录
+        self.applied_tmp_dirs = set()
+        # 文件源
+        self.storage = get_storage(file_overwrite=True)
+
+    @staticmethod
+    def download_file(file_path: str, target_path: str):
+        """
+        下载文件
+        :param file_path: 文件路径
+        :param target_path: 目标路径
+        :return:
+        """
+
+        storage = get_storage()
+        if not storage.exists(name=file_path):
+            raise exceptions.FileNotExistError(_("文件不存在:file_path -> {file_path}").format(file_path=file_path))
+
+        logger.info(f"start to download file -> {file_path} to {target_path}")
+
+        with storage.open(name=file_path, mode="rb") as fs:
+            with files.FileOpen(name=target_path, mode="wb") as local_fs:
+                for chunk in iter(lambda: fs.read(4096), b""):
+                    if not chunk:
+                        continue
+                    local_fs.write(chunk)
+
+        logger.info(f"download file -> {file_path} to {target_path} success.")
+
+    @abc.abstractmethod
+    def extract_initial_artifact(self, initial_artifact_local_path: str, extract_dir: str) -> str:
+        """
+        解压原始制品到指定目录下,大致流程:
+        1. 解压并转为标准格式:${node_type}_${os}_${cpu_arch}
+        2. 注入证书
+        待定:是否把 meta 信息都丢到 meta 目录
+
+        得到的标准结构示例(Agent 2.0):
+        gse
+        ├── CHANGELOG.md
+        ├── VERSION
+        ├── agent_linux_x86_64
+        │   ├── bin
+        │   │   ├── gse_agent
+        │   │   └── gsectl
+        │   └── cert
+        │       ├── gse_agent.crt
+        │       ├── gse_agent.key
+        │       ├── gse_api_client.crt
+        │       ├── gse_api_client.key
+        │       ├── gse_server.crt
+        │       ├── gse_server.key
+        │       └── gseca.crt
+        ├── agent_windows_x86
+        │   ├── bin
+        │   │   ├── gse_agent.exe
+        │   │   ├── gse_agent_daemon.exe
+        │   │   └── gsectl.bat
+        │   └── cert
+        │       ├── gse_agent.crt
+        │       ├── gse_agent.key
+        │       ├── gse_api_client.crt
+        │       ├── gse_api_client.key
+        │       ├── gse_server.crt
+        │       ├── gse_server.key
+        │       └── gseca.crt
+        └── support-files
+            ├── env
+            └── templates
+                └── gse_agent_conf.template
+        :param initial_artifact_local_path: 原始制品库本地存放路径
+        :param extract_dir: 解压目录
+        :return:
+        """
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    def _get_support_files_info(self, extract_dir: str) -> typing.Dict[str, typing.Any]:
+        """
+        获取部署依赖文件合集:配置 / 环境变量等
+        :param extract_dir: 解压目录
+        :return:
+        """
+        raise NotImplementedError
+
+    def _inject_dependencies(self, extract_dir: str):
+        """
+        注入依赖文件,大致流程:
+        1. 注入证书
+        2. 注入 gsectl
+        3. 赋予部分文件可执行权限
+        :param extract_dir: 解压目录
+        :return:
+        """
+        for pkg_dir_name in os.listdir(extract_dir):
+            # 通过正则提取出插件(plugin)目录名中的插件信息
+            re_match = constants.AGENT_PATH_RE.match(pkg_dir_name)
+            if re_match is None:
+                logger.info("pkg_dir_name -> {pkg_dir_name} is not match, jump it.".format(pkg_dir_name=pkg_dir_name))
+                continue
+
+            # 证书目标路径
+            cert_dst: str = os.path.join(extract_dir, pkg_dir_name, "cert")
+
+            if os.path.exists(cert_dst):
+                # 存在即移除,确保证书目标路径是干净的
+                shutil.rmtree(cert_dst)
+                logger.warning(f"cert_dst -> {cert_dst} not clean, removed it.")
+            # 创建一个干净的证书目录
+            os.makedirs(cert_dst, exist_ok=True)
+
+            # 注入证书
+            injected_cert_filenames: typing.List[str] = []
+            for cert_filename in self.CERT_FILENAMES:
+                cert_filepath: str = os.path.join(self.cert_path, cert_filename)
+                if not os.path.exists(cert_filepath):
+                    # 在部分场景下可能本身就不需要证书,此处暂不抛异常,跳过该证书
+                    logger.warning(f"cert file -> {cert_filepath} not exist, jump it.")
+                    continue
+                injected_cert_filenames.append(cert_filename)
+                shutil.copyfile(cert_filepath, os.path.join(cert_dst, cert_filename))
+
+            logger.info(f"copy ({','.join(injected_cert_filenames)}) from {self.cert_path} to {cert_dst} success.")
+
+            # 注入 gsectl
+            package_base_info: typing.Dict[str, str] = re_match.groupdict()
+            gsectl_filename: str = ("gsectl", "gsectl.bat")[package_base_info["os"] == constants.PluginOsType.windows]
+            gsectl_file_path: str = os.path.join(
+                self.download_path, "gsectl", self.PKG_DIR, package_base_info["os"], gsectl_filename
+            )
+            if not self.storage.exists(gsectl_file_path):
+                raise exceptions.FileNotExistError(
+                    _("gsectl 文件不存在:file_path -> {file_path}").format(file_path=gsectl_file_path)
+                )
+
+            # 将 gsectl 放置到 bin 目录下
+            pkg_bin_dir: str = os.path.join(extract_dir, pkg_dir_name, "bin")
+            gsectl_target_file_path: str = os.path.join(pkg_bin_dir, gsectl_filename)
+            # 从文件源拿 gsectl 可以保证打包的依赖与项目的版本解耦,在依赖出现问题后仅需更新文件源文件,无需出包
+            with
self.storage.open(gsectl_file_path, mode="rb") as fs: + # mode 指定 w,覆盖现有文件 + with open(gsectl_target_file_path, mode="wb") as local_fs: + local_fs.write(fs.read()) + logger.info(f"copy gsectl -> {gsectl_file_path} to {gsectl_target_file_path} success.") + + # 为二进制文件授予可执行权限 + for file_path in files.fetch_file_paths_from_dir(dir_path=pkg_bin_dir): + files.make_executable(file_path) + logger.info(f"make file -> {file_path} executable.") + + def _list_package_dir_infos(self, extract_dir: str) -> typing.List[typing.Dict]: + """ + 解析并获取安装包目录信息 + :param extract_dir: 解压目录 + :return: + """ + package_dir_infos: typing.List[typing.Dict] = [] + for pkg_dir_name in os.listdir(extract_dir): + # 通过正则提取出插件(plugin)目录名中的插件信息 + re_match = constants.AGENT_PATH_RE.match(pkg_dir_name) + if re_match is None: + logger.info( + "pkg_dir_name -> {pkg_dir_name} is not match re, jump it.".format(pkg_dir_name=pkg_dir_name) + ) + continue + + package_base_info: typing.Dict[str, str] = re_match.groupdict() + package_dir_infos.append( + { + "extract_dir": extract_dir, + "os": package_base_info["os"], + "cpu_arch": package_base_info["cpu_arch"], + "pkg_relative_path": pkg_dir_name, + "pkg_absolute_path": os.path.join(extract_dir, pkg_dir_name), + } + ) + + return package_dir_infos + + def make_and_upload_package( + self, package_dir_info: typing.Dict[str, typing.Any], artifact_meta_info: typing.Dict[str, typing.Any] + ) -> typing.Dict[str, typing.Any]: + """ + 制作并上传安装包 + + 上传到文件源的标准结构示例: + agent + ├── linux + │ └── x86_64 + │ └── gse_agent-1.0.1.tgz + └── windows + └── x86 + └── gse_agent-1.0.1.tgz + + TODO 关于同版本分为「企业版」「内部版」的管理设想 + - 明确原则:首先必须当成同一个版本去维护管理 + - 怎么管理:接入点「服务器目录」+「Agent 版本」可以唯一确定一个包 + - pkg - path(md5),pkg - path1(md5) + - 证书区分:证书路径放到接入点维护 + + :param package_dir_info: 安装包信息 + :param artifact_meta_info: 基础信息 + :return: + """ + name: str = artifact_meta_info["name"] + os_str: str = package_dir_info["os"] + cpu_arch: str = package_dir_info["cpu_arch"] + version_str: str = artifact_meta_info["version"] + pkg_name: str = f"{name}-{version_str}.tgz" + + package_tmp_path = os.path.join(self.apply_tmp_dir(), self.BASE_STORAGE_DIR, os_str, cpu_arch, pkg_name) + os.makedirs(os.path.dirname(package_tmp_path), exist_ok=True) + + with tarfile.open(package_tmp_path, "w:gz") as tf: + tf.add(package_dir_info["pkg_absolute_path"], arcname=f"{self.PKG_DIR}/") + logger.info( + "project -> {project} version -> {version} " + "now is pack to package_tmp_path -> {package_tmp_path}".format( + project=name, version=version_str, package_tmp_path=package_tmp_path + ) + ) + + # 将 Agent 包上传到存储系统 + package_target_path = os.path.join(self.download_path, self.BASE_STORAGE_DIR, os_str, cpu_arch, pkg_name) + with open(package_tmp_path, mode="rb") as tf: + # 采用同名覆盖策略,保证同版本 Agent 包仅保存一份 + storage_path = self.storage.save(package_target_path, tf) + if storage_path != package_target_path: + raise exceptions.CreatePackageRecordError( + _("Agent 包保存错误,期望保存到 -> {package_target_path}, 实际保存到 -> {storage_path}").format( + package_target_path=package_target_path, storage_path=storage_path + ) + ) + + logger.info( + "package -> {pkg_name} upload to package_target_path -> {package_target_path} success".format( + pkg_name=pkg_name, package_target_path=package_target_path + ) + ) + + return { + "pkg_name": pkg_name, + "md5": files.md5sum(name=package_tmp_path), + "pkg_size": os.path.getsize(package_tmp_path), + "pkg_path": os.path.dirname(package_target_path), + } + + def apply_tmp_dir(self) -> str: + """ + 创建临时目录并返回路径 + :return: + """ + tmp_dir: str = 
files.mk_and_return_tmpdir() + self.applied_tmp_dirs.add(tmp_dir) + return tmp_dir + + @cache.class_member_cache() + def _get_version(self, extract_dir: str) -> str: + """ + 获取版本号 + :param extract_dir: 解压目录 + :return: + """ + # 优先使用覆盖版本号 + if self.overwrite_version: + return self.overwrite_version + + version_file_path: str = os.path.join(extract_dir, "VERSION") + if not os.path.exists(version_file_path): + raise exceptions.FileNotExistError(_("版本文件不存在")) + with open(version_file_path, "r", encoding="utf-8") as version_fs: + untreated_version_str: str = version_fs.read() + version_match: typing.Optional[typing.Match] = constants.SEMANTIC_VERSION_PATTERN.search( + untreated_version_str or "" + ) + if version_match: + return version_match.group() + else: + raise exceptions.NotSemanticVersionError({"version": version_match}) + + @cache.class_member_cache() + def _get_changelog(self, extract_dir: str) -> str: + """ + 获取版本日志 + :param extract_dir: 解压目录 + :return: + """ + changelog_file_path: str = os.path.join(extract_dir, "CHANGELOG.md") + if not os.path.exists(changelog_file_path): + raise exceptions.FileNotExistError(_("版本日志文件不存在")) + with open(changelog_file_path, "r", encoding="utf-8") as changelog_fs: + changelog: str = changelog_fs.read() + return changelog + + def update_or_create_record(self, artifact_meta_info: typing.Dict[str, typing.Any]): + """ + 创建或更新制品记录,待 Agent 包管理完善 + :param artifact_meta_info: + :return: + """ + pass + + def update_or_create_package_records(self, package_infos: typing.List[typing.Dict]): + """ + 创建或更新安装包记录,待 Agent 包管理完善 + :param package_infos: + :return: + """ + pass + + def get_artifact_meta_info(self, extract_dir: str) -> typing.Dict[str, typing.Any]: + """ + 获取制品的基础信息、配置文件信息 + :param extract_dir: 解压目录 + :return: + """ + # 版本 + version_str: str = self._get_version(extract_dir) + # 配置文件 + support_files_info = self._get_support_files_info(extract_dir) + # changelog + changelog: str = self._get_changelog(extract_dir) + + return { + "name": self.NAME, + "version": version_str, + "changelog": changelog, + "support_files_info": support_files_info, + } + + def list_package_dir_infos(self) -> typing.Tuple[str, typing.List[typing.Dict]]: + # 下载原始制品 + initial_artifact_local_path: str = os.path.join( + self.apply_tmp_dir(), os.path.basename(self.initial_artifact_path) + ) + self.download_file(self.initial_artifact_path, initial_artifact_local_path) + # 进行解压 + extract_dir: str = self.extract_initial_artifact(initial_artifact_local_path, self.apply_tmp_dir()) + return extract_dir, self._list_package_dir_infos(extract_dir=extract_dir) + + def make( + self, + operator: typing.Optional[str] = None, + select_pkg_relative_paths: typing.Optional[typing.List[str]] = None, + ): + """ + 制作适配于指定机型的安装包 + :param operator: 操作人 + :param select_pkg_relative_paths: 已选择的需要导入的安装包 + :return: + """ + package_infos: typing.List[typing.Dict] = [] + extract_dir, package_dir_infos = self.list_package_dir_infos() + artifact_meta_info: typing.Dict[str, typing.Any] = self.get_artifact_meta_info(extract_dir) + + for package_dir_info in package_dir_infos: + if not ( + select_pkg_relative_paths is None or package_dir_info["pkg_relative_path"] in select_pkg_relative_paths + ): + logger.info("path -> {path} not selected, jump it".format(path=package_dir_info["pkg_relative_path"])) + continue + + package_upload_info: typing.Dict[str, typing.Any] = self.make_and_upload_package( + package_dir_info, artifact_meta_info + ) + package_infos.append( + { + "artifact_meta_info": artifact_meta_info, + 
"package_dir_info": package_dir_info, + "package_upload_info": package_upload_info, + } + ) + + artifact_meta_info["operator"] = operator + self.update_or_create_record(artifact_meta_info) + self.update_or_create_package_records(package_infos) + + def __enter__(self) -> "BaseArtifactBuilder": + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + # 退出前清理临时目录,避免打包导致存储膨胀 + for to_be_removed_tmp_dir in self.applied_tmp_dirs: + if os.path.exists(to_be_removed_tmp_dir): + shutil.rmtree(to_be_removed_tmp_dir) + self.applied_tmp_dirs.clear() diff --git a/apps/backend/agent/artifact_builder/proxy.py b/apps/backend/agent/artifact_builder/proxy.py new file mode 100644 index 000000000..09508b6e8 --- /dev/null +++ b/apps/backend/agent/artifact_builder/proxy.py @@ -0,0 +1,99 @@ +# -*- coding: utf-8 -*- +""" +TencentBlueKing is pleased to support the open source community by making 蓝鲸智云-节点管理(BlueKing-BK-NODEMAN) available. +Copyright (C) 2017-2022 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at https://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" +import logging +import os +import shutil +import tarfile +import typing + +from django.conf import settings +from django.utils.translation import ugettext_lazy as _ + +from apps.node_man import constants + +from . import base + +logger = logging.getLogger("app") + + +class ProxyArtifactBuilder(base.BaseArtifactBuilder): + + NAME = constants.GsePackageCode.PROXY.value + PKG_DIR = constants.GsePackageDir.PROXY.value + CERT_FILENAMES: typing.List[str] = constants.GseCert.list_member_values() + + # 服务二进制目录 + SERVER_BIN_DIR: str = "server/bin" + # 所需的二进制文件 + PROXY_SVR_EXES: typing.List[str] = ["gse_data", "gse_file"] + + def extract_initial_artifact(self, initial_artifact_local_path: str, extract_dir: str): + with tarfile.open(name=initial_artifact_local_path) as tf: + tf.extractall(path=extract_dir) + + extract_dir: str = os.path.join(extract_dir, self.BASE_PKG_DIR) + if not os.path.exists(extract_dir): + raise FileExistsError(_("Proxy 包解压后不存在 {base_pkg_dir} 目录").format(base_pkg_dir=self.BASE_PKG_DIR)) + + # 把基础 Agent 包拉过来 + os_str: str = constants.PluginOsType.linux + cpu_arch: str = constants.CpuType.x86_64 + pkg_name: str = f"{constants.GsePackageCode.AGENT.value}-{self._get_version(extract_dir)}.tgz" + base_agent_pkg_path: str = os.path.join( + settings.DOWNLOAD_PATH, self.BASE_STORAGE_DIR, os_str, cpu_arch, pkg_name + ) + if not self.storage.exists(base_agent_pkg_path): + raise FileExistsError( + _("构建 Proxy 所需 Agent 包不存在:file_path -> {file_path}").format(file_path=base_agent_pkg_path) + ) + + base_agent_src: str = os.path.join(extract_dir, str(constants.GsePackageDir.AGENT.value)) + # src 存在即移除,确保基础 Agent 解压目录是干净的 + if os.path.exists(base_agent_src): + shutil.rmtree(base_agent_src) + logger.warning(f"base_agent_src -> {base_agent_src} not clean, removed it.") + + # 执行解压 + with self.storage.open(name=base_agent_pkg_path, mode="rb") as tf_from_storage: + with tarfile.open(fileobj=tf_from_storage) as tf: + tf.extractall(extract_dir) + logger.info(f"file -> {base_agent_pkg_path} extract to dir -> 
{extract_dir} success.") + + # 按正则规范构建 Proxy 安装包的目录 + proxy_dir: str = os.path.join(extract_dir, self.PKG_PATH_DIR_TEMPL.format(os=os_str, cpu_arch=cpu_arch)) + os.rename(base_agent_src, proxy_dir) + logger.info(f"rename base_agent_src -> {base_agent_src} to proxy_dir -> {proxy_dir}") + + # bin 目录检查 + proxy_bin_dir: str = os.path.join(proxy_dir, "bin") + if not os.path.exists(proxy_bin_dir): + raise FileExistsError(_("构建 Proxy 所需 Agent 不存在 bin 路径")) + + # 将所需的二进制放到安装包目录 + for svr_exe in self.PROXY_SVR_EXES: + svr_exe_path: str = os.path.join(extract_dir, self.SERVER_BIN_DIR, svr_exe) + if not os.path.exists(svr_exe_path): + raise FileExistsError( + _("构建 Proxy 所需二进制 [{svr_exe}] 不存在:svr_exe_path -> {svr_exe_path}").format( + svr_exe=svr_exe, svr_exe_path=svr_exe_path + ) + ) + + svr_exe_dst_path: str = os.path.join(proxy_bin_dir, svr_exe) + shutil.copyfile(svr_exe_path, svr_exe_dst_path) + logger.info(f"copy {svr_exe} from {svr_exe_path} to {svr_exe_dst_path} success.") + + self._inject_dependencies(extract_dir) + return extract_dir + + def _get_support_files_info(self, extract_dir: str) -> typing.Dict[str, typing.Any]: + # Agent 包管理实现 + pass diff --git a/apps/backend/agent/tools.py b/apps/backend/agent/tools.py index 4d54b0a64..f2f7d557f 100644 --- a/apps/backend/agent/tools.py +++ b/apps/backend/agent/tools.py @@ -72,7 +72,10 @@ def __init__( def gen_nginx_download_url(nginx_ip: str) -> str: - return f"http://{nginx_ip}:{settings.BK_NODEMAN_NGINX_DOWNLOAD_PORT}/" + if basic.is_v6(nginx_ip): + return f"http://[{nginx_ip}]:{settings.BK_NODEMAN_NGINX_DOWNLOAD_PORT}/" + else: + return f"http://{nginx_ip}:{settings.BK_NODEMAN_NGINX_DOWNLOAD_PORT}/" def fetch_gse_servers_info( diff --git a/apps/backend/components/collections/agent_new/base.py b/apps/backend/components/collections/agent_new/base.py index c54cc6aa6..cb79921b6 100644 --- a/apps/backend/components/collections/agent_new/base.py +++ b/apps/backend/components/collections/agent_new/base.py @@ -166,17 +166,15 @@ def get_host_id__install_channel_map( host_id__ap_map: Dict[int, models.AccessPoint], cloud_id__proxies_map: Dict[int, List[models.Host]], ) -> Dict[int, Tuple[Optional[models.Host], Dict[str, List]]]: + install_channel_ids: List[int] = list({host.install_channel_id for host in hosts}) + install_channel_id__jump_servers_map: Dict[ + int, List[models.Host] + ] = models.InstallChannel.install_channel_id__host_objs_map(install_channel_ids) + + # 建立通道ID - 通道的映射关系 id__install_channel_obj_map: Dict[int, models.InstallChannel] = {} - install_channel_id__jump_servers_map: Dict[int, List[models.Host]] = defaultdict(list) - install_channel_objs = models.InstallChannel.objects.filter(id__in={host.install_channel_id for host in hosts}) - # 安装通道数量通常是个位数,兼顾可读性在循环中执行DB查询操作 - for install_channel_obj in install_channel_objs: + for install_channel_obj in models.InstallChannel.objects.filter(id__in=install_channel_ids): id__install_channel_obj_map[install_channel_obj.id] = install_channel_obj - install_channel_id__jump_servers_map[install_channel_obj.id] = list( - models.Host.objects.filter( - inner_ip__in=install_channel_obj.jump_servers, bk_cloud_id=install_channel_obj.bk_cloud_id - ) - ) cloud_id__alive_proxies_map: Dict[int, List[models.Host]] = defaultdict(list) for cloud_id, proxies in cloud_id__proxies_map.items(): @@ -193,7 +191,7 @@ def get_host_id__install_channel_map( jump_server = random.choice(install_channel_id__jump_servers_map[install_channel_obj.id]) except IndexError: self.move_insts_to_failed( - [sub_inst_id], 
log_content=_("所选安装通道「{name} 没有可用跳板机".format(name=install_channel_obj.name)) + [sub_inst_id], log_content=_("所选安装通道「{name}」 没有可用跳板机".format(name=install_channel_obj.name)) ) else: host_id__install_channel_map[host.bk_host_id] = (jump_server, install_channel_obj.upstream_servers) diff --git a/apps/backend/components/collections/agent_new/run_upgrade_command.py b/apps/backend/components/collections/agent_new/run_upgrade_command.py index 0284cf705..441a60eb4 100644 --- a/apps/backend/components/collections/agent_new/run_upgrade_command.py +++ b/apps/backend/components/collections/agent_new/run_upgrade_command.py @@ -9,9 +9,11 @@ specific language governing permissions and limitations under the License. """ import os +import typing from django.conf import settings +from apps.backend.agent.artifact_builder.proxy import ProxyArtifactBuilder from apps.node_man import constants, models from .base import AgentCommonData, AgentExecuteScriptService @@ -25,8 +27,8 @@ '/v gse_agent /t reg_sz /d "{setup_path}\\agent\\bin\\gsectl.bat start" /f 1>nul 2>&1' " && start gsectl.bat stop" " && ping -n 20 127.0.0.1 >> c:\\ping_ip.txt" - " && {temp_path}\\7z.exe x {temp_path}\\{package_name} -o{temp_path} -y 1>nul 2>&1" - " && {temp_path}\\7z.exe x {temp_path}\\{package_name_tar} -aot -o{setup_path} -y 1>nul 2>&1" + " && {temp_path}\\7z.exe x {temp_path}\\{package_name} -so |" + " {temp_path}\\7z.exe x -aot -si -ttar -o{setup_path} 1>nul 2>&1" " && gsectl.bat start" ) @@ -34,7 +36,7 @@ PROXY_RELOAD_CMD_TEMPLATE = """ result=0 count=0 -for proc in gse_agent gse_transit gse_btsvr gse_data; do +for proc in {procs}; do [ -f {setup_path}/{node_type}/bin/$proc ] && cd {setup_path}/{node_type}/bin && ./$proc --reload && \ count=$((count + 1)) sleep 1 @@ -46,7 +48,7 @@ """ # Agent 重载配置命令模板 -AGENT_RELOAD_CMD_TEMPLATE = "cd {setup_path}/{node_type}/bin && ./gse_agent --reload || ./gsectl restart all" +AGENT_RELOAD_CMD_TEMPLATE = "cd {setup_path}/{node_type}/bin && ./{procs} --reload || ./gsectl restart all" # 节点类型 - 重载命令模板映射关系 NODE_TYPE__RELOAD_CMD_TPL_MAP = { @@ -70,15 +72,23 @@ def get_script_content(self, data, common_data: AgentCommonData, host: models.Ho setup_path=agent_config["setup_path"], temp_path=agent_config["temp_path"], package_name=agent_upgrade_pkg_name, - package_name_tar=agent_upgrade_pkg_name.replace("tgz", "tar"), ) return scripts else: tpl_path = os.path.join(settings.BK_SCRIPTS_PATH, "upgrade_agent.sh.tpl") with open(tpl_path, encoding="utf-8") as fh: scripts = fh.read() + + if host.node_type == constants.NodeType.PROXY: + if common_data.agent_step_adapter.is_legacy: + procs: typing.List[str] = ["gse_agent", "gse_transit", "gse_btsvr", "gse_data"] + else: + procs: typing.List[str] = ["gse_agent"] + ProxyArtifactBuilder.PROXY_SVR_EXES + else: + procs: typing.List[str] = ["gse_agent"] + reload_cmd = NODE_TYPE__RELOAD_CMD_TPL_MAP[general_node_type].format( - setup_path=agent_config["setup_path"], node_type=general_node_type + setup_path=agent_config["setup_path"], node_type=general_node_type, procs=" ".join(procs) ) scripts = scripts.format( setup_path=agent_config["setup_path"], diff --git a/apps/backend/exceptions.py b/apps/backend/exceptions.py index 7cde2ae6e..c6346cdb7 100644 --- a/apps/backend/exceptions.py +++ b/apps/backend/exceptions.py @@ -17,8 +17,8 @@ class BackendBaseException(AppBaseException): MODULE_CODE = 2000 -class UploadPackageNotExistError(BackendBaseException): - MESSAGE = _("文件包不存在") +class FileNotExistError(BackendBaseException): + MESSAGE = _("文件不存在") ERROR_CODE = 1 @@ -78,3 
+78,8 @@ class AgentConfigTemplateNotExistError(BackendBaseException): MESSAGE = _("配置模板不存在") MESSAGE_TPL = _("配置模板[{name}-{filename}-{os_type}-{cpu_arch}]不存在") ERROR_CODE = 12 + + +class NotSemanticVersionError(BackendBaseException): + MESSAGE_TPL = _("版本号 -> {version} 不符合语义化版本规则") + ERROR_CODE = 13 diff --git a/apps/backend/management/commands/init_agents.py b/apps/backend/management/commands/init_agents.py new file mode 100644 index 000000000..eef03f396 --- /dev/null +++ b/apps/backend/management/commands/init_agents.py @@ -0,0 +1,82 @@ +# -*- coding: utf-8 -*- +""" +TencentBlueKing is pleased to support the open source community by making 蓝鲸智云-节点管理(BlueKing-BK-NODEMAN) available. +Copyright (C) 2017-2022 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at https://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" +from __future__ import absolute_import, unicode_literals + +import os +import tarfile +import typing + +from django.conf import settings +from django.core.management.base import BaseCommand + +from apps.backend.agent.artifact_builder import agent, base, proxy +from apps.core.files.storage import get_storage +from apps.node_man import constants, models +from apps.utils import files +from common.log import logger + + +class Command(BaseCommand): + def add_arguments(self, parser): + parser.add_argument("-c", "--cert_path", help=f"证书目录,默认为 {settings.GSE_CERT_PATH}", type=str) + parser.add_argument("-d", "--download_path", help=f"归档路径,默认为 {settings.DOWNLOAD_PATH}", type=str) + parser.add_argument("-o", "--overwrite_version", help="版本号,用于覆盖原始制品内的版本信息", type=str) + + def handle(self, *args, **options): + """ + 初始化内置官方插件 + :param args: + :param options: + :return: None + """ + + storage = get_storage() + + for module in [constants.GsePackageCode.AGENT.value, constants.GsePackageCode.PROXY.value]: + logger.info(f"load module -> {module}") + module_dir: str = os.path.join(settings.BK_OFFICIAL_PLUGINS_INIT_PATH, str(module)) + if not os.path.exists(module_dir): + logger.info(f"module_dir -> {module_dir} not exist, skipped it.") + continue + for file_abs_path in files.fetch_file_paths_from_dir(dir_path=module_dir): + file_name: str = os.path.basename(file_abs_path) + if not tarfile.is_tarfile(file_abs_path): + logger.warning(f"file -> [{file_name}] is not tar file, will not try to import it.") + continue + + logger.info(f"start to upload [{file_name}] to storage.") + with open(file=file_abs_path, mode="rb") as fs: + file_path = storage.save(name=os.path.join(settings.UPLOAD_PATH, file_name), content=fs) + logger.info(f"upload [{file_name}] to storage({file_path}) success.") + + upload_record = models.UploadPackage.create_record( + # 后续可以考虑通过路径来判断 + module=module, + file_path=file_path, + md5=files.md5sum(name=file_abs_path), + operator="system", + source_app_code=settings.APP_CODE, + file_name=os.path.basename(file_path), + is_file_copy=True, + ) + + if module == constants.GsePackageCode.AGENT.value: + artifact_builder_class: typing.Type[base.BaseArtifactBuilder] = agent.AgentArtifactBuilder + else: + artifact_builder_class: 
typing.Type[base.BaseArtifactBuilder] = proxy.ProxyArtifactBuilder + + with artifact_builder_class( + initial_artifact_path=upload_record.file_path, + cert_path=options.get("cert_path"), + download_path=options.get("download_path"), + overwrite_version=options.get("overwrite_version"), + ) as builder: + builder.make() diff --git a/apps/backend/management/commands/init_official_plugins.py b/apps/backend/management/commands/init_official_plugins.py index e3e16e609..ca5c261ff 100644 --- a/apps/backend/management/commands/init_official_plugins.py +++ b/apps/backend/management/commands/init_official_plugins.py @@ -21,8 +21,8 @@ from apps.backend.plugin import tools from apps.core.files.storage import get_storage -from apps.node_man import models -from apps.utils.files import md5sum +from apps.node_man import constants, models +from apps.utils import files from common.log import logger @@ -43,61 +43,56 @@ def handle(self, *args, **options): # 1. 遍历寻找所有符合条件的插件文件 # 此处使用walk的原因,主要是考虑后续需要使用文件夹来隔离不同类型的插件,所以使用walk而非listdir - for dir_path, _, file_list in os.walk(settings.BK_OFFICIAL_PLUGINS_INIT_PATH): + for file_abs_path in files.fetch_file_paths_from_dir( + settings.BK_OFFICIAL_PLUGINS_INIT_PATH, ignored_dir_names=constants.GsePackageCode.list_member_values() + ): + file_name: str = os.path.basename(file_abs_path) + if not tarfile.is_tarfile(file_abs_path): + logger.warning("file->[%s] is not tar file, will not try to import it." % file_abs_path) + continue - for file_name in file_list: - file_abs_path = os.path.join(dir_path, file_name) + with open(file=file_abs_path, mode="rb") as fs: + file_path = storage.save(name=os.path.join(settings.UPLOAD_PATH, file_name), content=fs) - # 判断是否符合预期的文件内容 - if os.path.isdir(file_abs_path): - logger.warning("file->[%s] is dir not file, will not try to import it." % file_abs_path) - continue - - if not tarfile.is_tarfile(file_abs_path): - logger.warning("file->[%s] is not tar file, will not try to import it." % file_abs_path) - continue + # 2. 尝试导入这个文件 + with atomic(): + upload_record = models.UploadPackage.create_record( + # 后续可以考虑通过路径来判断 + module="gse_plugin", + file_path=file_path, + md5=files.md5sum(name=file_abs_path), + operator="system", + source_app_code="bk_nodeman", + file_name=os.path.basename(file_path), + is_file_copy=True, + ) - with open(file=file_abs_path, mode="rb") as fs: - file_path = storage.save(name=os.path.join(settings.UPLOAD_PATH, file_name), content=fs) - # 2. 尝试导入这个文件 - with atomic(): - upload_record = models.UploadPackage.create_record( - # 后续可以考虑通过路径来判断 - module="gse_plugin", - file_path=file_path, - md5=md5sum(name=file_abs_path), - operator="system", - source_app_code="bk_nodeman", - file_name=os.path.basename(file_path), - is_file_copy=True, + try: + # 如果是官方内置的插件,那么应该是直接发布的 + package_list = tools.create_package_records( + file_path=upload_record.file_path, + file_name=upload_record.file_name, + is_release=True, + is_template_load=True, ) + except Exception as error: + # 但是需要注意这个文件可能是已经存在的文件,会有导入失败的问题 + logger.error( + "failed to import file->[%s] for->[%s] file all records will be deleted." + % (file_abs_path, traceback.format_exc()) + ) + six.raise_from(error, error) + continue - try: - # 如果是官方内置的插件,那么应该是直接发布的 - package_list = tools.create_package_records( - file_path=upload_record.file_path, - file_name=upload_record.file_name, - is_release=True, - is_template_load=True, - ) - except Exception as error: - # 但是需要注意这个文件可能是已经存在的文件,会有导入失败的问题 - logger.error( - "failed to import file->[%s] for->[%s] file all records will be deleted." 
- % (file_abs_path, traceback.format_exc()) - ) - six.raise_from(error, error) - continue - - package_name_list = [ - # mysql_export->1.0.0->x86->linux - "->".join([package.pkg_name, str(package.version), package.cpu_arch, package.os]) - for package in package_list - ] - logger.info("file->[{}] import success for packages->[{}]".format(file_abs_path, package_name_list)) + package_name_list = [ + # mysql_export->1.0.0->x86->linux + "->".join([package.pkg_name, str(package.version), package.cpu_arch, package.os]) + for package in package_list + ] + logger.info("file->[{}] import success for packages->[{}]".format(file_abs_path, package_name_list)) - file_count += 1 - package_count += len(package_list) + file_count += 1 + package_count += len(package_list) logger.info( "all package under path->[%s] is import success, file_count->[%s] package_count->[%s]" diff --git a/apps/backend/plugin/views.py b/apps/backend/plugin/views.py index fac7f22e8..35eb3fdbb 100644 --- a/apps/backend/plugin/views.py +++ b/apps/backend/plugin/views.py @@ -107,7 +107,7 @@ def create_plugin_register_task(self, request): # 1. 判断是否存在需要注册的文件信息 models_queryset = models.UploadPackage.objects.filter(file_name=file_name) if not models_queryset.exists(): - raise exceptions.UploadPackageNotExistError(_("找不到请求发布的文件,请确认后重试")) + raise exceptions.FileNotExistError(_("找不到请求发布的文件,请确认后重试")) # 2. 创建一个新的task,返回任务ID job = models.Job.objects.create( @@ -884,7 +884,7 @@ def parse(self, request): models.UploadPackage.objects.filter(file_name=params["file_name"]).order_by("-upload_time").first() ) if upload_package_obj is None: - raise exceptions.UploadPackageNotExistError(_("找不到请求发布的文件,请确认后重试")) + raise exceptions.FileNotExistError(_("找不到请求发布的文件,请确认后重试")) # 获取插件中各个插件包的路径信息 package_infos = tools.list_package_infos(file_path=upload_package_obj.file_path) diff --git a/apps/backend/subscription/steps/agent_adapter/config_context/context_helper.py b/apps/backend/subscription/steps/agent_adapter/config_context/context_helper.py index 214a10cd4..da2c7bc91 100644 --- a/apps/backend/subscription/steps/agent_adapter/config_context/context_helper.py +++ b/apps/backend/subscription/steps/agent_adapter/config_context/context_helper.py @@ -49,16 +49,18 @@ def __post_init__(self): log_path: str = agent_config["log_path"] setup_path: str = agent_config["setup_path"] path_sep: str = (constants.LINUX_SEP, constants.WINDOWS_SEP)[self.host.os_type == constants.OsType.WINDOWS] + + cert_dir: str = path_sep.join([setup_path, self.node_type, "cert"]) # Agent 侧证书 - agent_tls_ca_file: str = path_sep.join([setup_path, self.node_type, "cert", "gseca.crt"]) - agent_tls_cert_file: str = path_sep.join([setup_path, self.node_type, "cert", "gse_agent.crt"]) - agent_tls_key_file: str = path_sep.join([setup_path, self.node_type, "cert", "gse_agent.key"]) + agent_tls_ca_file: str = path_sep.join([cert_dir, constants.GseCert.CA.value]) + agent_tls_cert_file: str = path_sep.join([cert_dir, constants.GseCert.AGENT_CERT.value]) + agent_tls_key_file: str = path_sep.join([cert_dir, constants.GseCert.AGENT_KEY.value]) # Proxy 侧证书 proxy_tls_ca_file: str = agent_tls_ca_file - proxy_tls_cert_file: str = path_sep.join([setup_path, self.node_type, "cert", "gse_server.crt"]) - proxy_tls_key_file: str = path_sep.join([setup_path, self.node_type, "cert", "gse_server.key"]) - proxy_tls_cli_cert_file: str = path_sep.join([setup_path, self.node_type, "cert", "gse_api_client.crt"]) - proxy_tls_cli_key_file: str = path_sep.join([setup_path, self.node_type, "cert", "gse_api_client.key"]) + 
proxy_tls_cert_file: str = path_sep.join([cert_dir, constants.GseCert.SERVER_CERT.value]) + proxy_tls_key_file: str = path_sep.join([cert_dir, constants.GseCert.SERVER_KEY.value]) + proxy_tls_cli_cert_file: str = path_sep.join([cert_dir, constants.GseCert.API_CLIENT_CERT.value]) + proxy_tls_cli_key_file: str = path_sep.join([cert_dir, constants.GseCert.API_CLIENT_KEY.value]) if self.host.os_type == constants.OsType.WINDOWS: # 去除引号 diff --git a/apps/backend/tests/agent/artifact_builder/__init__.py b/apps/backend/tests/agent/artifact_builder/__init__.py new file mode 100644 index 000000000..29ed269e0 --- /dev/null +++ b/apps/backend/tests/agent/artifact_builder/__init__.py @@ -0,0 +1,10 @@ +# -*- coding: utf-8 -*- +""" +TencentBlueKing is pleased to support the open source community by making 蓝鲸智云-节点管理(BlueKing-BK-NODEMAN) available. +Copyright (C) 2017-2022 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at https://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" diff --git a/apps/backend/tests/agent/artifact_builder/test_agent.py b/apps/backend/tests/agent/artifact_builder/test_agent.py new file mode 100644 index 000000000..dc2f69455 --- /dev/null +++ b/apps/backend/tests/agent/artifact_builder/test_agent.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +""" +TencentBlueKing is pleased to support the open source community by making 蓝鲸智云-节点管理(BlueKing-BK-NODEMAN) available. +Copyright (C) 2017-2022 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at https://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
+""" +import os + +import mock +from django.conf import settings + +from apps.backend.tests.agent import utils +from apps.mock_data import utils as mock_data_utils + + +class FileSystemTestCase(utils.AgentBaseTestCase): + + OVERWRITE_VERSION = "stable" + + def pkg_checker(self, version_str: str): + """ + 安装包检查 + :param version_str: 版本 + :return: + """ + pkg_name: str = f"{self.ARTIFACT_BUILDER_CLASS.NAME}-{version_str}.tgz" + for package_os, cpu_arch in self.OS_CPU_CHOICES: + package_path: str = os.path.join( + settings.DOWNLOAD_PATH, self.ARTIFACT_BUILDER_CLASS.BASE_STORAGE_DIR, package_os, cpu_arch, pkg_name + ) + self.assertTrue(os.path.exists(package_path)) + + def test_make(self): + """测试安装包制作""" + with self.ARTIFACT_BUILDER_CLASS(initial_artifact_path=self.ARCHIVE_PATH) as builder: + builder.make() + self.pkg_checker(version_str=utils.VERSION) + + def test_make__overwrite_version(self): + """测试版本号覆盖""" + with self.ARTIFACT_BUILDER_CLASS( + initial_artifact_path=self.ARCHIVE_PATH, overwrite_version=self.OVERWRITE_VERSION + ) as builder: + builder.make() + self.pkg_checker(version_str=self.OVERWRITE_VERSION) + + +class BkRepoTestCase(FileSystemTestCase): + FILE_OVERWRITE = True + OVERWRITE_OBJ__KV_MAP = mock_data_utils.OVERWRITE_OBJ__KV_MAP + + @classmethod + def setUpClass(cls): + mock.patch("apps.core.files.storage.CustomBKRepoStorage", mock_data_utils.CustomBKRepoMockStorage).start() + super().setUpClass() diff --git a/apps/backend/tests/agent/artifact_builder/test_manage_commands.py b/apps/backend/tests/agent/artifact_builder/test_manage_commands.py new file mode 100644 index 000000000..896fdebbd --- /dev/null +++ b/apps/backend/tests/agent/artifact_builder/test_manage_commands.py @@ -0,0 +1,67 @@ +# -*- coding: utf-8 -*- +""" +TencentBlueKing is pleased to support the open source community by making 蓝鲸智云-节点管理(BlueKing-BK-NODEMAN) available. +Copyright (C) 2017-2022 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at https://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" +import os +import shutil +import uuid + +import mock +from django.conf import settings +from django.core.management import call_command + +from apps.backend.tests.agent import utils +from apps.mock_data import utils as mock_data_utils +from apps.node_man import models + +from . 
import test_agent + + +class FileSystemImportAgentTestCase(test_agent.FileSystemTestCase): + @classmethod + def setUpTestData(cls): + cls.OVERWRITE_OBJ__KV_MAP[settings].update( + BK_OFFICIAL_PLUGINS_INIT_PATH=os.path.join(cls.TMP_DIR, uuid.uuid4().hex) + ) + super().setUpTestData() + + def setUp(self): + super().setUp() + agent_module_dir: str = os.path.join(settings.BK_OFFICIAL_PLUGINS_INIT_PATH, self.ARTIFACT_BUILDER_CLASS.NAME) + os.makedirs(agent_module_dir, exist_ok=True) + shutil.copyfile(self.ARCHIVE_PATH, os.path.join(agent_module_dir, self.ARCHIVE_NAME)) + + def test_make(self): + """测试导入命令""" + call_command("init_agents") + self.assertTrue(models.UploadPackage.objects.all().exists()) + self.pkg_checker(version_str=utils.VERSION) + + def test_make__overwrite_version(self): + """测试版本号覆盖""" + call_command("init_agents", overwrite_version=self.OVERWRITE_VERSION) + self.pkg_checker(version_str=self.OVERWRITE_VERSION) + + +class BkRepoImportAgentTestCase(FileSystemImportAgentTestCase): + FILE_OVERWRITE = True + OVERWRITE_OBJ__KV_MAP = mock_data_utils.OVERWRITE_OBJ__KV_MAP + + @classmethod + def setUpClass(cls): + mock.patch("apps.core.files.storage.CustomBKRepoStorage", mock_data_utils.CustomBKRepoMockStorage).start() + super().setUpClass() + + +class FileSystemImportProxyTestCase(FileSystemImportAgentTestCase): + pass + + +class BkRepoImportProxyTestCase(FileSystemImportProxyTestCase): + pass diff --git a/apps/backend/tests/agent/artifact_builder/test_proxy.py b/apps/backend/tests/agent/artifact_builder/test_proxy.py new file mode 100644 index 000000000..56d31726f --- /dev/null +++ b/apps/backend/tests/agent/artifact_builder/test_proxy.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +""" +TencentBlueKing is pleased to support the open source community by making 蓝鲸智云-节点管理(BlueKing-BK-NODEMAN) available. +Copyright (C) 2017-2022 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at https://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. +""" + +from apps.backend.tests.agent import utils + +from . import test_agent + + +class FileSystemTestCase(utils.ProxyBaseTestCase, test_agent.FileSystemTestCase): + pass + + +class BkRepoTestCase(FileSystemTestCase): + pass diff --git a/apps/backend/tests/agent/utils.py b/apps/backend/tests/agent/utils.py new file mode 100644 index 000000000..a681f02ba --- /dev/null +++ b/apps/backend/tests/agent/utils.py @@ -0,0 +1,263 @@ +# -*- coding: utf-8 -*- +""" +TencentBlueKing is pleased to support the open source community by making 蓝鲸智云-节点管理(BlueKing-BK-NODEMAN) available. +Copyright (C) 2017-2022 THL A29 Limited, a Tencent company. All rights reserved. +Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. +You may obtain a copy of the License at https://opensource.org/licenses/MIT +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on +an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the +specific language governing permissions and limitations under the License. +""" +import os +import shutil +import tarfile +import typing +import uuid +from enum import Enum + +from django.conf import settings + +from apps.backend.agent.artifact_builder import agent, proxy +from apps.backend.subscription.steps.agent_adapter import config_templates +from apps.core.files import base, core_files_constants, storage +from apps.mock_data import common_unit +from apps.node_man import constants +from apps.utils import files + +# 测试文件根路径 +from apps.utils.enum import EnhanceEnum +from apps.utils.unittest.testcase import CustomAPITestCase + +VERSION = common_unit.plugin.PACKAGE_VERSION + + +class PathSettingOverwrite(EnhanceEnum): + EXPORT_PATH = "EXPORT_PATH" + UPLOAD_PATH = "UPLOAD_PATH" + DOWNLOAD_PATH = "DOWNLOAD_PATH" + GSE_CERT_PATH = "GSE_CERT_PATH" + + @classmethod + def _get_member__alias_map(cls) -> typing.Dict[Enum, str]: + raise NotImplementedError() + + @classmethod + def get_setting_name__path_suffix_map(cls) -> typing.Dict[str, str]: + return { + cls.EXPORT_PATH.value: "export", + cls.UPLOAD_PATH.value: "upload", + cls.DOWNLOAD_PATH.value: "download", + cls.GSE_CERT_PATH.value: "cert", + } + + +class AgentBaseTestCase(CustomAPITestCase): + TMP_DIR: str = None + PKG_NAME: str = None + OVERWRITE_VERSION: str = None + ARCHIVE_NAME: str = f"gse_agent_ce-{VERSION}.tgz" + ARCHIVE_PATH: str = None + ARCHIVE_MD5: str = None + OS_CPU_CHOICES = [ + (constants.OsType.LINUX.lower(), constants.CpuType.x86_64), + (constants.OsType.WINDOWS.lower(), constants.CpuType.x86), + ] + ARTIFACT_BUILDER_CLASS: typing.Type[agent.AgentArtifactBuilder] = agent.AgentArtifactBuilder + + FILE_OVERWRITE = True + + OVERWRITE_OBJ__KV_MAP = { + settings: { + "FILE_OVERWRITE": FILE_OVERWRITE, + "STORAGE_TYPE": core_files_constants.StorageType.FILE_SYSTEM.value, + }, + base.StorageFileOverwriteMixin: {"file_overwrite": FILE_OVERWRITE}, + } + + @classmethod + def setUpClass(cls): + + cls.TMP_DIR = files.mk_and_return_tmpdir() + cls.PKG_NAME = f"{cls.ARTIFACT_BUILDER_CLASS.NAME}-{VERSION}.tgz" + cls.ARCHIVE_PATH = os.path.join(cls.TMP_DIR, cls.ARCHIVE_NAME) + + setting_name__path_map = { + setting_name: os.path.join( + cls.TMP_DIR, PathSettingOverwrite.get_setting_name__path_suffix_map()[setting_name] + ) + for setting_name in PathSettingOverwrite.list_member_values() + } + cls.OVERWRITE_OBJ__KV_MAP = cls.OVERWRITE_OBJ__KV_MAP or {} + cls.OVERWRITE_OBJ__KV_MAP[settings] = {**cls.OVERWRITE_OBJ__KV_MAP.get(settings, {}), **setting_name__path_map} + + super().setUpClass() + + @classmethod + def setUpTestData(cls): + storage._STORAGE_OBJ_CACHE = {} + super().setUpTestData() + + def setUp(self): + # 设置请求附加参数 + self.client.common_request_data = { + "bk_app_code": settings.APP_CODE, + "bk_username": settings.SYSTEM_USE_API_ACCOUNT, + } + + for setting_name in PathSettingOverwrite.list_member_values(): + overwrite_path = os.path.join( + self.TMP_DIR, PathSettingOverwrite.get_setting_name__path_suffix_map()[setting_name] + ) + # exist_ok 目录存在直接跳过,不抛出 FileExistsError + os.makedirs(overwrite_path, exist_ok=True) + + if setting_name == PathSettingOverwrite.GSE_CERT_PATH.value: + self.gen_cert_files(overwrite_path) + elif setting_name == PathSettingOverwrite.DOWNLOAD_PATH.value: + shutil.copytree( + os.path.join(settings.BK_SCRIPTS_PATH, "gsectl"), os.path.join(overwrite_path, "gsectl") + ) + + artifact_dir = self.gen_base_artifact_files(os_cpu_choices=self.OS_CPU_CHOICES) + 
self.pack_pkg(artifact_dir=artifact_dir, arcname=self.ARTIFACT_BUILDER_CLASS.BASE_PKG_DIR) + self.ARCHIVE_MD5 = files.md5sum(name=self.ARCHIVE_PATH) + super().setUp() + + @classmethod + def gen_agent_bin(cls, pkg_dir: str, package_os: str): + pkg_bin_dir = os.path.join(pkg_dir, "bin") + # 创建可执行文件 + os.makedirs(pkg_bin_dir) + agent_exes: typing.List[str] = (("gse_agent",), ("gse_agent.exe", "gse_agent_daemon.exe"))[ + package_os == constants.PluginOsType.windows + ] + for exe in agent_exes: + with open(os.path.join(pkg_bin_dir, exe), "w"): + pass + + @classmethod + def gen_base_artifact_files(cls, os_cpu_choices: typing.List[typing.Tuple[str, str]]): + base_artifact_dir = os.path.join(cls.TMP_DIR, uuid.uuid4().hex, cls.ARTIFACT_BUILDER_CLASS.BASE_PKG_DIR) + for package_os, cpu_arch in os_cpu_choices: + pkg_dir = os.path.join( + base_artifact_dir, + cls.ARTIFACT_BUILDER_CLASS.PKG_PATH_DIR_TEMPL.format(os=package_os, cpu_arch=cpu_arch), + ) + cls.gen_agent_bin(pkg_dir, package_os) + + # 版本文件 + with open(os.path.join(base_artifact_dir, "VERSION"), "w", encoding="utf-8") as version_fs: + version_fs.write(VERSION) + + # changelog + with open(os.path.join(base_artifact_dir, "CHANGELOG.md"), "w", encoding="utf-8") as version_fs: + version_fs.write(f"### {VERSION}\nchange") + + # support-files + pkg_conf_env_dir = os.path.join(base_artifact_dir, "support-files", "env") + pkg_conf_tmpls_dir = os.path.join(base_artifact_dir, "support-files", "templates") + + os.makedirs(pkg_conf_env_dir, exist_ok=True) + os.makedirs(pkg_conf_tmpls_dir, exist_ok=True) + + # 写入配置模板 + with open(os.path.join(pkg_conf_tmpls_dir, "gse_agent_conf.template"), "w", encoding="utf-8") as templ_fs: + templ_fs.write(config_templates.GSE_AGENT_CONFIG_TMPL) + + return base_artifact_dir + + @classmethod + def gen_cert_files(cls, cert_path): + for cert_filename in constants.GseCert.list_member_values(): + with open(os.path.join(cert_path, cert_filename), "w", encoding="utf-8"): + pass + + @classmethod + def pack_pkg(cls, artifact_dir: str, arcname: str = "."): + # 插件打包 + with tarfile.open(cls.ARCHIVE_PATH, "w:gz") as tf: + tf.add(artifact_dir, arcname=arcname, recursive=True) + + def tearDown(self): + if os.path.exists(self.TMP_DIR): + shutil.rmtree(self.TMP_DIR) + super().tearDown() + + @classmethod + def tearDownClass(cls): + if os.path.exists(cls.TMP_DIR): + shutil.rmtree(cls.TMP_DIR) + super().tearDownClass() + + +class ProxyBaseTestCase(AgentBaseTestCase): + ARCHIVE_NAME: str = f"gse_ce-{VERSION}.tgz" + OS_CPU_CHOICES = [ + (constants.OsType.LINUX.lower(), constants.CpuType.x86_64), + ] + ARTIFACT_BUILDER_CLASS: typing.Type[proxy.ProxyArtifactBuilder] = proxy.ProxyArtifactBuilder + + def setUp(self): + super().setUp() + self.gen_base_agent_pkg() + + @classmethod + def gen_base_artifact_files(cls, os_cpu_choices: typing.List[typing.Tuple[str, str]]): + base_artifact_dir = os.path.join(cls.TMP_DIR, uuid.uuid4().hex, cls.ARTIFACT_BUILDER_CLASS.BASE_PKG_DIR) + pkg_bin_dir: str = os.path.join(base_artifact_dir, cls.ARTIFACT_BUILDER_CLASS.SERVER_BIN_DIR) + + # 创建可执行文件 + os.makedirs(pkg_bin_dir, exist_ok=True) + for svr_exe in cls.ARTIFACT_BUILDER_CLASS.PROXY_SVR_EXES: + with open(os.path.join(pkg_bin_dir, svr_exe), "w"): + pass + + # 版本文件 + with open(os.path.join(base_artifact_dir, "VERSION"), "w", encoding="utf-8") as version_fs: + version_fs.write(VERSION) + + # changelog + with open(os.path.join(base_artifact_dir, "CHANGELOG.md"), "w", encoding="utf-8") as version_fs: + version_fs.write(f"### {VERSION}\nchange") + + # support-files 
+ pkg_conf_env_dir = os.path.join(base_artifact_dir, "support-files", "env") + pkg_conf_tmpls_dir = os.path.join(base_artifact_dir, "support-files", "templates") + + os.makedirs(pkg_conf_env_dir, exist_ok=True) + os.makedirs(pkg_conf_tmpls_dir, exist_ok=True) + + for templ_name in ["#etc#gse#gse_data_proxy.conf", "#etc#gse#gse_file_proxy.conf"]: + # 写入配置模板 + with open(os.path.join(pkg_conf_tmpls_dir, templ_name), "w", encoding="utf-8") as version_fs: + version_fs.write(config_templates.GSE_FILE_PROXY_CONFIG_TEMPL) + + return base_artifact_dir + + @classmethod + def gen_base_agent_pkg(cls): + for package_os, cpu_arch in cls.OS_CPU_CHOICES: + pkg_dir = os.path.join(cls.TMP_DIR, uuid.uuid4().hex, agent.AgentArtifactBuilder.PKG_DIR) + cls.gen_agent_bin(pkg_dir, package_os) + # 拷贝证书 + shutil.copytree(settings.GSE_CERT_PATH, os.path.join(pkg_dir, "cert")) + + pkg_path: str = os.path.join( + settings.DOWNLOAD_PATH, + agent.AgentArtifactBuilder.BASE_STORAGE_DIR, + package_os, + cpu_arch, + f"{AgentBaseTestCase.ARTIFACT_BUILDER_CLASS.NAME}-{VERSION}.tgz", + ) + os.makedirs(os.path.dirname(pkg_path), exist_ok=True) + with tarfile.open(pkg_path, "w:gz") as tf: + tf.add(pkg_dir, arcname=agent.AgentArtifactBuilder.PKG_DIR, recursive=True) + + if cls.OVERWRITE_VERSION: + shutil.copyfile( + pkg_path, + os.path.join( + os.path.dirname(pkg_path), + f"{AgentBaseTestCase.ARTIFACT_BUILDER_CLASS.NAME}-{cls.OVERWRITE_VERSION}.tgz", + ), + ) diff --git a/apps/backend/tests/components/collections/agent_new/test_run_upgrade_command.py b/apps/backend/tests/components/collections/agent_new/test_run_upgrade_command.py index 3eed69bab..71020e471 100644 --- a/apps/backend/tests/components/collections/agent_new/test_run_upgrade_command.py +++ b/apps/backend/tests/components/collections/agent_new/test_run_upgrade_command.py @@ -37,14 +37,11 @@ class RunUpgradeCommandSuccessTestCase(base.JobBaseTestCase): temp_path="/tmp", package_name="gse_client-linux-x86_64_upgrade.tgz", node_type="agent", - reload_cmd=AGENT_RELOAD_CMD_TEMPLATE.format(setup_path="/usr/local/gse", node_type="agent"), + reload_cmd=AGENT_RELOAD_CMD_TEMPLATE.format(setup_path="/usr/local/gse", node_type="agent", procs="gse_agent"), pkg_cpu_arch="x86_64", ) WINDOWS_TEST_SCRIPTS = WINDOWS_UPGRADE_CMD_TEMPLATE.format( - setup_path="c:\\gse", - temp_path="C:\\tmp", - package_name="gse_client-windows-x86_64_upgrade.tgz", - package_name_tar="gse_client-windows-x86_64_upgrade.tgz".replace("tgz", "tar"), + setup_path="c:\\gse", temp_path="C:\\tmp", package_name="gse_client-windows-x86_64_upgrade.tgz" ) TEST_SCRIPTS_MAP = {constants.OsType.WINDOWS: WINDOWS_TEST_SCRIPTS, constants.OsType.LINUX: LINUX_TEST_SCRIPTS} @@ -90,7 +87,9 @@ def tearDown(self) -> None: temp_path="/tmp", package_name="gse_agent-2.0.0.tgz", node_type="agent", - reload_cmd=AGENT_RELOAD_CMD_TEMPLATE.format(setup_path="/usr/local/gse", node_type="agent"), + reload_cmd=AGENT_RELOAD_CMD_TEMPLATE.format( + setup_path="/usr/local/gse", node_type="agent", procs="gse_agent" + ), pkg_cpu_arch="x86_64", ) for record_result in record[JobApi.fast_execute_script]: diff --git a/apps/backend/tests/management/commands/test_copy_file_to_nginx.py b/apps/backend/tests/management/commands/test_copy_file_to_nginx.py index 8eab7862d..37de378f0 100644 --- a/apps/backend/tests/management/commands/test_copy_file_to_nginx.py +++ b/apps/backend/tests/management/commands/test_copy_file_to_nginx.py @@ -17,8 +17,8 @@ from django.conf import settings from django.core.management import call_command -from 
apps.backend.tests.plugin import utils from apps.core.files import core_files_constants +from apps.mock_data import utils as mock_data_utils from apps.utils import files from apps.utils.unittest.testcase import CustomBaseTestCase @@ -77,5 +77,5 @@ class BkRepoCopyFileToNginxTestCase(CopyFileToNginxTestCase): @classmethod def setUpClass(cls): - mock.patch("apps.core.files.storage.CustomBKRepoStorage", utils.CustomBKRepoMockStorage).start() + mock.patch("apps.core.files.storage.CustomBKRepoStorage", mock_data_utils.CustomBKRepoMockStorage).start() super().setUpClass() diff --git a/apps/backend/tests/plugin/test_manage_commands.py b/apps/backend/tests/plugin/test_manage_commands.py index 766831a62..9a6d19a9b 100644 --- a/apps/backend/tests/plugin/test_manage_commands.py +++ b/apps/backend/tests/plugin/test_manage_commands.py @@ -14,6 +14,7 @@ from django.core.management import call_command from apps.backend.tests.plugin import utils +from apps.mock_data import utils as mock_data_utils from apps.node_man import models @@ -32,9 +33,9 @@ def test_import_command(self): class ImportCommandBkRepoTestCase(ImportCommandTestCase): - OVERWRITE_OBJ__KV_MAP = utils.OVERWRITE_OBJ__KV_MAP + OVERWRITE_OBJ__KV_MAP = mock_data_utils.OVERWRITE_OBJ__KV_MAP @classmethod def setUpClass(cls): - mock.patch("apps.core.files.storage.CustomBKRepoStorage", utils.CustomBKRepoMockStorage).start() + mock.patch("apps.core.files.storage.CustomBKRepoStorage", mock_data_utils.CustomBKRepoMockStorage).start() super().setUpClass() diff --git a/apps/backend/tests/plugin/views/test_plugin_production.py b/apps/backend/tests/plugin/views/test_plugin_production.py index 4ff7b88c4..1a2e785b7 100644 --- a/apps/backend/tests/plugin/views/test_plugin_production.py +++ b/apps/backend/tests/plugin/views/test_plugin_production.py @@ -19,6 +19,7 @@ from django.conf import settings from apps.backend.tests.plugin import utils +from apps.mock_data import utils as mock_data_utils from apps.node_man import constants, models from apps.node_man.models import Packages, ProcControl from apps.utils import files @@ -235,11 +236,11 @@ def test_parse__low_or_same_version(self): class BkRepoTestCase(FileSystemTestCase): FILE_OVERWRITE = True - OVERWRITE_OBJ__KV_MAP = utils.OVERWRITE_OBJ__KV_MAP + OVERWRITE_OBJ__KV_MAP = mock_data_utils.OVERWRITE_OBJ__KV_MAP @classmethod def setUpClass(cls): - mock.patch("apps.core.files.storage.CustomBKRepoStorage", utils.CustomBKRepoMockStorage).start() + mock.patch("apps.core.files.storage.CustomBKRepoStorage", mock_data_utils.CustomBKRepoMockStorage).start() super().setUpClass() def upload_plugin(self, file_local_path: Optional[str] = None) -> Dict[str, Any]: diff --git a/apps/mock_data/utils.py b/apps/mock_data/utils.py index a7b6b1884..68ebc40cd 100644 --- a/apps/mock_data/utils.py +++ b/apps/mock_data/utils.py @@ -12,7 +12,9 @@ from typing import Any, Callable, Dict, Optional, Union import mock +from django.conf import settings +from apps.core.files import base, core_files_constants, storage from apps.utils.enum import EnhanceEnum from apps.utils.unittest.base import CallRecorder @@ -62,3 +64,65 @@ def __init__(self): def generate_magic_mock(cls, mock_return_obj: Optional[MockReturn]): mock_return_obj = mock_return_obj or MockReturn(return_type=MockReturnType.RETURN_VALUE.value, return_obj=None) return mock.MagicMock(**{mock_return_obj.return_type: mock_return_obj.return_obj}) + + +class CustomBKRepoMockStorage(storage.CustomBKRepoStorage): + mock_storage: storage.AdminFileSystemStorage = None + + def __init__( + 
self, + root_path=None, + username=None, + password=None, + project_id=None, + bucket=None, + endpoint_url=None, + file_overwrite=None, + ): + self.mock_storage = storage.AdminFileSystemStorage(file_overwrite=file_overwrite) + super().__init__( + root_path=root_path, + username=username, + password=password, + project_id=project_id, + bucket=bucket, + endpoint_url=endpoint_url, + file_overwrite=file_overwrite, + ) + + def path(self, name): + return self.mock_storage.path(name) + + def _open(self, name, mode="rb"): + return self.mock_storage._open(name, mode) + + def _save(self, name, content): + return self.mock_storage._save(name, content) + + def exists(self, name): + return self.mock_storage.exists(name) + + def size(self, name): + return self.mock_storage.size(name) + + def url(self, name): + return self.mock_storage.url(name) + + def delete(self, name): + return self.mock_storage.delete(name) + + +OVERWRITE_OBJ__KV_MAP = { + settings: { + "FILE_OVERWRITE": True, + "STORAGE_TYPE": core_files_constants.StorageType.BLUEKING_ARTIFACTORY.value, + "BKREPO_USERNAME": "username", + "BKREPO_PASSWORD": "blueking", + "BKREPO_PROJECT": "project", + "BKREPO_BUCKET": "private", + "BKREPO_PUBLIC_BUCKET": "public", + "BKREPO_ENDPOINT_URL": "http://127.0.0.1", + }, + CustomBKRepoMockStorage: {"file_overwrite": True}, + base.StorageFileOverwriteMixin: {"file_overwrite": True}, +} diff --git a/apps/node_man/constants.py b/apps/node_man/constants.py index 831379762..28bb61153 100644 --- a/apps/node_man/constants.py +++ b/apps/node_man/constants.py @@ -483,6 +483,10 @@ def _get_member__alias_map(cls) -> Dict[Enum, str]: f"_(?P({'|'.join(map(str, CPU_TUPLE))})?$)" ) +AGENT_PATH_RE = re.compile( + f"agent_(?P({'|'.join(map(str, PLUGIN_OS_TUPLE))}))" f"_(?P({'|'.join(map(str, CPU_TUPLE))})?$)" +) + # TODO: 部署方式,后续确认 DEPLOY_TYPE_TUPLE = ("package", "config", "agent") DEPLOY_TYPE_CHOICES = tuple_choices(DEPLOY_TYPE_TUPLE) @@ -525,6 +529,12 @@ def _get_member__alias_map(cls) -> Dict[Enum, str]: QUERY_CLOUD_LIMIT = 200 QUERY_HOST_SERVICE_TEMPLATE_LIMIT = 200 VERSION_PATTERN = re.compile(r"[vV]?(\d+\.){1,5}\d+(-rc\d)?$") +# 语义化版本正则,参考:https://semver.org/#is-there-a-suggested-regular-expression-regex-to-check-a-semver-string +SEMANTIC_VERSION_PATTERN = re.compile( + r"^(?P0|[1-9]\d*)\.(?P0|[1-9]\d*)\.(?P0|[1-9]\d*)" + r"(?:-(?P(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?" 
+ r"(?:\+(?P[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$" +) WINDOWS_PORT = 445 WINDOWS_ACCOUNT = "Administrator" LINUX_ACCOUNT = "root" @@ -854,6 +864,52 @@ def _get_member__alias_map(cls) -> Dict[Enum, str]: return {cls.PROXY: "Proxy Agent 模式", cls.AGENT: "Agent 模式"} +class GsePackageCode(EnhanceEnum): + """安装包代号""" + + PROXY = "gse_proxy" + AGENT = "gse_agent" + + @classmethod + def _get_member__alias_map(cls) -> Dict[Enum, str]: + return {cls.PROXY: _("2.0 Proxy Agent 安装包代号"), cls.AGENT: _("2.0 Agent 安装包代号")} + + +class GsePackageDir(EnhanceEnum): + """安装包打包根路径""" + + PROXY = "proxy" + AGENT = "agent" + + @classmethod + def _get_member__alias_map(cls) -> Dict[Enum, str]: + return {cls.PROXY: _("2.0 Proxy 打包根路径"), cls.AGENT: _("2.0 Agent 打包根路径")} + + +class GseCert(EnhanceEnum): + """证书""" + + CA = "gseca.crt" + SERVER_CERT = "gse_server.crt" + SERVER_KEY = "gse_server.key" + AGENT_CERT = "gse_agent.crt" + AGENT_KEY = "gse_agent.key" + API_CLIENT_CERT = "gse_api_client.crt" + API_CLIENT_KEY = "gse_api_client.key" + + @classmethod + def _get_member__alias_map(cls) -> Dict[Enum, str]: + return { + cls.CA: _("证书 CA 内容配置"), + cls.SERVER_CERT: _("Server 侧 CERT 内容配置"), + cls.SERVER_KEY: _("Server 侧 KEY 内容配置"), + cls.AGENT_CERT: _("API 侧 CERT 内容配置, 用于其他服务调用 GSE"), + cls.AGENT_KEY: _("API 侧 KEY 内容配置, 用于其他服务调用 GSE"), + cls.API_CLIENT_CERT: _("Agent 侧 CERT 内容配置, 用于 Agent 链路"), + cls.API_CLIENT_KEY: _("Agent 侧 KEY 内容配置, 用于 Agent 链路"), + } + + ######################################################################################################## # CMDB ######################################################################################################## diff --git a/apps/node_man/models.py b/apps/node_man/models.py index 737afab8f..7407b1327 100644 --- a/apps/node_man/models.py +++ b/apps/node_man/models.py @@ -847,7 +847,7 @@ def install_channel_id__host_objs_map( cls, install_channel_ids: Optional[List[int]] = None ) -> Dict[int, List["Host"]]: # 从数据库 Host 表中批量查询安装通道跳板机器的主机对象 - if install_channel_ids: + if install_channel_ids is not None: install_channels = cls.objects.filter(id__in=install_channel_ids) else: install_channels = cls.objects.all() @@ -864,6 +864,10 @@ def install_channel_id__host_objs_map( filter_key = "inner_ipv6" if basic.is_v6(jump_server_ip) else "inner_ip" filter_host_conditions.append(Q(**{"bk_cloud_id": cloud_id, filter_key: jump_server_ip})) + # 如果筛选条件为空,直接返回 + if not filter_host_conditions: + return result + # 得出跳板机的主机对象 hosts = Host.objects.filter(bk_addressing=constants.CmdbAddressingType.STATIC.value).filter( reduce(operator.or_, filter_host_conditions) diff --git a/apps/utils/files.py b/apps/utils/files.py index b3fb61986..af089fdbb 100644 --- a/apps/utils/files.py +++ b/apps/utils/files.py @@ -13,6 +13,7 @@ import ntpath import os import posixpath +import stat import uuid from typing import IO, Any, Callable, List, Optional @@ -188,3 +189,14 @@ def fetch_file_paths_from_dir( file_paths.append(os.path.join(child_dir_path, file_name)) return file_paths + + +def make_executable(name: str): + """ + 为文件授予可执行权限 + refer: https://stackoverflow.com/questions/12791997/how-do-you-do-a-simple-chmod-x-from-within-python + :param name: + :return: + """ + st = os.stat(name) + os.chmod(name, st.st_mode | stat.S_IEXEC) diff --git a/config/default.py b/config/default.py index 0aaeb3b55..6589a7b7d 100644 --- a/config/default.py +++ b/config/default.py @@ -647,6 +647,7 @@ def get_standard_redis_mode(cls, config_redis_mode: str, default: Optional[str] # 管控平台平台版本 GSE_VERSION = env.GSE_VERSION 
+GSE_CERT_PATH = env.GSE_CERT_PATH # agent 安装路径配置 GSE_AGENT_HOME = os.getenv("BKAPP_GSE_AGENT_HOME") or "/usr/local/gse" GSE_AGENT_LOG_DIR = os.getenv("BKAPP_GSE_AGENT_LOG_DIR") or "/var/log/gse" diff --git a/env/__init__.py b/env/__init__.py index fdf537a4f..6a7348327 100644 --- a/env/__init__.py +++ b/env/__init__.py @@ -69,7 +69,9 @@ # 蓝鲸管控平台 # =============================================================================== # 平台版本 -GSE_VERSION = get_type_env(key="GSE_VERSION", default=constants.GseVersion.V1.value) +GSE_VERSION = get_type_env(key="GSE_VERSION", default=constants.GseVersion.V1.value, _type=str) + +GSE_CERT_PATH = get_type_env(key="GSE_CERT_PATH", default="/data/bkce/cert", _type=str) # =============================================================================== diff --git a/official_plugin/gse_agent/.gitkeep b/official_plugin/gse_agent/.gitkeep new file mode 100644 index 000000000..e69de29bb diff --git a/official_plugin/gse_proxy/.gitkeep b/official_plugin/gse_proxy/.gitkeep new file mode 100644 index 000000000..e69de29bb diff --git a/support-files/kubernetes/helm/bk-nodeman/Chart.lock b/support-files/kubernetes/helm/bk-nodeman/Chart.lock index 4e371ddd6..f50397899 100644 --- a/support-files/kubernetes/helm/bk-nodeman/Chart.lock +++ b/support-files/kubernetes/helm/bk-nodeman/Chart.lock @@ -1,18 +1,18 @@ dependencies: - name: common repository: https://charts.bitnami.com/bitnami - version: 1.16.0 + version: 1.17.1 - name: mysql repository: https://charts.bitnami.com/bitnami - version: 8.8.27 + version: 9.4.5 - name: redis repository: https://charts.bitnami.com/bitnami - version: 16.5.4 + version: 16.13.2 - name: rabbitmq repository: https://charts.bitnami.com/bitnami - version: 8.30.1 + version: 11.2.2 - name: nginx-ingress-controller repository: https://charts.bitnami.com/bitnami - version: 9.1.12 -digest: sha256:ed5d1bccd3c41a765697a425d281ee585080aa9cfdbe17e6e531281903d27365 -generated: "2022-06-09T16:02:09.176183862+08:00" + version: 9.3.24 +digest: sha256:f7eadd0f18f9a44bb22d5f3fd433b4708637c5ab6c9d9058c922c2dce8c030f8 +generated: "2022-12-20T17:05:24.797411+08:00" diff --git a/support-files/kubernetes/helm/bk-nodeman/Chart.yaml b/support-files/kubernetes/helm/bk-nodeman/Chart.yaml index 1bc83cfc9..276a85e10 100644 --- a/support-files/kubernetes/helm/bk-nodeman/Chart.yaml +++ b/support-files/kubernetes/helm/bk-nodeman/Chart.yaml @@ -29,18 +29,18 @@ dependencies: version: 1.x.x repository: https://charts.bitnami.com/bitnami - name: mysql - version: 8.8.27 + version: 9.x.x repository: https://charts.bitnami.com/bitnami condition: mysql.enabled - name: redis - version: 16.5.4 + version: 16.x.x repository: https://charts.bitnami.com/bitnami condition: redis.enabled - name: rabbitmq - version: 8.30.1 + version: 11.x.x repository: https://charts.bitnami.com/bitnami condition: rabbitmq.enabled - name: nginx-ingress-controller - version: 9.1.12 + version: 9.x.x repository: https://charts.bitnami.com/bitnami condition: nginx-ingress-controller.enabled diff --git a/support-files/kubernetes/helm/bk-nodeman/README.md b/support-files/kubernetes/helm/bk-nodeman/README.md index 18a4efd5b..0ece2225e 100644 --- a/support-files/kubernetes/helm/bk-nodeman/README.md +++ b/support-files/kubernetes/helm/bk-nodeman/README.md @@ -46,39 +46,39 @@ $ helm uninstall bk-nodeman -n ### Charts 全局设置 -| 参数 | 描述 | 默认值 | -| ------------------------- | ----------------------------------------------- | ------------------------------------------------------- | +| 参数 | 描述 | 默认值 | 
+|---------------------------|-------------------------------------------------|---------------------------------------------------------| | `global.imageRegistry` | Global Docker image registry | `""` | | `global.imagePullSecrets` | Global Docker registry secret names as an array | `[]` (does not add image pull secrets to deployed pods) | | `global.storageClass` | Global storage class for dynamic provisioning | `""` | -| `global.bkDomain` | 蓝鲸主域名 | `example.com` | -| `global.bkDomain` | 蓝鲸主域名访问协议 | `http` | +| `global.bkDomain` | 蓝鲸主域名 | `example.com` | +| `global.bkDomain` | 蓝鲸主域名访问协议 | `http` | ### Kubernetes 组件公共配置 下列参数用于配置 Kubernetes 组件的公共属性,一份配置作用到每个组件 -| 参数 | 描述 | 默认值 | -| -------------------- | ------------------------------------------------------------ | ------ | -| `nameOverride` | String to partially override common.names.fullname template (will maintain the release name) | `""` | -| `fullnameOverride` | String to fully override common.names.fullname template | `""` | -| `podAnnotations` | Pod annotations | `{}` | -| `commonLabels` | Common labels to add to all bk-nodeman resources. Evaluated as a template | `{}` | -| `commonAnnotations` | Common annotations to add to all bk-nodeman resources . Evaluated as a template | `{}` | -| `podSecurityContext` | A security context defines privilege and access control settings for a Pod or Container. | `{}` | -| `securityContext` | A security context defines privilege and access control settings for a Pod or Container. | `{}` | -| `nodeSelector` | Node labels for all pods assignment | `{}` | -| `tolerations` | Tolerations for all pods assignment | `[]` | -| `volumes` | Optionally specify extra list of additional volumes to the bk-nodeman pod(s) | `[]` | -| `volumeMounts` | Optionally specify extra list of additional volumeMounts for the bk-nodeman secondary container(s) | `[]` | -| `affinity` | Affinity for pod assignment (evaluated as a template) | `{}` | +| 参数 | 描述 | 默认值 | +|----------------------|----------------------------------------------------------------------------------------------------|------| +| `nameOverride` | String to partially override common.names.fullname template (will maintain the release name) | `""` | +| `fullnameOverride` | String to fully override common.names.fullname template | `""` | +| `podAnnotations` | Pod annotations | `{}` | +| `commonLabels` | Common labels to add to all bk-nodeman resources. Evaluated as a template | `{}` | +| `commonAnnotations` | Common annotations to add to all bk-nodeman resources . Evaluated as a template | `{}` | +| `podSecurityContext` | A security context defines privilege and access control settings for a Pod or Container. | `{}` | +| `securityContext` | A security context defines privilege and access control settings for a Pod or Container. 
| `{}` | +| `nodeSelector` | Node labels for all pods assignment | `{}` | +| `tolerations` | Tolerations for all pods assignment | `[]` | +| `volumes` | Optionally specify extra list of additional volumes to the bk-nodeman pod(s) | `[]` | +| `volumeMounts` | Optionally specify extra list of additional volumeMounts for the bk-nodeman secondary container(s) | `[]` | +| `affinity` | Affinity for pod assignment (evaluated as a template) | `{}` | ### ServiceAccount 配置 -| 参数 | 描述 | 默认值 | -| ---------------------------- | ------------------------------------------------------------ | ------ | -| `serviceAccount.annotations` | Annotations for service account | `{}` | -| `serviceAccount.create` | If true, create a service account | `true` | +| 参数 | 描述 | 默认值 | +|------------------------------|-------------------------------------------------------------------------------------------------------------------------|--------| +| `serviceAccount.annotations` | Annotations for service account | `{}` | +| `serviceAccount.create` | If true, create a service account | `true` | | `serviceAccount.name` | The name of the service account to use. If not set and create is true, a name is generated using the fullname template. | `""` | ### 镜像通用配置 @@ -91,45 +91,45 @@ $ helm uninstall bk-nodeman -n * `k8sWaitFor` * `nginx` -| 参数 | 描述 | 默认值 | -| -------------------------------- | ------------ | ------------------------------------- | -| `images..registry` | 镜像仓库 | 详见 `values.yaml` 中各个镜像的默认值 | -| `images..repository` | 镜像名称 | 详见 `values.yaml` 中各个镜像的默认值 | +| 参数 | 描述 | 默认值 | +|----------------------------------|--------|----------------------------| +| `images..registry` | 镜像仓库 | 详见 `values.yaml` 中各个镜像的默认值 | +| `images..repository` | 镜像名称 | 详见 `values.yaml` 中各个镜像的默认值 | | `images..pullPolicy` | 镜像拉取策略 | 详见 `values.yaml` 中各个镜像的默认值 | -| ``images..tag`` | 镜像 tag | 详见 `values.yaml` 中各个镜像的默认值 | +| ``images..tag`` | 镜像 tag | 详见 `values.yaml` 中各个镜像的默认值 | ### nginx-ingress-controller 配置 相关配置请参考 [bitnami/nginx-ingress-controller](https://github.com/bitnami/charts/tree/master/bitnami/) -| 参数 | 描述 | 默认值 | -| ------------------------------------------------- | ------------------------------------------------------------ | ----------- | -| `nginx-ingress-controller.enabled` | 是否部署 nginx ingress controller | `false` | -| `nginx-ingress-controller.kind` | Install as Deployment or DaemonSet | `DaemonSet` | +| 参数 | 描述 | 默认值 | +|---------------------------------------------------|----------------------------------------------------------------------------------|-------------| +| `nginx-ingress-controller.enabled` | 是否部署 nginx ingress controller | `false` | +| `nginx-ingress-controller.kind` | Install as Deployment or DaemonSet | `DaemonSet` | | `nginx-ingress-controller.daemonset.useHostPort` | If `kind` is `DaemonSet`, this will enable `hostPort` for `TCP/80` and `TCP/443` | `true` | -| `nginx-ingress-controller.defaultBackend.enabled` | nginx ingress controller 默认 backend | `false` | +| `nginx-ingress-controller.defaultBackend.enabled` | nginx ingress controller 默认 backend | `false` | ### ingress 配置 -| 参数 | 描述 | 默认值 | -| --------------------- | ------------------------------------------------------------ | ----------------------- | -| `ingress.enabled` | 是否创建 ingress | `true` | -| `ingress.className` | ingress 类 | `nginx` | -| `ingress.annotations` | ingress 标注 | 详见 `values.yaml` | -| `ingress.hostname` | 访问域名 | `bknodeman.example.com` | -| `ingress.paths` | 转发规则 | 详见 `values.yaml` | -| `ingress.selfSigned` | Create a TLS secret for this 
ingress record using self-signed certificates generated by Helm | `false` | -| `ingress.tls` | Enable TLS configuration for the host defined at `ingress.hostname` parameter | `false` | +| 参数 | 描述 | 默认值 | +|-----------------------|-------------------------------------------------------------------------------------------------------|-------------------------| +| `ingress.enabled` | 是否创建 ingress | `true` | +| `ingress.className` | ingress 类 | `nginx` | +| `ingress.annotations` | ingress 标注 | 详见 `values.yaml` | +| `ingress.hostname` | 访问域名 | `bknodeman.example.com` | +| `ingress.paths` | 转发规则 | 详见 `values.yaml` | +| `ingress.selfSigned` | Create a TLS secret for this ingress record using self-signed certificates generated by Helm | `false` | +| `ingress.tls` | Enable TLS configuration for the host defined at `ingress.hostname` parameter | `false` | | `ingress.extraPaths` | An array with additional arbitrary paths that may need to be added to the ingress under the main host | `[]` | -| `ingress.extraTls` | TLS configuration for additional hostname(s) to be covered with this ingress record | `[]` | -| `ingress.secrets` | Custom TLS certificates as secrets | `[]` | +| `ingress.extraTls` | TLS configuration for additional hostname(s) to be covered with this ingress record | `[]` | +| `ingress.secrets` | Custom TLS certificates as secrets | `[]` | ### 模块配置 -| 参数 | 描述 | 默认值 | -| -------------------- | ------------------------------------------------------------ | ------- | -| `saas.enabled` | 是否启用 SaaS | `true` | -| `backend.enabled` | 是否启用后台 | `true` | +| 参数 | 描述 | 默认值 | +|----------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------| +| `saas.enabled` | 是否启用 SaaS | `true` | +| `backend.enabled` | 是否启用后台 | `true` | | `backend.miniDeploy` | 是否启用后台最小化部署
如果管理规模较大,建议该值为 `true`,以保证可用性<br>不同取值所启动的 worker 服务:<br>**`true`**: `backend.commonWorker` `commonPipelineWorker`<br>**`false`**:<br>`backend.dworker`<br>`backend.bworker`<br>`backend.baworker`<br>`backend.pworker`<br>`backend.psworker`
`backend.paworker` | `false` | ### 服务通用配置 @@ -153,20 +153,20 @@ $ helm uninstall bk-nodeman -n - `backend.syncProcess` - `backend.resourceWatch` -| 参数 | 描述 | 默认值 | -| -------------------- | -------------------------------------- | ------------------ | -| `enabled` | 是否启用服务 | `true` | -| `resources.limits` | The resources limits for containers | `{}` | -| `resources.requests` | The requested resources for containers | `{}` | -| `replicaCount` | 服务实例数量 | `1` | -| `command` | 启动命令 | 详见 `values.yaml` | +| 参数 | 描述 | 默认值 | +|----------------------|----------------------------------------|------------------| +| `enabled` | 是否启用服务 | `true` | +| `resources.limits` | The resources limits for containers | `{}` | +| `resources.requests` | The requested resources for containers | `{}` | +| `replicaCount` | 服务实例数量 | `1` | +| `command` | 启动命令 | 详见 `values.yaml` | ### 服务配置 下列参数表示 bk-nodeman 服务除去 `服务通用配置` 的其他配置 -| 参数 | 描述 | 默认值 | -| ------------------------------ | ---------------------------- | ----------- | +| 参数 | 描述 | 默认值 | +|--------------------------------|------------------------------|-------------| | `saas.api.service.type` | SaaS API Service Type | `ClusterIP` | | `saas.api.service.port` | SaaS API Service Port | `10300` | | `saas.web.service.type` | SaaS Web Service Type | `ClusterIP` | @@ -179,11 +179,11 @@ $ helm uninstall bk-nodeman -n 默认将部署 Redis,如果不需要可以关闭。 相关配置请参考 [bitnami/redis](https://github.com/bitnami/charts/blob/master/bitnami/redis) -| 参数 | 描述 | 默认值 | -| --------------------- | ------------------------------------------------------------ | ------------ | +| 参数 | 描述 | 默认值 | +|-----------------------|-----------------------------------------------------------|--------------| | `redis.enabled` | 是否部署 Redis。如果需要使用外部 Redis,设置为 `false` 并配置 `externalRedis` | `true` | -| `redis.auth.enabled` | 是否开启认证 | `true` | -| `redis.auth.password` | Redis 密码 | `bk-nodeman` | +| `redis.auth.enabled` | 是否开启认证 | `true` | +| `redis.auth.password` | Redis 密码 | `bk-nodeman` | > 如果需要持久化 redis 数据,请参考 [bitnami/redis](https://github.com/bitnami/charts/blob/master/bitnami/redis) 配置存储卷 @@ -203,15 +203,15 @@ externalRedis: 默认将部署 MySQL,如果不需要可以关闭。 相关配置请参考 [bitnami/mysql](https://github.com/bitnami/charts/blob/master/bitnami/mysql) -| 参数 | 描述 | 默认值 | -| ---------------------------------------- | ------------------------------------------------------------ | ------------------ | -| `mysql.enabled` | 是否部署 MySQL。如果需要使用外部数据库,设置为 `false` 并配置 `externalMySQL` | `true` | -| `mysql.auth.rootPassword` | `root` 密码 | `bk-nodeman` | -| `mysql.auth.database` | 数据库名称 | `bk-nodeman` | -| `mysql.auth.username` | 数据库用户名 | `bk-nodeman` | -| `mysql.auth.password` | 数据库密码 | `bk-nodeman` | -| `mysql.initdbScripts.grant_user_pms.sql` | 为 `mysql.auth.username` 授权 | 详见 `values.yaml` | -| `primary.configuration` | 在默认配置的基础上,调整字符集及 `max_allowed_packet` | 详见 `values.yaml` | +| 参数 | 描述 | 默认值 | +|------------------------------------------|--------------------------------------------------------|------------------| +| `mysql.enabled` | 是否部署 MySQL。如果需要使用外部数据库,设置为 `false` 并配置 `externalMySQL` | `true` | +| `mysql.auth.rootPassword` | `root` 密码 | `bk-nodeman` | +| `mysql.auth.database` | 数据库名称 | `bk-nodeman` | +| `mysql.auth.username` | 数据库用户名 | `bk-nodeman` | +| `mysql.auth.password` | 数据库密码 | `bk-nodeman` | +| `mysql.initdbScripts.grant_user_pms.sql` | 为 `mysql.auth.username` 授权 | 详见 `values.yaml` | +| `primary.configuration` | 在默认配置的基础上,调整字符集及 `max_allowed_packet` | 详见 `values.yaml` | > 如果需要持久化数据库数据,请参考 
[bitnami/mysql](https://github.com/bitnami/charts/blob/master/bitnami/mysql) 配置存储卷 @@ -232,12 +232,12 @@ externalMySQL: 默认将部署 RabbitMQ,如果不需要可以关闭。 相关配置请参考 [bitnami/rabbitmq](https://github.com/bitnami/charts/blob/master/bitnami/rabbitmq) -| 参数 | 描述 | 默认值 | -| ----------------------------- | ------------------------------------------------------------ | ------------------ | -| `rabbitmq.enabled` | 是否部署 RabbitMQ。如果需要使用外部 RabbitMQ,设置为 `false` 并配置 `externalRabbitMQ` | `true` | -| `rabbitmq.auth.username` | 用户名 | `bk-nodeman` | -| `rabbitmq.auth.password` | 密码 | `bk-nodeman` | -| `rabbitmq.extraConfiguration` | 为 `vhost=bk-nodeman`授权 | 详见 `values.yaml` | +| 参数 | 描述 | 默认值 | +|-------------------------------|--------------------------------------------------------------------|------------------| +| `rabbitmq.enabled` | 是否部署 RabbitMQ。如果需要使用外部 RabbitMQ,设置为 `false` 并配置 `externalRabbitMQ` | `true` | +| `rabbitmq.auth.username` | 用户名 | `bk-nodeman` | +| `rabbitmq.auth.password` | 密码 | `bk-nodeman` | +| `rabbitmq.extraConfiguration` | 为 `vhost=bk-nodeman`授权 | 详见 `values.yaml` | > 如果需要持久化 RabbitMQ 数据,请参考 [bitnami/rabbitmq](https://github.com/bitnami/charts/blob/master/bitnami/rabbitmq) 配置存储卷 @@ -256,25 +256,40 @@ externalRabbitMQ: ### 第三方依赖配置 -| 参数 | 描述 | 默认值 | -| ------------------- | ------------------------------------------------------------ | ------------------------------- | -| `bkPaasUrl` | 蓝鲸 PaaS url(浏览器访问蓝鲸入口) | `http://example.com` | -| `bkLoginUrl` | 蓝鲸 Login url(浏览器跳转登录用的URL前缀) | `http://example.com/login` | -| `bkComponentApiUrl` | 蓝鲸 ESB url,注意集群内外都是统一域名。集群内可以配置域名解析到内网ip | `http://bkapi.example.com` | -| `bkNodemanUrl` | 节点管理浏览器访问地址 | `http://bknodeman.example.com` | -| `bkNodemanApiUrl` | 节点管理后台访问地址 | `http://bk-nodeman-backend-api` | -| `bkJobUrl` | 蓝鲸作业平台浏览器访问地址 | `http://job.example.com` | -| `bkCmdbUrl` | 蓝鲸配置平台浏览器访问地址 | `http://cmdb.example.com` | -| `bkIamUrl` | 蓝鲸权限中心 SaaS 地址 | `http://bkiam.example.com` | -| `bkIamApiUrl` | 蓝鲸权限中心后台 API 地址 | `http://bkiam-api.example.com` | +| 参数 | 描述 | 默认值 | +|---------------------|--------------------------------------------------------|---------------------------------| +| `bkPaasUrl` | 蓝鲸 PaaS url(浏览器访问蓝鲸入口) | `http://example.com` | +| `bkLoginUrl` | 蓝鲸 Login url(浏览器跳转登录用的URL前缀) | `http://example.com/login` | +| `bkComponentApiUrl` | 蓝鲸 ESB url,注意集群内外都是统一域名。集群内可以配置域名解析到内网ip | `http://bkapi.example.com` | +| `bkNodemanUrl` | 节点管理浏览器访问地址 | `http://bknodeman.example.com` | +| `bkNodemanApiUrl` | 节点管理后台访问地址 | `http://bk-nodeman-backend-api` | +| `bkJobUrl` | 蓝鲸作业平台浏览器访问地址 | `http://job.example.com` | +| `bkCmdbUrl` | 蓝鲸配置平台浏览器访问地址 | `http://cmdb.example.com` | +| `bkIamUrl` | 蓝鲸权限中心 SaaS 地址 | `http://bkiam.example.com` | +| `bkIamApiUrl` | 蓝鲸权限中心后台 API 地址 | `http://bkiam-api.example.com` | | `bkRepoUrl` | 蓝鲸制品库浏览器访问域名和后台 API http://bkiam-api.example.com 域名同一个 | `http://bkrepo.example.com` | -### bk-nodman 系统配置 + +### GSE 证书配置 + +如需使用 `Agent 包管理` 功能,则必须配置 GSE 证书 + +| 参数 | 描述 | 默认值 | +|------------------------|----------------------------------------|------| +| gseCert.ca | 证书 CA 内容配置(base64) | `""` | +| gseCert.cert | Server 侧 CERT 内容配置(base64) | `""` | +| gseCert.key | Server 侧 KEY 内容配置(base64) | `""` | +| gseCert.apiClient.cert | API 侧 CERT 内容配置, 用于其他服务调用 GSE(base64) | `""` | +| gseCert.apiClient.key | API 侧 KEY 内容配置, 用于其他服务调用 GSE(base64) | `""` | +| gseCert.agent.cert | Agent 侧 CERT 内容配置, 用于 Agent 链路(base64) | `""` | +| gseCert.agent.key | Agent 侧 KEY 内容配置, 用于 Agent 链路(base64) | `""` | + +### bk-nodeman 系统配置 
用于生成运行环境变量,具体参考:`support-files/kubernetes/helm/bk-nodeman/templates/configmaps/env-configmap.yaml` -| 参数 | 描述 | 默认值 | -| ------------------------------------- |-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| ------------------------------ | +| 参数 | 描述 | 默认值 | +|---------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------------------------| | `config.appCode` | app code | `bk_nodeman` | | `config.appSecret` | app secret | `""` | | `config.bkAppRunEnv` | 运行环境,ce / ee / ieod,影响 gse 端口等配置 | `ce` | @@ -297,6 +312,7 @@ externalRabbitMQ: | `config.bkAppNodemanCallbackUrl` | 节点管理自身模块依赖,后台内网回调地址,渲染时为空取 `{{ .Values.bkNodemanUrl }}/backend` | `""` | | `config.bkAppNodemanOuterCallbackUrl` | 节点管理自身模块依赖,后台外网回调地址,渲染时为空取 `{{ .Values.bkNodemanUrl }}/backend` | `""` | | `config.gseVersion` | 蓝鲸管控平台版本,默认为 `V1`,可选:`V1` `V2` | `V1` | +| `config.gseCertPath` | GSE 本地证书路径,渲染时为空默认取 `/data/bk{{ .Values.config.bkAppRunEnv }}/cert` | `""` | | `config.gseEnableSvrDisCovery` | 蓝鲸管控平台 Agent,AgentXXDir 仅在初次部署有效,后续可以在页面「全局配置」维护。是否启用 GSE 服务探测,默认为 `true` | `true` | | `config.bkAppGseZkHost` | 蓝鲸管控平台 Agent,zk hosts 信息,host:port,多个 hosts 以 `,` 分隔
⚠️ ZK hosts 将作为 Agent 配置,需要保证 Agent 可访问,所以不能使用 k8s service 信息进行配置
如果 zk 通过 k8s 部署,建议通过 NodePort 等方式暴露服务,使用 NodeIP:NodePort 进行配置 | `127.0.0.1:2181` | | `config.bkAppGseZkAuth` | 蓝鲸管控平台 Agent,ZK 认证信息,用户名:密码 | `bkzk:zkpass` | @@ -332,12 +348,12 @@ externalRabbitMQ: > > 优先级:内置环境变量 < extraEnvVarsCM < extraEnvVarsSecret < extraEnvVars < backendExtraEnvVars (仅后台) -| 参数 | 描述 | 默认值 | -| --------------------- | ------------------ | ------ | -| `extraEnvVarsCM` | 额外的 ConfigMap | `""` | -| `extraEnvVarsSecret` | 额外的 Secret | `""` | -| `extraEnvVars` | 额外的环境变量 | `[]` | -| `backendExtraEnvVars` | 额外的后台环境变量 | `[]` | +| 参数 | 描述 | 默认值 | +|-----------------------|---------------|------| +| `extraEnvVarsCM` | 额外的 ConfigMap | `""` | +| `extraEnvVarsSecret` | 额外的 Secret | `""` | +| `extraEnvVars` | 额外的环境变量 | `[]` | +| `backendExtraEnvVars` | 额外的后台环境变量 | `[]` | ## 配置案例 & 建议 diff --git a/support-files/kubernetes/helm/bk-nodeman/templates/NOTES.txt b/support-files/kubernetes/helm/bk-nodeman/templates/NOTES.txt index c3b4ff10f..fcc417bfc 100644 --- a/support-files/kubernetes/helm/bk-nodeman/templates/NOTES.txt +++ b/support-files/kubernetes/helm/bk-nodeman/templates/NOTES.txt @@ -4,8 +4,10 @@ Your release is named {{ .Release.Name }}. 如果集群中已经安装了 IngressController,那么可以通过以下地址访问节点管理: {{- if .Values.ingress.hostname }} +export BK_NODEMAN_URL="{{ .Values.global.bkDomainScheme }}://{{ .Values.ingress.hostname }}" SaaS: {{ .Values.global.bkDomainScheme }}://{{ .Values.ingress.hostname }} {{- else }} +export BK_NODEMAN_URL="{{ .Values.global.bkDomainScheme }}://bknodeman.{{ .Values.global.bkDomain }}" SaaS: {{ .Values.global.bkDomainScheme }}://bknodeman.{{ .Values.global.bkDomain }} {{- end }} @@ -30,7 +32,7 @@ SaaS: {{ .Values.global.bkDomainScheme }}://bknodeman.{{ .Values.global.bkDomain - 获取第一个存活 POD NAME # 后续操作依赖该变量 -export FIRST_RUNNING_POD=$(kubectl get pods \ +export FIRST_RUNNING_POD=$(kubectl get pods -n {{ .Release.Namespace }} \ --selector=app.kubernetes.io/instance={{ .Release.Name }} --field-selector=status.phase=Running \ -o custom-columns=":metadata.name" | sed '/^$/d' | head -n 1 ) @@ -43,6 +45,20 @@ kubectl cp <插件包本地路径> -n {{ .Release.Namespace }} ${FIRST_RUNNING_P # 解析并导入插件包 kubectl exec -n {{ .Release.Namespace }} ${FIRST_RUNNING_POD} -- python manage.py init_official_plugins +- 导入 2.0 Agent/Proxy 包 + +# [Agent] 将 client 包上传到容器指定目录,可以重复该步骤,导入多个版本的 client 包,client 包一般格式为:gse_agent_ce-2.0.0.tgz +kubectl cp -n {{ .Release.Namespace }} ${FIRST_RUNNING_POD}:/app/official_plugin/gse_agent/ + +# [Proxy] 将 server 包上传到容器指定目录,可以重复该步骤,导入多个版本的 server 包,server 包一般格式为:gse_ce-2.0.0.tgz +# 注意:导入 server 包依赖同版本的 client 包,请确保 client 包已上传或已导入 +kubectl cp -n {{ .Release.Namespace }} ${FIRST_RUNNING_POD}:/app/official_plugin/gse_proxy/ + +# 解析并导入 Agent/Proxy +# -o --overwrite_version 版本号,用于覆盖原始制品内的版本信息,`stable` 为内置版本 +# 修改内置版本:${BK_NODEMAN_URL}/admin_nodeman/node_man/globalsettings/ 新建或修改 `GSE_AGENT2_VERSION`,默认为 `"stable"` +kubectl exec -n {{ .Release.Namespace }} ${FIRST_RUNNING_POD} -- python manage.py init_agents -o stable + - 同步主机相关数据 diff --git a/support-files/kubernetes/helm/bk-nodeman/templates/_helpers.tpl b/support-files/kubernetes/helm/bk-nodeman/templates/_helpers.tpl index 362a3ba04..08ea39f76 100644 --- a/support-files/kubernetes/helm/bk-nodeman/templates/_helpers.tpl +++ b/support-files/kubernetes/helm/bk-nodeman/templates/_helpers.tpl @@ -159,6 +159,38 @@ processType: "metrics" {{- end -}} +{{/* +返回证书路径 +*/}} +{{- define "bk-nodeman.env.gseCertPath" -}} +{{ .Values.config.gseCertPath | default ( printf "/data/bk%s/cert" .Values.config.bkAppRunEnv ) }} +{{- end -}} + +{{/* 
+通用卷声明 +*/}} +{{- define "bk-nodeman.volumes" -}} +- name: gse-cert + configMap: + name: "{{ include "bk-nodeman.fullname" . }}-gse-cert-configmap" +{{- if .Values.volumes }} +{{ toYaml .Values.volumes }} +{{- end }} +{{- end }} + + +{{/* +通用卷挂载声明 +*/}} +{{- define "bk-nodeman.volumeMounts" -}} +- name: gse-cert + mountPath: {{ include "bk-nodeman.env.gseCertPath" . }} +{{- if .Values.volumeMounts }} +{{ toYaml .Values.volumeMounts }} +{{- end }} +{{- end }} + + {{/* 后台环境变量 */}} @@ -224,4 +256,6 @@ initContainers: image: "{{ .Values.global.imageRegistry | default .Values.images.k8sWaitFor.registry }}/{{ .Values.images.k8sWaitFor.repository }}:{{ .Values.images.k8sWaitFor.tag }}" imagePullPolicy: "{{ .Values.images.k8sWaitFor.pullPolicy }}" args: ["job", "{{ include "bk-nodeman.migrate-job.file-sync" . }}"] + volumeMounts: + {{- include "bk-nodeman.volumeMounts" . | nindent 6 }} {{- end }} diff --git a/support-files/kubernetes/helm/bk-nodeman/templates/backend-api/deployment.yaml b/support-files/kubernetes/helm/bk-nodeman/templates/backend-api/deployment.yaml index 0da553848..bedb97450 100644 --- a/support-files/kubernetes/helm/bk-nodeman/templates/backend-api/deployment.yaml +++ b/support-files/kubernetes/helm/bk-nodeman/templates/backend-api/deployment.yaml @@ -54,10 +54,8 @@ spec: {{- else }} - "bin/hooks/start_cmds/start-backend-api" {{- end }} - {{- with .Values.volumeMounts }} volumeMounts: - {{- toYaml . | nindent 12 }} - {{- end }} + {{- include "bk-nodeman.volumeMounts" . | nindent 12 }} {{- include "bk-nodeman.backend.env" . | nindent 10 }} ports: - name: http @@ -73,6 +71,8 @@ spec: port: {{ $svcPort }} resources: {{- toYaml $moduleConf.resources | nindent 12 }} + volumes: + {{- include "bk-nodeman.volumes" . | nindent 8 }} {{- with .Values.nodeSelector }} nodeSelector: {{- toYaml . | nindent 8 }} @@ -81,10 +81,6 @@ spec: affinity: {{- toYaml . | nindent 8 }} {{- end }} - {{- with .Values.volumes }} - volumes: - {{- toYaml . | nindent 8 }} - {{- end }} {{- with .Values.tolerations }} tolerations: {{- toYaml . | nindent 8 }} diff --git a/support-files/kubernetes/helm/bk-nodeman/templates/backend-celery/baworker-deployment.yaml b/support-files/kubernetes/helm/bk-nodeman/templates/backend-celery/baworker-deployment.yaml index df1d4aff0..11fee4c7a 100644 --- a/support-files/kubernetes/helm/bk-nodeman/templates/backend-celery/baworker-deployment.yaml +++ b/support-files/kubernetes/helm/bk-nodeman/templates/backend-celery/baworker-deployment.yaml @@ -53,13 +53,13 @@ spec: {{- else }} - "bin/hooks/start_cmds/celery/start-baworker" {{- end }} - {{- with .Values.volumeMounts }} volumeMounts: - {{- toYaml . | nindent 12 }} - {{- end }} + {{- include "bk-nodeman.volumeMounts" . | nindent 12 }} {{- include "bk-nodeman.backend.env" . | nindent 10 }} resources: {{- toYaml $moduleConf.resources | nindent 12 }} + volumes: + {{- include "bk-nodeman.volumes" . | nindent 8 }} {{- with .Values.nodeSelector }} nodeSelector: {{- toYaml . | nindent 8 }} @@ -68,10 +68,6 @@ spec: affinity: {{- toYaml . | nindent 8 }} {{- end }} - {{- with .Values.volumes }} - volumes: - {{- toYaml . | nindent 8 }} - {{- end }} {{- with .Values.tolerations }} tolerations: {{- toYaml . 
| nindent 8 }} diff --git a/support-files/kubernetes/helm/bk-nodeman/templates/backend-celery/bworker-deployment.yaml b/support-files/kubernetes/helm/bk-nodeman/templates/backend-celery/bworker-deployment.yaml index db10fcd78..8425b6f7f 100644 --- a/support-files/kubernetes/helm/bk-nodeman/templates/backend-celery/bworker-deployment.yaml +++ b/support-files/kubernetes/helm/bk-nodeman/templates/backend-celery/bworker-deployment.yaml @@ -53,13 +53,13 @@ spec: {{- else }} - "bin/hooks/start_cmds/celery/start-bworker" {{- end }} - {{- with .Values.volumeMounts }} volumeMounts: - {{- toYaml . | nindent 12 }} - {{- end }} + {{- include "bk-nodeman.volumeMounts" . | nindent 12 }} {{- include "bk-nodeman.backend.env" . | nindent 10 }} resources: {{- toYaml $moduleConf.resources | nindent 12 }} + volumes: + {{- include "bk-nodeman.volumes" . | nindent 8 }} {{- with .Values.nodeSelector }} nodeSelector: {{- toYaml . | nindent 8 }} @@ -68,10 +68,6 @@ spec: affinity: {{- toYaml . | nindent 8 }} {{- end }} - {{- with .Values.volumes }} - volumes: - {{- toYaml . | nindent 8 }} - {{- end }} {{- with .Values.tolerations }} tolerations: {{- toYaml . | nindent 8 }} diff --git a/support-files/kubernetes/helm/bk-nodeman/templates/backend-celery/celery-beat-deployment.yaml b/support-files/kubernetes/helm/bk-nodeman/templates/backend-celery/celery-beat-deployment.yaml index e6f716ba3..7ed20a99b 100644 --- a/support-files/kubernetes/helm/bk-nodeman/templates/backend-celery/celery-beat-deployment.yaml +++ b/support-files/kubernetes/helm/bk-nodeman/templates/backend-celery/celery-beat-deployment.yaml @@ -53,13 +53,13 @@ spec: {{- else }} - "bin/hooks/start_cmds/celery/start-beat" {{- end }} - {{- with .Values.volumeMounts }} volumeMounts: - {{- toYaml . | nindent 12 }} - {{- end }} + {{- include "bk-nodeman.volumeMounts" . | nindent 12 }} {{- include "bk-nodeman.backend.env" . | nindent 10 }} resources: {{- toYaml $moduleConf.resources | nindent 12 }} + volumes: + {{- include "bk-nodeman.volumes" . | nindent 8 }} {{- with .Values.nodeSelector }} nodeSelector: {{- toYaml . | nindent 8 }} @@ -68,10 +68,6 @@ spec: affinity: {{- toYaml . | nindent 8 }} {{- end }} - {{- with .Values.volumes }} - volumes: - {{- toYaml . | nindent 8 }} - {{- end }} {{- with .Values.tolerations }} tolerations: {{- toYaml . | nindent 8 }} diff --git a/support-files/kubernetes/helm/bk-nodeman/templates/backend-celery/common-pworker-deployment.yaml b/support-files/kubernetes/helm/bk-nodeman/templates/backend-celery/common-pworker-deployment.yaml index a94956ace..dec85487a 100644 --- a/support-files/kubernetes/helm/bk-nodeman/templates/backend-celery/common-pworker-deployment.yaml +++ b/support-files/kubernetes/helm/bk-nodeman/templates/backend-celery/common-pworker-deployment.yaml @@ -53,13 +53,13 @@ spec: {{- else }} - "bin/hooks/start_cmds/celery/start-common-pipeline-worker" {{- end }} - {{- with .Values.volumeMounts }} volumeMounts: - {{- toYaml . | nindent 12 }} - {{- end }} + {{- include "bk-nodeman.volumeMounts" . | nindent 12 }} {{- include "bk-nodeman.backend.env" . | nindent 10 }} resources: {{- toYaml $moduleConf.resources | nindent 12 }} + volumes: + {{- include "bk-nodeman.volumes" . | nindent 8 }} {{- with .Values.nodeSelector }} nodeSelector: {{- toYaml . | nindent 8 }} @@ -68,10 +68,6 @@ spec: affinity: {{- toYaml . | nindent 8 }} {{- end }} - {{- with .Values.volumes }} - volumes: - {{- toYaml . | nindent 8 }} - {{- end }} {{- with .Values.tolerations }} tolerations: {{- toYaml . 
| nindent 8 }} diff --git a/support-files/kubernetes/helm/bk-nodeman/templates/backend-celery/common-worker-deployment.yaml b/support-files/kubernetes/helm/bk-nodeman/templates/backend-celery/common-worker-deployment.yaml index 89463bcf5..c57b88579 100644 --- a/support-files/kubernetes/helm/bk-nodeman/templates/backend-celery/common-worker-deployment.yaml +++ b/support-files/kubernetes/helm/bk-nodeman/templates/backend-celery/common-worker-deployment.yaml @@ -53,13 +53,13 @@ spec: {{- else }} - "bin/hooks/start_cmds/celery/start-common-worker" {{- end }} - {{- with .Values.volumeMounts }} volumeMounts: - {{- toYaml . | nindent 12 }} - {{- end }} + {{- include "bk-nodeman.volumeMounts" . | nindent 12 }} {{- include "bk-nodeman.backend.env" . | nindent 10 }} resources: {{- toYaml $moduleConf.resources | nindent 12 }} + volumes: + {{- include "bk-nodeman.volumes" . | nindent 8 }} {{- with .Values.nodeSelector }} nodeSelector: {{- toYaml . | nindent 8 }} @@ -68,10 +68,6 @@ spec: affinity: {{- toYaml . | nindent 8 }} {{- end }} - {{- with .Values.volumes }} - volumes: - {{- toYaml . | nindent 8 }} - {{- end }} {{- with .Values.tolerations }} tolerations: {{- toYaml . | nindent 8 }} diff --git a/support-files/kubernetes/helm/bk-nodeman/templates/backend-celery/dworker-deployment.yaml b/support-files/kubernetes/helm/bk-nodeman/templates/backend-celery/dworker-deployment.yaml index 3e908fd62..3e206ce18 100644 --- a/support-files/kubernetes/helm/bk-nodeman/templates/backend-celery/dworker-deployment.yaml +++ b/support-files/kubernetes/helm/bk-nodeman/templates/backend-celery/dworker-deployment.yaml @@ -53,13 +53,13 @@ spec: {{- else }} - "bin/hooks/start_cmds/celery/start-dworker" {{- end }} - {{- with .Values.volumeMounts }} volumeMounts: - {{- toYaml . | nindent 12 }} - {{- end }} + {{- include "bk-nodeman.volumeMounts" . | nindent 12 }} {{- include "bk-nodeman.backend.env" . | nindent 10 }} resources: {{- toYaml $moduleConf.resources | nindent 12 }} + volumes: + {{- include "bk-nodeman.volumes" . | nindent 8 }} {{- with .Values.nodeSelector }} nodeSelector: {{- toYaml . | nindent 8 }} @@ -68,10 +68,6 @@ spec: affinity: {{- toYaml . | nindent 8 }} {{- end }} - {{- with .Values.volumes }} - volumes: - {{- toYaml . | nindent 8 }} - {{- end }} {{- with .Values.tolerations }} tolerations: {{- toYaml . | nindent 8 }} diff --git a/support-files/kubernetes/helm/bk-nodeman/templates/backend-celery/paworker-deployment.yaml b/support-files/kubernetes/helm/bk-nodeman/templates/backend-celery/paworker-deployment.yaml index aa6fc9d22..c6511cf0b 100644 --- a/support-files/kubernetes/helm/bk-nodeman/templates/backend-celery/paworker-deployment.yaml +++ b/support-files/kubernetes/helm/bk-nodeman/templates/backend-celery/paworker-deployment.yaml @@ -53,13 +53,13 @@ spec: {{- else }} - "bin/hooks/start_cmds/celery/start-paworker" {{- end }} - {{- with .Values.volumeMounts }} volumeMounts: - {{- toYaml . | nindent 12 }} - {{- end }} + {{- include "bk-nodeman.volumeMounts" . | nindent 12 }} {{- include "bk-nodeman.backend.env" . | nindent 10 }} resources: {{- toYaml $moduleConf.resources | nindent 12 }} + volumes: + {{- include "bk-nodeman.volumes" . | nindent 8 }} {{- with .Values.nodeSelector }} nodeSelector: {{- toYaml . | nindent 8 }} @@ -68,10 +68,6 @@ spec: affinity: {{- toYaml . | nindent 8 }} {{- end }} - {{- with .Values.volumes }} - volumes: - {{- toYaml . | nindent 8 }} - {{- end }} {{- with .Values.tolerations }} tolerations: {{- toYaml . 
| nindent 8 }} diff --git a/support-files/kubernetes/helm/bk-nodeman/templates/backend-celery/psworker-deployment.yaml b/support-files/kubernetes/helm/bk-nodeman/templates/backend-celery/psworker-deployment.yaml index c4215b50e..fea1acc9e 100644 --- a/support-files/kubernetes/helm/bk-nodeman/templates/backend-celery/psworker-deployment.yaml +++ b/support-files/kubernetes/helm/bk-nodeman/templates/backend-celery/psworker-deployment.yaml @@ -53,13 +53,13 @@ spec: {{- else }} - "bin/hooks/start_cmds/celery/start-psworker" {{- end }} - {{- with .Values.volumeMounts }} volumeMounts: - {{- toYaml . | nindent 12 }} - {{- end }} + {{- include "bk-nodeman.volumeMounts" . | nindent 12 }} {{- include "bk-nodeman.backend.env" . | nindent 10 }} resources: {{- toYaml $moduleConf.resources | nindent 12 }} + volumes: + {{- include "bk-nodeman.volumes" . | nindent 8 }} {{- with .Values.nodeSelector }} nodeSelector: {{- toYaml . | nindent 8 }} @@ -68,10 +68,6 @@ spec: affinity: {{- toYaml . | nindent 8 }} {{- end }} - {{- with .Values.volumes }} - volumes: - {{- toYaml . | nindent 8 }} - {{- end }} {{- with .Values.tolerations }} tolerations: {{- toYaml . | nindent 8 }} diff --git a/support-files/kubernetes/helm/bk-nodeman/templates/backend-celery/pworker-deployment.yaml b/support-files/kubernetes/helm/bk-nodeman/templates/backend-celery/pworker-deployment.yaml index f3c8addb6..1ee5538f2 100644 --- a/support-files/kubernetes/helm/bk-nodeman/templates/backend-celery/pworker-deployment.yaml +++ b/support-files/kubernetes/helm/bk-nodeman/templates/backend-celery/pworker-deployment.yaml @@ -53,13 +53,13 @@ spec: {{- else }} - "bin/hooks/start_cmds/celery/start-pworker" {{- end }} - {{- with .Values.volumeMounts }} volumeMounts: - {{- toYaml . | nindent 12 }} - {{- end }} + {{- include "bk-nodeman.volumeMounts" . | nindent 12 }} {{- include "bk-nodeman.backend.env" . | nindent 10 }} resources: {{- toYaml $moduleConf.resources | nindent 12 }} + volumes: + {{- include "bk-nodeman.volumes" . | nindent 8 }} {{- with .Values.nodeSelector }} nodeSelector: {{- toYaml . | nindent 8 }} @@ -68,10 +68,6 @@ spec: affinity: {{- toYaml . | nindent 8 }} {{- end }} - {{- with .Values.volumes }} - volumes: - {{- toYaml . | nindent 8 }} - {{- end }} {{- with .Values.tolerations }} tolerations: {{- toYaml . | nindent 8 }} diff --git a/support-files/kubernetes/helm/bk-nodeman/templates/configmaps/env-configmap.yaml b/support-files/kubernetes/helm/bk-nodeman/templates/configmaps/env-configmap.yaml index d43d8d73e..3a8c26f3a 100644 --- a/support-files/kubernetes/helm/bk-nodeman/templates/configmaps/env-configmap.yaml +++ b/support-files/kubernetes/helm/bk-nodeman/templates/configmaps/env-configmap.yaml @@ -60,6 +60,7 @@ data: BKAPP_NODEMAN_OUTER_CALLBACK_URL: "{{ .Values.config.bkAppNodemanOuterCallbackUrl | default ( printf "%s/%s" .Values.bkNodemanUrl "backend" ) }}" GSE_VERSION: "{{ .Values.config.gseVersion }}" + GSE_CERT_PATH: {{ include "bk-nodeman.env.gseCertPath" . 
}}
   GSE_ENABLE_SVR_DISCOVERY: "{{ .Values.config.gseEnableSvrDisCovery }}"
   BKAPP_GSE_ZK_HOST: "{{ .Values.config.bkAppGseZkHost }}"
   BKAPP_GSE_ZK_AUTH: "{{ .Values.config.bkAppGseZkAuth }}"
diff --git a/support-files/kubernetes/helm/bk-nodeman/templates/configmaps/gse-cert-configmap.yaml b/support-files/kubernetes/helm/bk-nodeman/templates/configmaps/gse-cert-configmap.yaml
new file mode 100644
index 000000000..14fe10ffd
--- /dev/null
+++ b/support-files/kubernetes/helm/bk-nodeman/templates/configmaps/gse-cert-configmap.yaml
@@ -0,0 +1,37 @@
+{{- $fullName := printf "%s-%s" (include "bk-nodeman.fullname" .) "gse-cert-configmap" -}}
+apiVersion: v1
+kind: ConfigMap
+metadata:
+  name: "{{ $fullName }}"
+  labels:
+    {{- include "bk-nodeman.labels" . | nindent 4 }}
+    {{- if .Values.commonLabels }}
+    {{- include "common.tplvalues.render" (dict "value" .Values.commonLabels "context" $) | nindent 4 }}
+    {{- end }}
+  {{- with .Values.commonAnnotations }}
+  annotations:
+    {{- toYaml . | nindent 4 }}
+  {{- end }}
+data:
+  {{- if .Values.gseCert.ca }}
+  gseca.crt: {{ .Values.gseCert.ca | b64dec | quote }}
+  {{- end }}
+  {{- if .Values.gseCert.cert }}
+  gse_server.crt: {{ .Values.gseCert.cert | b64dec | quote }}
+  {{- end }}
+  {{- if .Values.gseCert.key }}
+  gse_server.key: {{ .Values.gseCert.key | b64dec | quote }}
+  {{- end }}
+  {{- if .Values.gseCert.agent.cert }}
+  gse_agent.crt: {{ .Values.gseCert.agent.cert | b64dec | quote }}
+  {{- end }}
+  {{- if .Values.gseCert.agent.key }}
+  gse_agent.key: {{ .Values.gseCert.agent.key | b64dec | quote }}
+  {{- end }}
+  {{- if .Values.gseCert.apiClient.cert }}
+  gse_api_client.crt: {{ .Values.gseCert.apiClient.cert | b64dec | quote }}
+  {{- end }}
+  {{- if .Values.gseCert.apiClient.key }}
+  gse_api_client.key: {{ .Values.gseCert.apiClient.key | b64dec | quote }}
+  {{- end }}
+  from: "{{.Values.config.appCode}}"
diff --git a/support-files/kubernetes/helm/bk-nodeman/templates/migrate-jobs/db-migrate-job.yaml b/support-files/kubernetes/helm/bk-nodeman/templates/migrate-jobs/db-migrate-job.yaml
index 07f0843f6..ce7234ab5 100644
--- a/support-files/kubernetes/helm/bk-nodeman/templates/migrate-jobs/db-migrate-job.yaml
+++ b/support-files/kubernetes/helm/bk-nodeman/templates/migrate-jobs/db-migrate-job.yaml
@@ -42,16 +42,12 @@ spec:
           args:
             - "bin/hooks/migrate-db"
           {{- include "bk-nodeman.saas.env" . | nindent 10 }}
-          {{- with .Values.volumeMounts }}
           volumeMounts:
-            {{- toYaml . | nindent 12 }}
-          {{- end }}
+            {{- include "bk-nodeman.volumeMounts" . | nindent 12 }}
           resources:
             {{- toYaml .Values.migrateJob.db.resources | nindent 12 }}
-      {{- with .Values.volumes }}
       volumes:
-        {{- toYaml . | nindent 8 }}
-      {{- end }}
+        {{- include "bk-nodeman.volumes" . | nindent 8 }}
       {{- with .Values.nodeSelector }}
       nodeSelector:
         {{- toYaml . | nindent 8 }}
diff --git a/support-files/kubernetes/helm/bk-nodeman/templates/migrate-jobs/file-sync-migrate-job.yaml b/support-files/kubernetes/helm/bk-nodeman/templates/migrate-jobs/file-sync-migrate-job.yaml
index 2851b91a6..7d9db634f 100644
--- a/support-files/kubernetes/helm/bk-nodeman/templates/migrate-jobs/file-sync-migrate-job.yaml
+++ b/support-files/kubernetes/helm/bk-nodeman/templates/migrate-jobs/file-sync-migrate-job.yaml
@@ -49,16 +49,12 @@ spec:
           args:
             - "bin/hooks/file-sync"
           {{- include "bk-nodeman.backend.env" . | nindent 10 }}
-          {{- with .Values.volumeMounts }}
           volumeMounts:
-            {{- toYaml . | nindent 12 }}
-          {{- end }}
+            {{- include "bk-nodeman.volumeMounts" . | nindent 12 }}
           resources:
             {{- toYaml .Values.migrateJob.fileSync.resources | nindent 12 }}
-      {{- with .Values.volumes }}
       volumes:
-        {{- toYaml . | nindent 8 }}
-      {{- end }}
+        {{- include "bk-nodeman.volumes" . | nindent 8 }}
       {{- with .Values.nodeSelector }}
       nodeSelector:
         {{- toYaml . | nindent 8 }}
diff --git a/support-files/kubernetes/helm/bk-nodeman/templates/saas-api/deployment.yaml b/support-files/kubernetes/helm/bk-nodeman/templates/saas-api/deployment.yaml
index 4040aad45..8337a801f 100644
--- a/support-files/kubernetes/helm/bk-nodeman/templates/saas-api/deployment.yaml
+++ b/support-files/kubernetes/helm/bk-nodeman/templates/saas-api/deployment.yaml
@@ -58,10 +58,8 @@ spec:
           {{- else }}
             - "bin/hooks/start_cmds/start-saas-api"
           {{- end }}
-          {{- with .Values.volumeMounts }}
           volumeMounts:
-            {{- toYaml . | nindent 12 }}
-          {{- end }}
+            {{- include "bk-nodeman.volumeMounts" . | nindent 12 }}
           {{- include "bk-nodeman.saas.env" . | nindent 10 }}
           ports:
             - name: http
@@ -77,6 +75,8 @@ spec:
               port: {{ $svcPort }}
           resources:
             {{- toYaml $moduleConf.resources | nindent 12 }}
+      volumes:
+        {{- include "bk-nodeman.volumes" . | nindent 8 }}
       {{- with .Values.nodeSelector }}
       nodeSelector:
         {{- toYaml . | nindent 8 }}
@@ -85,10 +85,6 @@ spec:
       affinity:
         {{- toYaml . | nindent 8 }}
       {{- end }}
-      {{- with .Values.volumes }}
-      volumes:
-        {{- toYaml . | nindent 8 }}
-      {{- end }}
       {{- with .Values.tolerations }}
       tolerations:
         {{- toYaml . | nindent 8 }}
diff --git a/support-files/kubernetes/helm/bk-nodeman/templates/sync-deployments/sync-host-deployment.yaml b/support-files/kubernetes/helm/bk-nodeman/templates/sync-deployments/sync-host-deployment.yaml
index 91c8e415f..43926c44e 100644
--- a/support-files/kubernetes/helm/bk-nodeman/templates/sync-deployments/sync-host-deployment.yaml
+++ b/support-files/kubernetes/helm/bk-nodeman/templates/sync-deployments/sync-host-deployment.yaml
@@ -47,13 +47,13 @@ spec:
           command: ["/bin/bash", "-c"]
           args:
             - "{{ $moduleConf.command }}"
-          {{- with .Values.volumeMounts }}
           volumeMounts:
-            {{- toYaml . | nindent 12 }}
-          {{- end }}
+            {{- include "bk-nodeman.volumeMounts" . | nindent 12 }}
           {{- include "bk-nodeman.backend.env" . | nindent 10 }}
           resources:
             {{- toYaml $moduleConf.resources | nindent 12 }}
+      volumes:
+        {{- include "bk-nodeman.volumes" . | nindent 8 }}
       {{- with .Values.nodeSelector }}
       nodeSelector:
         {{- toYaml . | nindent 8 }}
@@ -62,10 +62,6 @@ spec:
      affinity:
        {{- toYaml . | nindent 8 }}
      {{- end }}
-      {{- with .Values.volumes }}
-      volumes:
-        {{- toYaml . | nindent 8 }}
-      {{- end }}
      {{- with .Values.tolerations }}
      tolerations:
        {{- toYaml . | nindent 8 }}
diff --git a/support-files/kubernetes/helm/bk-nodeman/templates/sync-deployments/sync-host-re-deployment.yaml b/support-files/kubernetes/helm/bk-nodeman/templates/sync-deployments/sync-host-re-deployment.yaml
index 919526ead..3a855b035 100644
--- a/support-files/kubernetes/helm/bk-nodeman/templates/sync-deployments/sync-host-re-deployment.yaml
+++ b/support-files/kubernetes/helm/bk-nodeman/templates/sync-deployments/sync-host-re-deployment.yaml
@@ -47,13 +47,13 @@ spec:
           command: ["/bin/bash", "-c"]
           args:
             - "{{ $moduleConf.command }}"
-          {{- with .Values.volumeMounts }}
           volumeMounts:
-            {{- toYaml . | nindent 12 }}
-          {{- end }}
+            {{- include "bk-nodeman.volumeMounts" . | nindent 12 }}
           {{- include "bk-nodeman.backend.env" . | nindent 10 }}
           resources:
             {{- toYaml $moduleConf.resources | nindent 12 }}
+      volumes:
+        {{- include "bk-nodeman.volumes" . | nindent 8 }}
       {{- with .Values.nodeSelector }}
       nodeSelector:
         {{- toYaml . | nindent 8 }}
@@ -62,10 +62,6 @@ spec:
      affinity:
        {{- toYaml . | nindent 8 }}
      {{- end }}
-      {{- with .Values.volumes }}
-      volumes:
-        {{- toYaml . | nindent 8 }}
-      {{- end }}
      {{- with .Values.tolerations }}
      tolerations:
        {{- toYaml . | nindent 8 }}
diff --git a/support-files/kubernetes/helm/bk-nodeman/templates/sync-deployments/sync-process-deployment.yaml b/support-files/kubernetes/helm/bk-nodeman/templates/sync-deployments/sync-process-deployment.yaml
index 56a43a26a..971774c78 100644
--- a/support-files/kubernetes/helm/bk-nodeman/templates/sync-deployments/sync-process-deployment.yaml
+++ b/support-files/kubernetes/helm/bk-nodeman/templates/sync-deployments/sync-process-deployment.yaml
@@ -47,13 +47,13 @@ spec:
           command: ["/bin/bash", "-c"]
           args:
             - "{{ $moduleConf.command }}"
-          {{- with .Values.volumeMounts }}
           volumeMounts:
-            {{- toYaml . | nindent 12 }}
-          {{- end }}
+            {{- include "bk-nodeman.volumeMounts" . | nindent 12 }}
           {{- include "bk-nodeman.backend.env" . | nindent 10 }}
           resources:
             {{- toYaml $moduleConf.resources | nindent 12 }}
+      volumes:
+        {{- include "bk-nodeman.volumes" . | nindent 8 }}
       {{- with .Values.nodeSelector }}
       nodeSelector:
         {{- toYaml . | nindent 8 }}
@@ -62,10 +62,6 @@ spec:
      affinity:
        {{- toYaml . | nindent 8 }}
      {{- end }}
-      {{- with .Values.volumes }}
-      volumes:
-        {{- toYaml . | nindent 8 }}
-      {{- end }}
      {{- with .Values.tolerations }}
      tolerations:
        {{- toYaml . | nindent 8 }}
diff --git a/support-files/kubernetes/helm/bk-nodeman/templates/sync-deployments/sync-watch-deployment.yaml b/support-files/kubernetes/helm/bk-nodeman/templates/sync-deployments/sync-watch-deployment.yaml
index 505ecec58..aafe68061 100644
--- a/support-files/kubernetes/helm/bk-nodeman/templates/sync-deployments/sync-watch-deployment.yaml
+++ b/support-files/kubernetes/helm/bk-nodeman/templates/sync-deployments/sync-watch-deployment.yaml
@@ -47,13 +47,13 @@ spec:
           command: ["/bin/bash", "-c"]
           args:
             - "{{ $moduleConf.command }}"
-          {{- with .Values.volumeMounts }}
           volumeMounts:
-            {{- toYaml . | nindent 12 }}
-          {{- end }}
+            {{- include "bk-nodeman.volumeMounts" . | nindent 12 }}
           {{- include "bk-nodeman.backend.env" . | nindent 10 }}
           resources:
             {{- toYaml $moduleConf.resources | nindent 12 }}
+      volumes:
+        {{- include "bk-nodeman.volumes" . | nindent 8 }}
       {{- with .Values.nodeSelector }}
       nodeSelector:
         {{- toYaml . | nindent 8 }}
@@ -62,10 +62,6 @@ spec:
      affinity:
        {{- toYaml . | nindent 8 }}
      {{- end }}
-      {{- with .Values.volumes }}
-      volumes:
-        {{- toYaml . | nindent 8 }}
-      {{- end }}
      {{- with .Values.tolerations }}
      tolerations:
        {{- toYaml . | nindent 8 }}
diff --git a/support-files/kubernetes/helm/bk-nodeman/templates/web/deployment.yaml b/support-files/kubernetes/helm/bk-nodeman/templates/web/deployment.yaml
index c15b6671f..46baaddf6 100644
--- a/support-files/kubernetes/helm/bk-nodeman/templates/web/deployment.yaml
+++ b/support-files/kubernetes/helm/bk-nodeman/templates/web/deployment.yaml
@@ -52,11 +52,9 @@ spec:
           args:
             - "python manage.py collectstatic --noinput"
           volumeMounts:
+            {{- include "bk-nodeman.volumeMounts" . | nindent 12 }}
             - name: "static"
               mountPath: "/app/staticfiles/"
-            {{- if .Values.volumeMounts }}
-            {{- include "common.tplvalues.render" (dict "value" .Values.volumeMounts "context" $) | nindent 12 }}
-            {{- end }}
           {{- include "bk-nodeman.saas.env" . | nindent 10 }}
       containers:
         - name: "{{ $fullName }}"
@@ -65,13 +63,11 @@ spec:
           image: "{{ .Values.global.imageRegistry | default .Values.images.nginx.registry }}/{{ .Values.images.nginx.repository }}:{{ .Values.images.nginx.tag }}"
           imagePullPolicy: "{{ .Values.images.nginx.pullPolicy }}"
           volumeMounts:
+            {{- include "bk-nodeman.volumeMounts" . | nindent 12 }}
            - name: "nginx-conf"
              mountPath: "/etc/nginx/conf.d/"
            - name: "static"
              mountPath: "/app/staticfiles/"
-            {{- if .Values.volumeMounts }}
-            {{- include "common.tplvalues.render" (dict "value" .Values.volumeMounts "context" $) | nindent 12 }}
-            {{- end }}
           ports:
             - name: http
              containerPort: {{ $svcPort }}
@@ -86,6 +82,13 @@ spec:
               port: {{ $svcPort }}
           resources:
             {{- toYaml $moduleConf.resources | nindent 12 }}
+      volumes:
+        {{- include "bk-nodeman.volumes" . | nindent 8 }}
+        - name: static
+          emptyDir: {}
+        - name: nginx-conf
+          configMap:
+            name: "{{ include "bk-nodeman.saas-web.fullname" . }}-nginx-configmap"
       {{- with .Values.nodeSelector }}
       nodeSelector:
         {{- toYaml . | nindent 8 }}
@@ -94,15 +97,6 @@ spec:
      affinity:
        {{- toYaml . | nindent 8 }}
      {{- end }}
-      volumes:
-        - name: static
-          emptyDir: {}
-        - name: nginx-conf
-          configMap:
-            name: "{{ include "bk-nodeman.saas-web.fullname" . }}-nginx-configmap"
-      {{- if .Values.volumes }}
-      {{- include "common.tplvalues.render" (dict "value" .Values.volumes "context" $) | nindent 8 }}
-      {{- end }}
      {{- with .Values.tolerations }}
      tolerations:
        {{- toYaml . | nindent 8 }}
diff --git a/support-files/kubernetes/helm/bk-nodeman/values.yaml b/support-files/kubernetes/helm/bk-nodeman/values.yaml
index 0103be3af..3c4a8e0a1 100644
--- a/support-files/kubernetes/helm/bk-nodeman/values.yaml
+++ b/support-files/kubernetes/helm/bk-nodeman/values.yaml
@@ -257,6 +257,42 @@ bkIamApiUrl: "http://bkiam-api.example.com"
 ## 蓝鲸制品库浏览器访问域名和后台 API http://bkiam-api.example.com 域名同一个
 bkRepoUrl: "http://bkrepo.example.com"
 
+
+## --------------------------------------
+## GSE 证书
+## --------------------------------------
+gseCert:
+
+  ## 证书 CA 内容配置(base64)
+  ca: ""
+
+  ## Server 侧 CERT 内容配置(base64)
+  cert: ""
+
+  ## Server 侧 KEY 内容配置(base64)
+  key: ""
+
+  ## API 侧 CERT
+  ##
+  apiClient:
+
+    ## API 侧 CERT 内容配置, 用于其他服务调用 GSE(base64)
+    cert: ""
+
+    ## API 侧 KEY 内容配置, 用于其他服务调用 GSE(base64)
+    key: ""
+
+  ## Agent 侧 CERT
+  ##
+  agent:
+
+    ## Agent 侧 CERT 内容配置, 用于 Agent 链路(base64)
+    cert: ""
+
+    ## Agent 侧 KEY 内容配置, 用于 Agent 链路(base64)
+    key: ""
+
+
 ## --------------------------------------
 ## 节点管理系统配置
 ## --------------------------------------
@@ -327,6 +363,8 @@ config:
   ##
   ## 平台版本,默认为 `V1`,可选:`V1` `V2`
   gseVersion: "V1"
+  ## GSE 本地证书路径,渲染时为空默认取 `/data/bk{{ .Values.config.bkAppRunEnv }}/cert`
+  gseCertPath: ""
   ## 是否启用 GSE 服务探测,为 `true` 将定期更新默认接入点的 gse svr 信息
   gseEnableSvrDisCovery: true
   ## ZK hosts 信息,host:port,多个 hosts 以 `,` 分隔
diff --git a/support-files/kubernetes/images/family_bucket/Dockerfile b/support-files/kubernetes/images/family_bucket/Dockerfile
index 1854ce6ef..541916062 100644
--- a/support-files/kubernetes/images/family_bucket/Dockerfile
+++ b/support-files/kubernetes/images/family_bucket/Dockerfile
@@ -82,6 +82,7 @@ FROM base-app AS ieod-app
 RUN set -ex && \
     rm -rf blueking && \
     rm -rf bkoauth && \
+    pip install --no-cache-dir python-crontab==2.6.0 && \
     pip install --no-cache-dir blueking-component-ieod==0.1.12 && \
     pip install --no-cache-dir bkoauth==0.0.22
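
The new `gseCert.*` values are rendered through `b64dec`, so each field is expected to hold the base64-encoded certificate file content; any field left empty is skipped by its `{{- if }}` guard and simply omitted from the rendered ConfigMap. A minimal sketch of an override file is shown below — the file name, placeholder strings, and release name are illustrative assumptions, not part of the chart:

    # gse-cert-values.yaml -- hypothetical override file
    gseCert:
      ca: "LS0tLS1CRUdJTi..."        # placeholder; e.g. $(base64 -w0 gseca.crt)
      cert: "LS0tLS1CRUdJTi..."      # placeholder; e.g. $(base64 -w0 gse_server.crt)
      key: "LS0tLS1CRUdJTi..."       # placeholder; e.g. $(base64 -w0 gse_server.key)
      apiClient:
        cert: "LS0tLS1CRUdJTi..."    # placeholder; e.g. $(base64 -w0 gse_api_client.crt)
        key: "LS0tLS1CRUdJTi..."     # placeholder; e.g. $(base64 -w0 gse_api_client.key)
      agent:
        cert: "LS0tLS1CRUdJTi..."    # placeholder; e.g. $(base64 -w0 gse_agent.crt)
        key: "LS0tLS1CRUdJTi..."     # placeholder; e.g. $(base64 -w0 gse_agent.key)

    # Example invocation (release and chart path are assumptions):
    #   helm upgrade --install bk-nodeman ./bk-nodeman -f gse-cert-values.yaml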