diff --git a/dbm-ui/backend/db_meta/api/cluster/sqlserverha/create_cluster.py b/dbm-ui/backend/db_meta/api/cluster/sqlserverha/create_cluster.py index 3d4c7ddb97..723f256f1a 100644 --- a/dbm-ui/backend/db_meta/api/cluster/sqlserverha/create_cluster.py +++ b/dbm-ui/backend/db_meta/api/cluster/sqlserverha/create_cluster.py @@ -26,6 +26,7 @@ from backend.db_meta.exceptions import DBMetaException from backend.db_meta.models import Cluster, ClusterEntry, StorageInstance, StorageInstanceTuple from backend.db_meta.models.storage_set_dtl import SqlserverClusterSyncMode +from backend.flow.utils.sqlserver.sqlserver_db_function import get_instance_time_zone logger = logging.getLogger("root") @@ -70,7 +71,6 @@ def create( major_version: str, db_module_id: int, bk_cloud_id: int, - time_zone: str, region: str, sync_type: str, slave_domain: Optional[str] = None, @@ -104,7 +104,7 @@ def create( phase=ClusterPhase.ONLINE.value, status=ClusterStatus.NORMAL.value, bk_cloud_id=bk_cloud_id, - time_zone=time_zone, + time_zone=get_instance_time_zone(master_storage_obj), major_version=major_version, region=region, ) @@ -138,6 +138,8 @@ def create( m = ins.machine ins.db_module_id = db_module_id m.db_module_id = db_module_id + # 保存最新的time_zone + ins.time_zone = get_instance_time_zone(ins) - ins.save(update_fields=["db_module_id"]) + ins.save(update_fields=["db_module_id", "time_zone"]) m.save(update_fields=["db_module_id"]) diff --git a/dbm-ui/backend/db_meta/api/cluster/sqlserverha/handler.py b/dbm-ui/backend/db_meta/api/cluster/sqlserverha/handler.py index 2fdb639baf..25347b26d4 100644 --- a/dbm-ui/backend/db_meta/api/cluster/sqlserverha/handler.py +++ b/dbm-ui/backend/db_meta/api/cluster/sqlserverha/handler.py @@ -112,7 +112,6 @@ def create( storages=storages, creator=creator, bk_cloud_id=bk_cloud_id, - time_zone=time_zone, major_version=major_version, region=region, sync_type=sync_type, diff --git a/dbm-ui/backend/db_meta/api/cluster/sqlserversingle/create_cluster.py b/dbm-ui/backend/db_meta/api/cluster/sqlserversingle/create_cluster.py index 
d8350fc2b4..49dfdf4e08 100644 --- a/dbm-ui/backend/db_meta/api/cluster/sqlserversingle/create_cluster.py +++ b/dbm-ui/backend/db_meta/api/cluster/sqlserversingle/create_cluster.py @@ -19,6 +19,7 @@ from backend.db_meta.enums import ClusterEntryType, ClusterPhase, ClusterStatus, ClusterType from backend.db_meta.exceptions import DBMetaException from backend.db_meta.models import Cluster, ClusterEntry, StorageInstance +from backend.flow.utils.sqlserver.sqlserver_db_function import get_instance_time_zone logger = logging.getLogger("root") @@ -48,7 +49,6 @@ def create( db_module_id: int, storage: Dict, bk_cloud_id: int, - time_zone: str, region: str, creator: str = "", ) -> Cluster: @@ -69,7 +69,7 @@ def create( phase=ClusterPhase.ONLINE.value, status=ClusterStatus.NORMAL.value, bk_cloud_id=bk_cloud_id, - time_zone=time_zone, + time_zone=get_instance_time_zone(storage_objs[0]), major_version=major_version, region=region, ) @@ -85,7 +85,8 @@ def create( m = ins.machine ins.db_module_id = db_module_id m.db_module_id = db_module_id + # 保存最新的time_zone + ins.time_zone = get_instance_time_zone(ins) - ins.save(update_fields=["db_module_id"]) + ins.save(update_fields=["db_module_id", "time_zone"]) m.save(update_fields=["db_module_id"]) - return cluster diff --git a/dbm-ui/backend/db_meta/api/cluster/sqlserversingle/handler.py b/dbm-ui/backend/db_meta/api/cluster/sqlserversingle/handler.py index 0aae7a5e3c..a01c706e5d 100644 --- a/dbm-ui/backend/db_meta/api/cluster/sqlserversingle/handler.py +++ b/dbm-ui/backend/db_meta/api/cluster/sqlserversingle/handler.py @@ -84,7 +84,6 @@ def create( storage=storage, creator=creator, bk_cloud_id=bk_cloud_id, - time_zone=time_zone, region=region, ) ) diff --git a/dbm-ui/backend/flow/consts.py b/dbm-ui/backend/flow/consts.py index 0c26693029..3f92ff93e6 100644 --- a/dbm-ui/backend/flow/consts.py +++ b/dbm-ui/backend/flow/consts.py @@ -1109,11 +1109,11 @@ class SqlserverSysVersion(str, StructuredEnum): 定义Sqlserver支持操作系统版本名称 """ - Windows_Server_2008 = EnumField("Windows Server 2008", _("2008服务器版")) - 
Windows_Server_2012 = EnumField("Windows Server 2012", _("2012服务器版")) - Windows_Server_2016 = EnumField("Windows Server 2016", _("2016服务器版")) - Windows_Server_2019 = EnumField("Windows Server 2019", _("2019服务器版")) - Windows_Server_2022 = EnumField("Windows Server 2022", _("2022服务器版")) + Windows_Server_2008 = EnumField("WindowsServer2008", _("2008服务器版")) + Windows_Server_2012 = EnumField("WindowsServer2012", _("2012服务器版")) + Windows_Server_2016 = EnumField("WindowsServer2016", _("2016服务器版")) + Windows_Server_2019 = EnumField("WindowsServer2019", _("2019服务器版")) + Windows_Server_2022 = EnumField("WindowsServer2022", _("2022服务器版")) # mssql各版本的操作系统版本支持 diff --git a/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/build_database_sync.py b/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/build_database_sync.py index 8e723f988f..67785d35e4 100644 --- a/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/build_database_sync.py +++ b/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/build_database_sync.py @@ -9,6 +9,7 @@ """ import logging.config +from dataclasses import asdict from django.utils.translation import ugettext as _ @@ -17,6 +18,10 @@ from backend.flow.engine.bamboo.scene.common.builder import Builder, SubBuilder from backend.flow.engine.bamboo.scene.sqlserver.base_flow import BaseFlow from backend.flow.engine.bamboo.scene.sqlserver.common_sub_flow import sync_dbs_for_cluster_sub_flow +from backend.flow.plugins.components.collections.sqlserver.create_random_job_user import SqlserverAddJobUserComponent +from backend.flow.plugins.components.collections.sqlserver.drop_random_job_user import SqlserverDropJobUserComponent +from backend.flow.utils.sqlserver.sqlserver_act_dataclass import CreateRandomJobUserKwargs, DropRandomJobUserKwargs +from backend.flow.utils.sqlserver.sqlserver_db_function import create_sqlserver_login_sid from backend.flow.utils.sqlserver.sqlserver_host import Host logger = logging.getLogger("flow") @@ -50,6 +55,20 @@ def run_flow(self): # 声明子流程 
sub_pipeline = SubBuilder(root_id=self.root_id, data=self.data) + + # 创建随机账号 + sub_pipeline.add_act( + act_name=_("create job user"), + act_component_code=SqlserverAddJobUserComponent.code, + kwargs=asdict( + CreateRandomJobUserKwargs( + cluster_ids=[cluster.id], + sid=create_sqlserver_login_sid(), + ), + ), + ) + + # 数据同步子流程 sub_pipeline.add_sub_pipeline( sub_flow=sync_dbs_for_cluster_sub_flow( uid=self.data["uid"], @@ -59,6 +78,14 @@ def run_flow(self): sync_dbs=info["sync_dbs"], ) ) + + # 删除随机账号 + sub_pipeline.add_act( + act_name=_("drop job user"), + act_component_code=SqlserverDropJobUserComponent.code, + kwargs=asdict(DropRandomJobUserKwargs(cluster_ids=[cluster.id])), + ) + sub_pipelines.append(sub_pipeline.build_sub_process(sub_name=_("{}集群建立数据库同步".format(cluster.name)))) main_pipeline.add_parallel_sub_pipeline(sub_flow_list=sub_pipelines) diff --git a/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_add_slave.py b/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_add_slave.py index 9c357be7ac..86cf512e6b 100644 --- a/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_add_slave.py +++ b/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_add_slave.py @@ -23,9 +23,19 @@ install_sqlserver_sub_flow, sync_dbs_for_cluster_sub_flow, ) +from backend.flow.plugins.components.collections.sqlserver.create_random_job_user import SqlserverAddJobUserComponent +from backend.flow.plugins.components.collections.sqlserver.drop_random_job_user import SqlserverDropJobUserComponent from backend.flow.plugins.components.collections.sqlserver.sqlserver_db_meta import SqlserverDBMetaComponent -from backend.flow.utils.sqlserver.sqlserver_act_dataclass import DBMetaOPKwargs -from backend.flow.utils.sqlserver.sqlserver_db_function import get_dbs_for_drs, get_group_name +from backend.flow.utils.sqlserver.sqlserver_act_dataclass import ( + CreateRandomJobUserKwargs, + DBMetaOPKwargs, + DropRandomJobUserKwargs, +) +from 
backend.flow.utils.sqlserver.sqlserver_db_function import ( + create_sqlserver_login_sid, + get_dbs_for_drs, + get_group_name, +) from backend.flow.utils.sqlserver.sqlserver_db_meta import SqlserverDBMeta from backend.flow.utils.sqlserver.sqlserver_host import Host from backend.flow.utils.sqlserver.validate import SqlserverCluster, SqlserverInstance @@ -128,6 +138,19 @@ def run_flow(self): ) ) + # 创建随机账号 + cluster_sub_pipeline.add_act( + act_name=_("create job user"), + act_component_code=SqlserverAddJobUserComponent.code, + kwargs=asdict( + CreateRandomJobUserKwargs( + cluster_ids=[cluster.id], + sid=create_sqlserver_login_sid(), + other_instances=[f"{info['new_slave_host']['ip']}:{master_instance.port}"], + ), + ), + ) + # 加入到集群的AlwaysOn可用组 cluster_sub_pipeline.add_sub_pipeline( sub_flow=build_always_on_sub_flow( @@ -156,6 +179,18 @@ def run_flow(self): ) ) + # 删除随机账号 + cluster_sub_pipeline.add_act( + act_name=_("drop job user"), + act_component_code=SqlserverDropJobUserComponent.code, + kwargs=asdict( + DropRandomJobUserKwargs( + cluster_ids=[cluster.id], + other_instances=[f"{info['new_slave_host']['ip']}:{master_instance.port}"], + ), + ), + ) + cluster_flows.append( cluster_sub_pipeline.build_sub_process(sub_name=_("[{}]集群与新slave建立关系".format(cluster.name))) ) diff --git a/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_backup_dbs.py b/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_backup_dbs.py index d429bfe3aa..8e6a41b57e 100644 --- a/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_backup_dbs.py +++ b/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_backup_dbs.py @@ -20,10 +20,18 @@ from backend.flow.engine.bamboo.scene.common.builder import Builder, SubBuilder from backend.flow.engine.bamboo.scene.common.get_file_list import GetFileList from backend.flow.engine.bamboo.scene.sqlserver.base_flow import BaseFlow +from backend.flow.plugins.components.collections.sqlserver.create_random_job_user import 
SqlserverAddJobUserComponent +from backend.flow.plugins.components.collections.sqlserver.drop_random_job_user import SqlserverDropJobUserComponent from backend.flow.plugins.components.collections.sqlserver.exec_actuator_script import SqlserverActuatorScriptComponent from backend.flow.plugins.components.collections.sqlserver.trans_files import TransFileInWindowsComponent -from backend.flow.utils.sqlserver.sqlserver_act_dataclass import DownloadMediaKwargs, ExecActuatorKwargs +from backend.flow.utils.sqlserver.sqlserver_act_dataclass import ( + CreateRandomJobUserKwargs, + DownloadMediaKwargs, + DropRandomJobUserKwargs, + ExecActuatorKwargs, +) from backend.flow.utils.sqlserver.sqlserver_act_payload import SqlserverActPayload +from backend.flow.utils.sqlserver.sqlserver_db_function import create_sqlserver_login_sid from backend.flow.utils.sqlserver.sqlserver_host import Host logger = logging.getLogger("flow") @@ -63,6 +71,18 @@ def run_flow(self): # 声明子流程 sub_pipeline = SubBuilder(root_id=self.root_id, data=copy.deepcopy(sub_flow_context)) + # 创建随机账号 + sub_pipeline.add_act( + act_name=_("create job user"), + act_component_code=SqlserverAddJobUserComponent.code, + kwargs=asdict( + CreateRandomJobUserKwargs( + cluster_ids=[cluster.id], + sid=create_sqlserver_login_sid(), + ), + ), + ) + sub_pipeline.add_act( act_name=_("下发执行器"), act_component_code=TransFileInWindowsComponent.code, @@ -88,6 +108,13 @@ def run_flow(self): ), ) + # 删除随机账号 + sub_pipeline.add_act( + act_name=_("drop job user"), + act_component_code=SqlserverDropJobUserComponent.code, + kwargs=asdict(DropRandomJobUserKwargs(cluster_ids=[cluster.id])), + ) + sub_pipelines.append(sub_pipeline.build_sub_process(sub_name=_("{}执行备份".format(cluster.name)))) main_pipeline.add_parallel_sub_pipeline(sub_flow_list=sub_pipelines) diff --git a/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_clean_dbs.py b/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_clean_dbs.py index 9d87672b4d..f0fb019f6f 
100644 --- a/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_clean_dbs.py +++ b/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_clean_dbs.py @@ -22,10 +22,18 @@ from backend.flow.engine.bamboo.scene.common.builder import Builder, SubBuilder from backend.flow.engine.bamboo.scene.common.get_file_list import GetFileList from backend.flow.engine.bamboo.scene.sqlserver.base_flow import BaseFlow +from backend.flow.plugins.components.collections.sqlserver.create_random_job_user import SqlserverAddJobUserComponent +from backend.flow.plugins.components.collections.sqlserver.drop_random_job_user import SqlserverDropJobUserComponent from backend.flow.plugins.components.collections.sqlserver.exec_actuator_script import SqlserverActuatorScriptComponent from backend.flow.plugins.components.collections.sqlserver.trans_files import TransFileInWindowsComponent -from backend.flow.utils.sqlserver.sqlserver_act_dataclass import DownloadMediaKwargs, ExecActuatorKwargs +from backend.flow.utils.sqlserver.sqlserver_act_dataclass import ( + CreateRandomJobUserKwargs, + DownloadMediaKwargs, + DropRandomJobUserKwargs, + ExecActuatorKwargs, +) from backend.flow.utils.sqlserver.sqlserver_act_payload import SqlserverActPayload +from backend.flow.utils.sqlserver.sqlserver_db_function import create_sqlserver_login_sid from backend.flow.utils.sqlserver.sqlserver_host import Host logger = logging.getLogger("flow") @@ -78,6 +86,18 @@ def run_flow(self): # 声明子流程 sub_pipeline = SubBuilder(root_id=self.root_id, data=copy.deepcopy(sub_flow_context)) + # 创建随机账号 + sub_pipeline.add_act( + act_name=_("create job user"), + act_component_code=SqlserverAddJobUserComponent.code, + kwargs=asdict( + CreateRandomJobUserKwargs( + cluster_ids=[cluster.id], + sid=create_sqlserver_login_sid(), + ), + ), + ) + sub_pipeline.add_act( act_name=_("下发执行器"), act_component_code=TransFileInWindowsComponent.code, @@ -101,6 +121,13 @@ def run_flow(self): ), ) + # 删除随机账号 + sub_pipeline.add_act( + 
act_name=_("drop job user"), + act_component_code=SqlserverDropJobUserComponent.code, + kwargs=asdict(DropRandomJobUserKwargs(cluster_ids=[cluster.id])), + ) + sub_pipelines.append(sub_pipeline.build_sub_process(sub_name=_("{}集群执行清档".format(cluster.name)))) main_pipeline.add_parallel_sub_pipeline(sub_flow_list=sub_pipelines) diff --git a/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_cluster_destroy.py b/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_cluster_destroy.py index cd0e26d174..b206c0efff 100644 --- a/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_cluster_destroy.py +++ b/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_cluster_destroy.py @@ -21,16 +21,19 @@ from backend.flow.engine.bamboo.scene.common.get_file_list import GetFileList from backend.flow.engine.bamboo.scene.sqlserver.base_flow import BaseFlow from backend.flow.plugins.components.collections.mysql.dns_manage import MySQLDnsManageComponent +from backend.flow.plugins.components.collections.sqlserver.create_random_job_user import SqlserverAddJobUserComponent from backend.flow.plugins.components.collections.sqlserver.exec_actuator_script import SqlserverActuatorScriptComponent from backend.flow.plugins.components.collections.sqlserver.sqlserver_db_meta import SqlserverDBMetaComponent from backend.flow.plugins.components.collections.sqlserver.trans_files import TransFileInWindowsComponent from backend.flow.utils.mysql.mysql_act_dataclass import DeleteClusterDnsKwargs from backend.flow.utils.sqlserver.sqlserver_act_dataclass import ( + CreateRandomJobUserKwargs, DBMetaOPKwargs, DownloadMediaKwargs, ExecActuatorKwargs, ) from backend.flow.utils.sqlserver.sqlserver_act_payload import SqlserverActPayload +from backend.flow.utils.sqlserver.sqlserver_db_function import create_sqlserver_login_sid from backend.flow.utils.sqlserver.sqlserver_db_meta import SqlserverDBMeta from backend.flow.utils.sqlserver.sqlserver_host import Host @@ -77,6 +80,18 @@ def 
run_flow(self): # 声明子流程 sub_pipeline = SubBuilder(root_id=self.root_id, data=copy.deepcopy(sub_flow_context)) + # 创建随机账号 + sub_pipeline.add_act( + act_name=_("create job user"), + act_component_code=SqlserverAddJobUserComponent.code, + kwargs=asdict( + CreateRandomJobUserKwargs( + cluster_ids=[cluster.id], + sid=create_sqlserver_login_sid(), + ), + ), + ) + # 下发执行器 sub_pipeline.add_act( act_name=_("下发执行器"), diff --git a/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_cluster_reset.py b/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_cluster_reset.py index b3b51aa012..8fd3dd67d2 100644 --- a/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_cluster_reset.py +++ b/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_cluster_reset.py @@ -23,19 +23,23 @@ from backend.flow.engine.bamboo.scene.common.get_file_list import GetFileList from backend.flow.engine.bamboo.scene.sqlserver.base_flow import BaseFlow from backend.flow.plugins.components.collections.mysql.dns_manage import MySQLDnsManageComponent +from backend.flow.plugins.components.collections.sqlserver.create_random_job_user import SqlserverAddJobUserComponent +from backend.flow.plugins.components.collections.sqlserver.drop_random_job_user import SqlserverDropJobUserComponent from backend.flow.plugins.components.collections.sqlserver.exec_actuator_script import SqlserverActuatorScriptComponent from backend.flow.plugins.components.collections.sqlserver.exec_sqlserver_login import ExecSqlserverLoginComponent from backend.flow.plugins.components.collections.sqlserver.sqlserver_db_meta import SqlserverDBMetaComponent from backend.flow.plugins.components.collections.sqlserver.trans_files import TransFileInWindowsComponent from backend.flow.utils.mysql.mysql_act_dataclass import CreateDnsKwargs, DeleteClusterDnsKwargs from backend.flow.utils.sqlserver.sqlserver_act_dataclass import ( + CreateRandomJobUserKwargs, DBMetaOPKwargs, DownloadMediaKwargs, + DropRandomJobUserKwargs, 
ExecActuatorKwargs, ExecLoginKwargs, ) from backend.flow.utils.sqlserver.sqlserver_act_payload import SqlserverActPayload -from backend.flow.utils.sqlserver.sqlserver_db_function import get_dbs_for_drs +from backend.flow.utils.sqlserver.sqlserver_db_function import create_sqlserver_login_sid, get_dbs_for_drs from backend.flow.utils.sqlserver.sqlserver_db_meta import SqlserverDBMeta from backend.flow.utils.sqlserver.sqlserver_host import Host @@ -101,6 +105,18 @@ def run_flow(self): # 声明子流程 sub_pipeline = SubBuilder(root_id=self.root_id, data=copy.deepcopy(sub_flow_context)) + # 创建随机账号 + sub_pipeline.add_act( + act_name=_("create job user"), + act_component_code=SqlserverAddJobUserComponent.code, + kwargs=asdict( + CreateRandomJobUserKwargs( + cluster_ids=[cluster.id], + sid=create_sqlserver_login_sid(), + ), + ), + ) + # 下发执行器 sub_pipeline.add_act( act_name=_("下发执行器"), @@ -193,6 +209,13 @@ def run_flow(self): ), ) + # 删除随机账号 + sub_pipeline.add_act( + act_name=_("drop job user"), + act_component_code=SqlserverDropJobUserComponent.code, + kwargs=asdict(DropRandomJobUserKwargs(cluster_ids=[cluster.id])), + ) + sub_pipelines.append(sub_pipeline.build_sub_process(sub_name=_("{}集群重置".format(cluster.name)))) main_pipeline.add_parallel_sub_pipeline(sub_flow_list=sub_pipelines) diff --git a/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_db_construct.py b/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_db_construct.py index 1085aacb16..08f039d624 100644 --- a/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_db_construct.py +++ b/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_db_construct.py @@ -24,14 +24,19 @@ from backend.flow.engine.bamboo.scene.common.get_file_list import GetFileList from backend.flow.engine.bamboo.scene.sqlserver.base_flow import BaseFlow from backend.flow.engine.bamboo.scene.sqlserver.common_sub_flow import download_backup_file_sub_flow +from 
backend.flow.plugins.components.collections.sqlserver.create_random_job_user import SqlserverAddJobUserComponent +from backend.flow.plugins.components.collections.sqlserver.drop_random_job_user import SqlserverDropJobUserComponent from backend.flow.plugins.components.collections.sqlserver.exec_actuator_script import SqlserverActuatorScriptComponent from backend.flow.plugins.components.collections.sqlserver.trans_files import TransFileInWindowsComponent from backend.flow.utils.sqlserver.sqlserver_act_dataclass import ( + CreateRandomJobUserKwargs, DownloadMediaKwargs, + DropRandomJobUserKwargs, ExecActuatorKwargs, SqlserverDBConstructContext, ) from backend.flow.utils.sqlserver.sqlserver_act_payload import SqlserverActPayload +from backend.flow.utils.sqlserver.sqlserver_db_function import create_sqlserver_login_sid from backend.flow.utils.sqlserver.sqlserver_host import Host from backend.utils.time import str2datetime, trans_time_zone @@ -146,6 +151,18 @@ def run_flow(self): # 声明子流程 sub_pipeline = SubBuilder(root_id=self.root_id, data=copy.deepcopy(sub_flow_context)) + # 创建随机账号 + sub_pipeline.add_act( + act_name=_("create job user"), + act_component_code=SqlserverAddJobUserComponent.code, + kwargs=asdict( + CreateRandomJobUserKwargs( + cluster_ids=[target_cluster.id], + sid=create_sqlserver_login_sid(), + ), + ), + ) + # 下发执行器 sub_pipeline.add_act( act_name=_("下发执行器到目标集群master[{}]".format(target_master_instance.machine.ip)), @@ -245,6 +262,13 @@ def run_flow(self): ), ) + # 删除随机账号 + sub_pipeline.add_act( + act_name=_("drop job user"), + act_component_code=SqlserverDropJobUserComponent.code, + kwargs=asdict(DropRandomJobUserKwargs(cluster_ids=[target_cluster.id])), + ) + sub_pipelines.append( sub_pipeline.build_sub_process( sub_name=_("[{}]->[{}]数据构造流程".format(cluster.name, target_cluster.name)) diff --git a/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_dts.py b/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_dts.py index 
360e7f79cf..ee5450eb42 100644 --- a/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_dts.py +++ b/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_dts.py @@ -26,6 +26,8 @@ from backend.flow.engine.bamboo.scene.common.builder import Builder, SubBuilder from backend.flow.engine.bamboo.scene.common.get_file_list import GetFileList from backend.flow.engine.bamboo.scene.sqlserver.base_flow import BaseFlow +from backend.flow.plugins.components.collections.sqlserver.create_random_job_user import SqlserverAddJobUserComponent +from backend.flow.plugins.components.collections.sqlserver.drop_random_job_user import SqlserverDropJobUserComponent from backend.flow.plugins.components.collections.sqlserver.exec_actuator_script import SqlserverActuatorScriptComponent from backend.flow.plugins.components.collections.sqlserver.exec_sqlserver_backup_job import ( ExecSqlserverBackupJobComponent, @@ -34,14 +36,17 @@ from backend.flow.plugins.components.collections.sqlserver.sqlserver_db_meta import SqlserverDBMetaComponent from backend.flow.plugins.components.collections.sqlserver.trans_files import TransFileInWindowsComponent from backend.flow.utils.sqlserver.sqlserver_act_dataclass import ( + CreateRandomJobUserKwargs, DBMetaOPKwargs, DownloadMediaKwargs, + DropRandomJobUserKwargs, ExecActuatorKwargs, ExecBackupJobsKwargs, P2PFileForWindowKwargs, RestoreForDtsKwargs, ) from backend.flow.utils.sqlserver.sqlserver_act_payload import SqlserverActPayload +from backend.flow.utils.sqlserver.sqlserver_db_function import create_sqlserver_login_sid from backend.flow.utils.sqlserver.sqlserver_db_meta import SqlserverDBMeta from backend.flow.utils.sqlserver.sqlserver_host import Host @@ -93,6 +98,18 @@ def full_dts_flow(self): # 声明子流程 sub_pipeline = SubBuilder(root_id=self.root_id, data=copy.deepcopy(sub_flow_context)) + # 创建随机账号 + sub_pipeline.add_act( + act_name=_("create job user"), + act_component_code=SqlserverAddJobUserComponent.code, + kwargs=asdict( + 
CreateRandomJobUserKwargs( + cluster_ids=[cluster.id, target_cluster.id], + sid=create_sqlserver_login_sid(), + ), + ), + ) + # 先禁用原集群的master例行备份逻辑 sub_pipeline.add_act( act_name=_("禁用源master[{}]的backup jobs".format(master_instance.ip_port)), @@ -222,6 +239,13 @@ def full_dts_flow(self): ), ) + # 删除随机账号 + sub_pipeline.add_act( + act_name=_("drop job user"), + act_component_code=SqlserverDropJobUserComponent.code, + kwargs=asdict(DropRandomJobUserKwargs(cluster_ids=[cluster.id, target_cluster.id])), + ) + sub_pipelines.append( sub_pipeline.build_sub_process( sub_name=_("[{}]->[{}]全量数据迁移流程".format(cluster.name, target_cluster.name)) @@ -271,6 +295,18 @@ def incr_dts_flow(self): # 声明子流程 sub_pipeline = SubBuilder(root_id=self.root_id, data=copy.deepcopy(sub_flow_context)) + # 创建随机账号 + sub_pipeline.add_act( + act_name=_("create job user"), + act_component_code=SqlserverAddJobUserComponent.code, + kwargs=asdict( + CreateRandomJobUserKwargs( + cluster_ids=[cluster.id, target_cluster.id], + sid=create_sqlserver_login_sid(), + ), + ), + ) + # 给目标集群的master和源集群master下发执行器 sub_pipeline.add_act( act_name=_("下发执行器"), @@ -359,6 +395,13 @@ def incr_dts_flow(self): ), ) + # 删除随机账号 + sub_pipeline.add_act( + act_name=_("drop job user"), + act_component_code=SqlserverDropJobUserComponent.code, + kwargs=asdict(DropRandomJobUserKwargs(cluster_ids=[cluster.id, target_cluster.id])), + ) + sub_pipelines.append( sub_pipeline.build_sub_process( sub_name=_("[{}]->[{}]增量数据迁移流程".format(cluster.name, target_cluster.name)) diff --git a/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_master_slave_switch.py b/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_master_slave_switch.py index e2b6a8f1e3..182196bb3a 100644 --- a/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_master_slave_switch.py +++ b/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_master_slave_switch.py @@ -26,15 +26,20 @@ pre_check_sub_flow, switch_domain_sub_flow_for_cluster, ) +from 
backend.flow.plugins.components.collections.sqlserver.create_random_job_user import SqlserverAddJobUserComponent +from backend.flow.plugins.components.collections.sqlserver.drop_random_job_user import SqlserverDropJobUserComponent from backend.flow.plugins.components.collections.sqlserver.exec_actuator_script import SqlserverActuatorScriptComponent from backend.flow.plugins.components.collections.sqlserver.sqlserver_db_meta import SqlserverDBMetaComponent from backend.flow.plugins.components.collections.sqlserver.trans_files import TransFileInWindowsComponent from backend.flow.utils.sqlserver.sqlserver_act_dataclass import ( + CreateRandomJobUserKwargs, DBMetaOPKwargs, DownloadMediaKwargs, + DropRandomJobUserKwargs, ExecActuatorKwargs, ) from backend.flow.utils.sqlserver.sqlserver_act_payload import SqlserverActPayload +from backend.flow.utils.sqlserver.sqlserver_db_function import create_sqlserver_login_sid from backend.flow.utils.sqlserver.sqlserver_db_meta import SqlserverDBMeta from backend.flow.utils.sqlserver.sqlserver_host import Host @@ -109,6 +114,18 @@ def run_flow(self): # 启动子流程 cluster_pipeline = SubBuilder(root_id=self.root_id, data=cluster_context) + # 创建随机账号 + cluster_pipeline.add_act( + act_name=_("create job user"), + act_component_code=SqlserverAddJobUserComponent.code, + kwargs=asdict( + CreateRandomJobUserKwargs( + cluster_ids=[cluster.id], + sid=create_sqlserver_login_sid(), + ), + ), + ) + if not self.data["force"]: # 如果是强制模式,不做预检测, 不做克隆 cluster_pipeline.add_sub_pipeline( @@ -155,6 +172,13 @@ def run_flow(self): ) ) + # 删除随机账号 + cluster_pipeline.add_act( + act_name=_("drop job user"), + act_component_code=SqlserverDropJobUserComponent.code, + kwargs=asdict(DropRandomJobUserKwargs(cluster_ids=[cluster.id])), + ) + act_list.append(cluster_pipeline.build_sub_process(sub_name=_("{}集群互切".format(cluster.name)))) # 拼接集群维度的子流程 diff --git a/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_rename_dbs.py 
b/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_rename_dbs.py index bd8eea70a1..f3d21c27f1 100644 --- a/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_rename_dbs.py +++ b/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_rename_dbs.py @@ -22,10 +22,18 @@ from backend.flow.engine.bamboo.scene.common.builder import Builder, SubBuilder from backend.flow.engine.bamboo.scene.common.get_file_list import GetFileList from backend.flow.engine.bamboo.scene.sqlserver.base_flow import BaseFlow +from backend.flow.plugins.components.collections.sqlserver.create_random_job_user import SqlserverAddJobUserComponent +from backend.flow.plugins.components.collections.sqlserver.drop_random_job_user import SqlserverDropJobUserComponent from backend.flow.plugins.components.collections.sqlserver.exec_actuator_script import SqlserverActuatorScriptComponent from backend.flow.plugins.components.collections.sqlserver.trans_files import TransFileInWindowsComponent -from backend.flow.utils.sqlserver.sqlserver_act_dataclass import DownloadMediaKwargs, ExecActuatorKwargs +from backend.flow.utils.sqlserver.sqlserver_act_dataclass import ( + CreateRandomJobUserKwargs, + DownloadMediaKwargs, + DropRandomJobUserKwargs, + ExecActuatorKwargs, +) from backend.flow.utils.sqlserver.sqlserver_act_payload import SqlserverActPayload +from backend.flow.utils.sqlserver.sqlserver_db_function import create_sqlserver_login_sid from backend.flow.utils.sqlserver.sqlserver_host import Host logger = logging.getLogger("flow") @@ -78,6 +86,18 @@ def run_flow(self): # 声明子流程 sub_pipeline = SubBuilder(root_id=self.root_id, data=copy.deepcopy(sub_flow_context)) + # 创建随机账号 + sub_pipeline.add_act( + act_name=_("create job user"), + act_component_code=SqlserverAddJobUserComponent.code, + kwargs=asdict( + CreateRandomJobUserKwargs( + cluster_ids=[cluster.id], + sid=create_sqlserver_login_sid(), + ), + ), + ) + sub_pipeline.add_act( act_name=_("下发执行器"), 
act_component_code=TransFileInWindowsComponent.code, @@ -101,6 +121,13 @@ def run_flow(self): ), ) + # 删除随机账号 + sub_pipeline.add_act( + act_name=_("drop job user"), + act_component_code=SqlserverDropJobUserComponent.code, + kwargs=asdict(DropRandomJobUserKwargs(cluster_ids=[cluster.id])), + ) + sub_pipelines.append(sub_pipeline.build_sub_process(sub_name=_("{}集群执行备份".format(cluster.name)))) main_pipeline.add_parallel_sub_pipeline(sub_flow_list=sub_pipelines) diff --git a/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_restore_dbs.py b/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_restore_dbs.py deleted file mode 100644 index 2b61f27382..0000000000 --- a/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_restore_dbs.py +++ /dev/null @@ -1,34 +0,0 @@ -""" -TencentBlueKing is pleased to support the open source community by making 蓝鲸智云-DB管理系统(BlueKing-BK-DBM) available. -Copyright (C) 2017-2023 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at https://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" -from backend.flow.engine.bamboo.scene.common.builder import Builder -from backend.flow.engine.bamboo.scene.sqlserver.base_flow import BaseFlow - - -class SqlserverRestoreDBSFlow(BaseFlow): - """ - 构建sqlserver数据定点构造的抽象类 - 兼容跨云区域的场景支持 - """ - - def run_flow(self): - """ - 定义集群定点构造的流程,支持备份文件恢复和定点恢复 - 流程逻辑: - 1: 下发执行器到目标master实例 - 2: 下载文件到目标master机器指定目录 - 3: 恢复指定全量备份文件 - 4:恢复指点增量备份文件 (可选) - """ - # 定义主流程 - main_pipeline = Builder(root_id=self.root_id, data=self.data) - sub_pipelines = [] - - main_pipeline.add_parallel_sub_pipeline(sub_flow_list=sub_pipelines) - main_pipeline.run_pipeline() diff --git a/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_slave_rebuild.py b/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_slave_rebuild.py index 05c1bce088..c1a2da5c2c 100644 --- a/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_slave_rebuild.py +++ b/dbm-ui/backend/flow/engine/bamboo/scene/sqlserver/sqlserver_slave_rebuild.py @@ -28,17 +28,21 @@ ) from backend.flow.engine.bamboo.scene.sqlserver.sqlserver_add_slave import SqlserverAddSlaveFlow from backend.flow.plugins.components.collections.mysql.dns_manage import MySQLDnsManageComponent +from backend.flow.plugins.components.collections.sqlserver.create_random_job_user import SqlserverAddJobUserComponent +from backend.flow.plugins.components.collections.sqlserver.drop_random_job_user import SqlserverDropJobUserComponent from backend.flow.plugins.components.collections.sqlserver.exec_actuator_script import SqlserverActuatorScriptComponent from backend.flow.plugins.components.collections.sqlserver.sqlserver_db_meta import SqlserverDBMetaComponent from backend.flow.plugins.components.collections.sqlserver.trans_files import TransFileInWindowsComponent from backend.flow.utils.mysql.mysql_act_dataclass import UpdateDnsRecordKwargs from backend.flow.utils.sqlserver.sqlserver_act_dataclass import ( + CreateRandomJobUserKwargs, DBMetaOPKwargs, DownloadMediaKwargs, + DropRandomJobUserKwargs, 
ExecActuatorKwargs, ) from backend.flow.utils.sqlserver.sqlserver_act_payload import SqlserverActPayload -from backend.flow.utils.sqlserver.sqlserver_db_function import get_dbs_for_drs +from backend.flow.utils.sqlserver.sqlserver_db_function import create_sqlserver_login_sid, get_dbs_for_drs from backend.flow.utils.sqlserver.sqlserver_db_meta import SqlserverDBMeta from backend.flow.utils.sqlserver.sqlserver_host import Host from backend.flow.utils.sqlserver.validate import SqlserverCluster @@ -86,6 +90,18 @@ def slave_rebuild_in_local_flow(self): # 声明子流程 sub_pipeline = SubBuilder(root_id=self.root_id, data=copy.deepcopy(sub_flow_context)) + # 创建随机账号 + sub_pipeline.add_act( + act_name=_("create job user"), + act_component_code=SqlserverAddJobUserComponent.code, + kwargs=asdict( + CreateRandomJobUserKwargs( + cluster_ids=[cluster.id], + sid=create_sqlserver_login_sid(), + ), + ), + ) + # 下发执行器 sub_pipeline.add_act( act_name=_("下发执行器"), @@ -121,6 +137,13 @@ def slave_rebuild_in_local_flow(self): ) ) + # 删除随机账号 + sub_pipeline.add_act( + act_name=_("drop job user"), + act_component_code=SqlserverDropJobUserComponent.code, + kwargs=asdict(DropRandomJobUserKwargs(cluster_ids=[cluster.id])), + ) + sub_pipelines.append( sub_pipeline.build_sub_process( sub_name=_("{}集群slave[{}:{}]原地重建".format(cluster.name, info["slave_host"]["ip"], info["port"])) @@ -187,6 +210,19 @@ def slave_rebuild_in_new_slave_flow(self): old_slave = cluster.storageinstance_set.get(machine__ip=info["old_slave_host"]["ip"]) cluster_sub_pipeline = SubBuilder(root_id=self.root_id, data=copy.deepcopy(sub_flow_context)) + # 创建随机账号 + cluster_sub_pipeline.add_act( + act_name=_("create job user"), + act_component_code=SqlserverAddJobUserComponent.code, + kwargs=asdict( + CreateRandomJobUserKwargs( + cluster_ids=[cluster.id], + sid=create_sqlserver_login_sid(), + other_instances=[f"{info['new_slave_host']['ip']}:{old_slave.port}"], + ), + ), + ) + # 数据库建立新的同步关系 cluster_sub_pipeline.add_sub_pipeline( 
sub_flow=sync_dbs_for_cluster_sub_flow( @@ -219,6 +255,18 @@ def slave_rebuild_in_new_slave_flow(self): ) cluster_sub_pipeline.add_parallel_acts(acts_list=acts_list) + # 删除随机账号 + cluster_sub_pipeline.add_act( + act_name=_("drop job user"), + act_component_code=SqlserverDropJobUserComponent.code, + kwargs=asdict( + DropRandomJobUserKwargs( + cluster_ids=[cluster.id], + other_instances=[f"{info['new_slave_host']['ip']}:{old_slave.port}"], + ), + ), + ) + cluster_flows.append( cluster_sub_pipeline.build_sub_process(sub_name=_("[{}]集群与新slave建立关系".format(cluster.name))) ) diff --git a/dbm-ui/backend/flow/plugins/components/collections/sqlserver/create_random_job_user.py b/dbm-ui/backend/flow/plugins/components/collections/sqlserver/create_random_job_user.py index c041db0a80..2ed659f18d 100644 --- a/dbm-ui/backend/flow/plugins/components/collections/sqlserver/create_random_job_user.py +++ b/dbm-ui/backend/flow/plugins/components/collections/sqlserver/create_random_job_user.py @@ -10,9 +10,15 @@ import logging +from django.db.models import QuerySet +from django.utils.translation import ugettext as _ from pipeline.component_framework.component import Component +from backend.db_meta.enums import InstanceStatus +from backend.db_meta.exceptions import ClusterNotExistException +from backend.db_meta.models import Cluster from backend.flow.plugins.components.collections.common.base_service import BaseService +from backend.flow.utils.sqlserver.sqlserver_db_function import create_sqlserver_random_job_user logger = logging.getLogger("flow") @@ -21,10 +27,73 @@ class SqlserverAddJobUserService(BaseService): """ 为Sqlserver单据添加job的临时本地账号,操作目标实例 单据是以集群维度来添加,如果单据涉及到集群,应该统一添加账号密码,以便后续操作方便 - todo """ + def _add_job_user( + self, + job_root_id: str, + pwd: str, + sid: str, + storages: QuerySet, + other_instances: list, + bk_cloud_id: int, + ) -> bool: + # 执行添加随机账号 + ret = create_sqlserver_random_job_user( + job_root_id=job_root_id, + pwd=pwd, + sid=sid, + storages=storages, + 
other_instances=other_instances, + bk_cloud_id=bk_cloud_id, + ) + + # 判断结果 + is_error = False + for info in ret: + if info["error_msg"]: + self.log_error(f"add_job_user in instance [{info['address']}]: err: [{info['error_msg']}]") + if info["address"] in other_instances: + # 如果在集群之外的实例报错,直接异常 + is_error = True + + inst_status = storages.get( + machine__ip=info["address"].split(":")[0], port=int(info["address"].split(":")[1]) + ) + if inst_status == InstanceStatus.UNAVAILABLE: + # 如果实例的状态本身是unavailable,则失败可以忽略 + self.log_warning(f"the instance [{info['address']}] is already unavailable, ignore") + continue + is_error = True + + return is_error + def _execute(self, data, parent_data, callback=None) -> bool: + kwargs = data.get_one_of_inputs("kwargs") + global_data = data.get_one_of_inputs("global_data") + + for cluster_id in kwargs["cluster_ids"]: + # 获取每个cluster_id对应的对象 + try: + cluster = Cluster.objects.get(id=cluster_id, bk_biz_id=global_data["bk_biz_id"]) + except Cluster.DoesNotExist: + raise ClusterNotExistException( + cluster_id=cluster_id, bk_biz_id=global_data["bk_biz_id"], message=_("集群不存在") + ) + + # 执行 + if self._add_job_user( + job_root_id=global_data["job_root_id"], + pwd=global_data["job_root_id"], + sid=kwargs["sid"], + storages=cluster.storageinstance_set.all(), + other_instances=kwargs["other_instances"], + bk_cloud_id=cluster.bk_cloud_id, + ): + self.log_error(f"execute add random-job-user failed in cluster [{cluster.name}]") + return False + + self.log_info(f"execute add job random-job-user successfully in cluster [{cluster.name}]") return True diff --git a/dbm-ui/backend/flow/plugins/components/collections/sqlserver/drop_random_job_user.py b/dbm-ui/backend/flow/plugins/components/collections/sqlserver/drop_random_job_user.py index 3703ca66b6..aa4c819ab0 100644 --- a/dbm-ui/backend/flow/plugins/components/collections/sqlserver/drop_random_job_user.py +++ b/dbm-ui/backend/flow/plugins/components/collections/sqlserver/drop_random_job_user.py @@ 
-10,9 +10,15 @@ import logging +from django.db.models import QuerySet +from django.utils.translation import ugettext as _ from pipeline.component_framework.component import Component +from backend.db_meta.enums import InstanceStatus +from backend.db_meta.exceptions import ClusterNotExistException +from backend.db_meta.models import Cluster from backend.flow.plugins.components.collections.common.base_service import BaseService +from backend.flow.utils.sqlserver.sqlserver_db_function import drop_sqlserver_random_job_user logger = logging.getLogger("flow") @@ -20,10 +26,55 @@ class SqlserverDropJobUserService(BaseService): """ 为Sqlserver单据删除job的临时本地账号 - todo """ + def _drop_job_user(self, job_root_id: str, bk_cloud_id: int, storages: QuerySet, other_instances: list) -> bool: + + ret = drop_sqlserver_random_job_user( + job_root_id=job_root_id, bk_cloud_id=bk_cloud_id, storages=storages, other_instances=other_instances + ) + + is_error = False + for info in ret: + if info["error_msg"]: + self.log_error(f"drop_job_user in instance [{info['address']}]: err: [{info['error_msg']}]") + if info["address"] in other_instances: + # 如果在集群之外的实例报错,直接异常 + is_error = True + + inst_status = storages.get( + machine__ip=info["address"].split(":")[0], port=int(info["address"].split(":")[1]) + ) + if inst_status == InstanceStatus.UNAVAILABLE: + # 如果实例的状态本身是unavailable,则失败可以忽略 + self.log_warning(f"the instance [{info['address']}] is already unavailable, ignore") + continue + is_error = True + + return is_error + def _execute(self, data, parent_data, callback=None) -> bool: + kwargs = data.get_one_of_inputs("kwargs") + global_data = data.get_one_of_inputs("global_data") + + for cluster_id in kwargs["cluster_ids"]: + # 获取每个cluster_id对应的对象 + try: + cluster = Cluster.objects.get(id=cluster_id, bk_biz_id=global_data["bk_biz_id"]) + except Cluster.DoesNotExist: + raise ClusterNotExistException( + cluster_id=cluster_id, bk_biz_id=global_data["bk_biz_id"], message=_("集群不存在") + ) + if 
self._drop_job_user( + job_root_id=global_data["job_root_id"], + storages=cluster.storageinstance_set.all(), + other_instances=kwargs["other_instances"], + bk_cloud_id=cluster.bk_cloud_id, + ): + self.log_error(f"execute drop random-job-user failed in cluster [{cluster.name}]") + return False + + self.log_info(f"execute drop random-job-user successfully in cluster [{cluster.name}]") return True diff --git a/dbm-ui/backend/flow/utils/sqlserver/payload_handler.py b/dbm-ui/backend/flow/utils/sqlserver/payload_handler.py index 5ce092f0b9..ade57670aa 100644 --- a/dbm-ui/backend/flow/utils/sqlserver/payload_handler.py +++ b/dbm-ui/backend/flow/utils/sqlserver/payload_handler.py @@ -20,6 +20,7 @@ from backend.db_proxy.constants import ExtensionType from backend.db_proxy.models import DBExtension from backend.flow.consts import DEFAULT_INSTANCE, MSSQL_ADMIN, MSSQL_EXPORTER, SqlserverComponent, SqlserverUserName +from backend.flow.utils.mysql.get_mysql_sys_user import generate_mysql_tmp_user logger = logging.getLogger("flow") @@ -46,26 +47,14 @@ def get_sqlserver_drs_account(bk_cloud_id: int): "drs_pwd": AsymmetricHandler.decrypt(name=bk_cloud_name, content=drs.details["pwd"]), } - @staticmethod - def get_sqlserver_account(): + def get_sqlserver_account(self): """ - 获取sqlserver实例sa内置帐户密码,后续做单据的临时sa账号随机化 todo + 获取sqlserver实例sa内置帐户密码,用单据的临时sa账号随机化 """ - user_map = {} - value_to_name = {member.value: member.name.lower() for member in SqlserverUserName} - data = DBPrivManagerApi.get_password( - { - "instances": [DEFAULT_INSTANCE], - "users": [ - {"username": SqlserverUserName.SA.value, "component": SqlserverComponent.SQLSERVER.value}, - ], - } - ) - for user in data["items"]: - user_map[value_to_name[user["username"]] + "_user"] = user["username"] - user_map[value_to_name[user["username"]] + "_pwd"] = base64.b64decode(user["password"]).decode("utf-8") - - return user_map + return { + "sa_user": generate_mysql_tmp_user(self.global_data["job_root_id"]), + "sa_pwd": 
self.global_data["job_root_id"], + } @staticmethod def get_init_system_account(): diff --git a/dbm-ui/backend/flow/utils/sqlserver/sqlserver_act_dataclass.py b/dbm-ui/backend/flow/utils/sqlserver/sqlserver_act_dataclass.py index e7705e5a1a..49ea9487d8 100644 --- a/dbm-ui/backend/flow/utils/sqlserver/sqlserver_act_dataclass.py +++ b/dbm-ui/backend/flow/utils/sqlserver/sqlserver_act_dataclass.py @@ -181,6 +181,32 @@ class DownloadBackupFileKwargs: get_backup_file_info_var: str +@dataclass() +class CreateRandomJobUserKwargs: + """ + 定义执行sqlserver_add_job_user活动节点的私有变量结构体 + @attributes cluster_ids 集群id列表 + @attributes other_instances 额外添加账号的实例列表(可选) + @attributes sid 随机账号sid + """ + + cluster_ids: list + sid: str + other_instances: list = field(default_factory=list) + + +@dataclass() +class DropRandomJobUserKwargs: + """ + 定义执行sqlserver_drop_job_user活动节点的私有变量结构体 + @attributes cluster_ids 集群id列表 + @attributes other_instances 额外删除账号的实例列表(可选) + """ + + cluster_ids: list + other_instances: list = field(default_factory=list) + + @dataclass() class SqlserverDBConstructContext: """ diff --git a/dbm-ui/backend/flow/utils/sqlserver/sqlserver_act_payload.py b/dbm-ui/backend/flow/utils/sqlserver/sqlserver_act_payload.py index ab49f711ee..39055a5e14 100644 --- a/dbm-ui/backend/flow/utils/sqlserver/sqlserver_act_payload.py +++ b/dbm-ui/backend/flow/utils/sqlserver/sqlserver_act_payload.py @@ -22,8 +22,7 @@ class SqlserverActPayload(PayloadHandler): - @staticmethod - def system_init_payload(**kwargs) -> dict: + def system_init_payload(self, **kwargs) -> dict: """ 系统初始化payload """ @@ -31,7 +30,7 @@ def system_init_payload(**kwargs) -> dict: return { "db_type": DBActuatorTypeEnum.Default.value, "action": SqlserverActuatorActionEnum.SysInit.value, - "payload": PayloadHandler.get_init_system_account(), + "payload": self.get_init_system_account(), } def get_install_sqlserver_payload(self, **kwargs) -> dict: @@ -75,9 +74,7 @@ def get_install_sqlserver_payload(self, **kwargs) -> dict: "db_type": 
DBActuatorTypeEnum.Sqlserver.value, "action": SqlserverActuatorActionEnum.Deploy.value, "payload": { - "general": { - "runtime_account": PayloadHandler.get_create_sqlserver_account(self.global_data["bk_cloud_id"]) - }, + "general": {"runtime_account": self.get_create_sqlserver_account(self.global_data["bk_cloud_id"])}, "extend": { "host": kwargs["ips"][0]["ip"], "pkg": sqlserver_pkg.name, @@ -101,7 +98,7 @@ def get_execute_sql_payload(self, **kwargs) -> dict: "db_type": DBActuatorTypeEnum.Sqlserver.value, "action": SqlserverActuatorActionEnum.ExecSQLFiles.value, "payload": { - "general": {"runtime_account": PayloadHandler.get_sqlserver_account()}, + "general": {"runtime_account": self.get_sqlserver_account()}, "extend": { "host": kwargs["ips"][0]["ip"], "ports": self.global_data["ports"], @@ -120,7 +117,7 @@ def get_backup_dbs_payload(self, **kwargs) -> dict: "db_type": DBActuatorTypeEnum.Sqlserver.value, "action": SqlserverActuatorActionEnum.BackupDBS.value, "payload": { - "general": {"runtime_account": PayloadHandler.get_sqlserver_account()}, + "general": {"runtime_account": self.get_sqlserver_account()}, "extend": { "host": kwargs["ips"][0]["ip"], "port": kwargs["custom_params"]["port"], @@ -141,7 +138,7 @@ def get_backup_log_dbs_payload(self, **kwargs) -> dict: "db_type": DBActuatorTypeEnum.Sqlserver.value, "action": SqlserverActuatorActionEnum.BackupDBS.value, "payload": { - "general": {"runtime_account": PayloadHandler.get_sqlserver_account()}, + "general": {"runtime_account": self.get_sqlserver_account()}, "extend": { "host": kwargs["ips"][0]["ip"], "port": kwargs["custom_params"]["port"], @@ -161,7 +158,7 @@ def get_rename_dbs_payload(self, **kwargs) -> dict: "db_type": DBActuatorTypeEnum.Sqlserver.value, "action": SqlserverActuatorActionEnum.RenameDBS.value, "payload": { - "general": {"runtime_account": PayloadHandler.get_sqlserver_account()}, + "general": {"runtime_account": self.get_sqlserver_account()}, "extend": { "host": kwargs["ips"][0]["ip"], 
"port": self.global_data["port"], @@ -180,7 +177,7 @@ def get_clean_dbs_payload(self, **kwargs) -> dict: "db_type": DBActuatorTypeEnum.Sqlserver.value, "action": SqlserverActuatorActionEnum.CleanDBS.value, "payload": { - "general": {"runtime_account": PayloadHandler.get_sqlserver_account()}, + "general": {"runtime_account": self.get_sqlserver_account()}, "extend": { "host": kwargs["ips"][0]["ip"], "port": self.global_data["port"], @@ -202,7 +199,7 @@ def get_switch_payload(self, **kwargs) -> dict: "db_type": DBActuatorTypeEnum.Sqlserver.value, "action": SqlserverActuatorActionEnum.RoleSwitch.value, "payload": { - "general": {"runtime_account": PayloadHandler.get_sqlserver_account()}, + "general": {"runtime_account": self.get_sqlserver_account()}, "extend": { "host": kwargs["ips"][0]["ip"], "port": self.global_data["port"], @@ -222,7 +219,7 @@ def get_check_abnormal_db_payload(self, **kwargs) -> dict: "db_type": DBActuatorTypeEnum.Sqlserver_check.value, "action": SqlserverActuatorActionEnum.CheckAbnormalDB.value, "payload": { - "general": {"runtime_account": PayloadHandler.get_sqlserver_account()}, + "general": {"runtime_account": self.get_sqlserver_account()}, "extend": { "host": kwargs["ips"][0]["ip"], "port": self.global_data["port"], @@ -238,7 +235,7 @@ def get_check_inst_process_payload(self, **kwargs) -> dict: "db_type": DBActuatorTypeEnum.Sqlserver_check.value, "action": SqlserverActuatorActionEnum.CheckInstProcess.value, "payload": { - "general": {"runtime_account": PayloadHandler.get_sqlserver_account()}, + "general": {"runtime_account": self.get_sqlserver_account()}, "extend": { "host": kwargs["ips"][0]["ip"], "port": self.global_data["port"], @@ -254,7 +251,7 @@ def get_clone_user_payload(self, **kwargs) -> dict: "db_type": DBActuatorTypeEnum.Sqlserver.value, "action": SqlserverActuatorActionEnum.CloneLoginUsers.value, "payload": { - "general": {"runtime_account": PayloadHandler.get_sqlserver_account()}, + "general": {"runtime_account": 
self.get_sqlserver_account()}, "extend": { "host": kwargs["ips"][0]["ip"], "port": self.global_data["port"], @@ -272,7 +269,7 @@ def get_clone_jobs_payload(self, **kwargs) -> dict: "db_type": DBActuatorTypeEnum.Sqlserver.value, "action": SqlserverActuatorActionEnum.CloneJobs.value, "payload": { - "general": {"runtime_account": PayloadHandler.get_sqlserver_account()}, + "general": {"runtime_account": self.get_sqlserver_account()}, "extend": { "host": kwargs["ips"][0]["ip"], "port": self.global_data["port"], @@ -290,7 +287,7 @@ def get_clone_linkserver_payload(self, **kwargs) -> dict: "db_type": DBActuatorTypeEnum.Sqlserver.value, "action": SqlserverActuatorActionEnum.CloneLinkservers.value, "payload": { - "general": {"runtime_account": PayloadHandler.get_sqlserver_account()}, + "general": {"runtime_account": self.get_sqlserver_account()}, "extend": { "host": kwargs["ips"][0]["ip"], "port": self.global_data["port"], @@ -300,8 +297,7 @@ def get_clone_linkserver_payload(self, **kwargs) -> dict: }, } - @staticmethod - def get_restore_full_dbs_payload(**kwargs) -> dict: + def get_restore_full_dbs_payload(self, **kwargs) -> dict: """ 恢复全量备份的payload """ @@ -309,7 +305,7 @@ def get_restore_full_dbs_payload(**kwargs) -> dict: "db_type": DBActuatorTypeEnum.Sqlserver.value, "action": SqlserverActuatorActionEnum.RestoreDBSForFull.value, "payload": { - "general": {"runtime_account": PayloadHandler.get_sqlserver_account()}, + "general": {"runtime_account": self.get_sqlserver_account()}, "extend": { "host": kwargs["ips"][0]["ip"], "port": kwargs["custom_params"]["port"], @@ -319,8 +315,7 @@ def get_restore_full_dbs_payload(**kwargs) -> dict: }, } - @staticmethod - def get_restore_log_dbs_payload(**kwargs) -> dict: + def get_restore_log_dbs_payload(self, **kwargs) -> dict: """ 恢复增量备份的payload """ @@ -328,7 +323,7 @@ def get_restore_log_dbs_payload(**kwargs) -> dict: "db_type": DBActuatorTypeEnum.Sqlserver.value, "action": SqlserverActuatorActionEnum.RestoreDBSForLog.value, 
"payload": { - "general": {"runtime_account": PayloadHandler.get_sqlserver_account()}, + "general": {"runtime_account": self.get_sqlserver_account()}, "extend": { "host": kwargs["ips"][0]["ip"], "port": kwargs["custom_params"]["port"], @@ -347,7 +342,7 @@ def get_build_database_mirroring(self, **kwargs) -> dict: "db_type": DBActuatorTypeEnum.Sqlserver.value, "action": SqlserverActuatorActionEnum.BuildDBMirroring.value, "payload": { - "general": {"runtime_account": PayloadHandler.get_sqlserver_account()}, + "general": {"runtime_account": self.get_sqlserver_account()}, "extend": { "host": kwargs["ips"][0]["ip"], "port": self.global_data["port"], @@ -366,7 +361,7 @@ def get_build_add_dbs_in_always_on(self, **kwargs) -> dict: "db_type": DBActuatorTypeEnum.Sqlserver.value, "action": SqlserverActuatorActionEnum.AddDBSInAlwaysOn.value, "payload": { - "general": {"runtime_account": PayloadHandler.get_sqlserver_account()}, + "general": {"runtime_account": self.get_sqlserver_account()}, "extend": { "host": kwargs["ips"][0]["ip"], "port": self.global_data["port"], @@ -376,8 +371,7 @@ def get_build_add_dbs_in_always_on(self, **kwargs) -> dict: }, } - @staticmethod - def get_build_always_on(**kwargs) -> dict: + def get_build_always_on(self, **kwargs) -> dict: """ 建立实例加入always_on可用组的payload """ @@ -385,7 +379,7 @@ def get_build_always_on(**kwargs) -> dict: "db_type": DBActuatorTypeEnum.Sqlserver.value, "action": SqlserverActuatorActionEnum.BuildAlwaysOn.value, "payload": { - "general": {"runtime_account": PayloadHandler.get_sqlserver_account()}, + "general": {"runtime_account": self.get_sqlserver_account()}, "extend": { "host": kwargs["ips"][0]["ip"], "port": kwargs["custom_params"]["port"], @@ -396,8 +390,7 @@ def get_build_always_on(**kwargs) -> dict: }, } - @staticmethod - def get_init_machine_for_always_on(**kwargs) -> dict: + def get_init_machine_for_always_on(self, **kwargs) -> dict: """ 建立always_on可用组之前初始化机器的payload """ @@ -405,7 +398,7 @@ def 
get_init_machine_for_always_on(**kwargs) -> dict: "db_type": DBActuatorTypeEnum.Sqlserver.value, "action": SqlserverActuatorActionEnum.InitForAlwaysOn.value, "payload": { - "general": {"runtime_account": PayloadHandler.get_sqlserver_account()}, + "general": {"runtime_account": self.get_sqlserver_account()}, "extend": { "host": kwargs["ips"][0]["ip"], "port": kwargs["custom_params"]["port"], @@ -423,7 +416,7 @@ def uninstall_sqlserver(self, **kwargs) -> dict: "db_type": DBActuatorTypeEnum.Sqlserver.value, "action": SqlserverActuatorActionEnum.Uninstall.value, "payload": { - "general": {"runtime_account": PayloadHandler.get_sqlserver_account()}, + "general": {"runtime_account": self.get_sqlserver_account()}, "extend": { "host": kwargs["ips"][0]["ip"], "ports": self.global_data["custom_params"]["ports"], @@ -432,8 +425,7 @@ def uninstall_sqlserver(self, **kwargs) -> dict: }, } - @staticmethod - def check_backup_file_is_in_local(**kwargs) -> dict: + def check_backup_file_is_in_local(self, **kwargs) -> dict: """ 移动备份文件 """ @@ -441,7 +433,7 @@ def check_backup_file_is_in_local(**kwargs) -> dict: "db_type": DBActuatorTypeEnum.Sqlserver.value, "action": SqlserverActuatorActionEnum.MoveBackupFile.value, "payload": { - "general": {"runtime_account": PayloadHandler.get_sqlserver_account()}, + "general": {"runtime_account": self.get_sqlserver_account()}, "extend": { "target_path": kwargs["custom_params"]["target_path"], "file_list": kwargs["custom_params"]["file_list"], diff --git a/dbm-ui/backend/flow/utils/sqlserver/sqlserver_db_function.py b/dbm-ui/backend/flow/utils/sqlserver/sqlserver_db_function.py index 06a90be147..a73d8e57f3 100644 --- a/dbm-ui/backend/flow/utils/sqlserver/sqlserver_db_function.py +++ b/dbm-ui/backend/flow/utils/sqlserver/sqlserver_db_function.py @@ -8,9 +8,12 @@ specific language governing permissions and limitations under the License. 
""" import re +import secrets from collections import defaultdict from typing import Dict, List +from django.db.models import QuerySet + from backend.components import DRSApi from backend.db_meta.enums import InstanceRole from backend.db_meta.models import Cluster, StorageInstance @@ -22,6 +25,7 @@ SqlserverSyncMode, ) from backend.flow.utils.mysql.db_table_filter import DbTableFilter +from backend.flow.utils.mysql.get_mysql_sys_user import generate_mysql_tmp_user def sqlserver_match_dbs( @@ -378,3 +382,80 @@ def get_cluster_database(cluster_ids: List[int]) -> Dict[int, List[str]]: cluster_dbs_info = get_cluster_database_with_cloud(bk_cloud_id, clusters) cluster_id__database.update(cluster_dbs_info) return cluster_id__database + + +def get_instance_time_zone(instance: StorageInstance) -> str: + """ + 获取实例配置的时区信息 + """ + ret = DRSApi.sqlserver_rpc( + { + "bk_cloud_id": instance.machine.bk_cloud_id, + "addresses": [instance.ip_port], + "cmds": ["select DATENAME(TzOffset, SYSDATETIMEOFFSET()) as time_zone"], + "force": False, + } + ) + if ret[0]["error_msg"]: + raise Exception(f"[{instance.ip_port}] get_time_zone failed: {ret[0]['error_msg']}") + + return ret[0]["cmd_results"][0]["table_data"][0]["time_zone"] + + +def create_sqlserver_login_sid() -> str: + """ + 生成login的sid,sid格式:"0x" + 32位16进制字符串 + """ + num_bytes = 16 # 每个字节对应两个十六进制字符 + random_bytes = secrets.token_bytes(num_bytes) + hex_string = random_bytes.hex() + return "0x" + hex_string + + +def create_sqlserver_random_job_user( + job_root_id: str, sid: str, pwd: str, storages: QuerySet, other_instances: list, bk_cloud_id: int +) -> list: + """ + 创建随机账号的基本函数 + @param job_root_id: 任务root_id + @param sid: 用户的sid + @param pwd: 用户密码 + @param storages: 添加随机账号的实例 + @param other_instances: 作为额外的实例传入,目标是满足集群添加实例且没有暂时没有元数据的场景, 每个元素是ip:port字符串 + @param bk_cloud_id: 云区域id + """ + user = generate_mysql_tmp_user(job_root_id) + create_cmds = [ + f"use master IF SUSER_SID('{user}') IS NOT NULL drop login [{user}];" + f"CREATE 
LOGIN {user} WITH PASSWORD=N'{pwd}', DEFAULT_DATABASE=[MASTER],SID={sid},CHECK_POLICY=OFF;" + f"EXEC sp_addsrvrolemember @loginame = '{user}', @rolename = N'sysadmin';", + ] + return DRSApi.sqlserver_rpc( + { + "bk_cloud_id": bk_cloud_id, + "addresses": [s.ip_port for s in storages] + other_instances, + "cmds": create_cmds, + "force": False, + } + ) + + +def drop_sqlserver_random_job_user( + job_root_id: str, bk_cloud_id: int, storages: QuerySet, other_instances: list +) -> list: + """ + 删除随机账号的基本函数 + @param job_root_id: 任务root_id + @param storages: 删除随机账号的实例 + @param other_instances: 作为额外的实例传入,目标是满足集群添加实例且暂时没有元数据的场景, 每个元素是ip:port字符串 + @param bk_cloud_id: 云区域id + """ + user = generate_mysql_tmp_user(job_root_id) + return DRSApi.sqlserver_rpc( + { + "bk_cloud_id": bk_cloud_id, + "addresses": [s.ip_port for s in storages] + other_instances, + "cmds": [f"use master IF SUSER_SID('{user}') IS NOT NULL drop login [{user}]"], + "force": False, + } + ) diff --git a/dbm-ui/backend/flow/utils/sqlserver/sqlserver_db_meta.py b/dbm-ui/backend/flow/utils/sqlserver/sqlserver_db_meta.py index 81ce7110be..09812d74be 100644 --- a/dbm-ui/backend/flow/utils/sqlserver/sqlserver_db_meta.py +++ b/dbm-ui/backend/flow/utils/sqlserver/sqlserver_db_meta.py @@ -46,7 +46,7 @@ def sqlserver_single_apply(self): clusters=self.global_data["clusters"], db_module_id=self.global_data["db_module_id"], creator=self.global_data["created_by"], - time_zone="+08:00", + time_zone="", bk_cloud_id=int(self.global_data["bk_cloud_id"]), resource_spec=self.global_data.get("resource_spec", def_resource_spec), region=self.global_data["region"], @@ -66,7 +66,7 @@ def sqlserver_ha_apply(self): slave_ip=self.global_data["slave_ip"], clusters=self.global_data["clusters"], creator=self.global_data["created_by"], - time_zone="+08:00", + time_zone="", bk_cloud_id=int(self.global_data["bk_cloud_id"]), resource_spec=self.global_data.get("resource_spec", def_resource_spec), region=self.global_data["region"],