diff --git a/src/dashboard/apigateway/apigateway/apps/metrics/constants.py b/src/dashboard/apigateway/apigateway/apps/metrics/constants.py
index c553ca177..31654cbee 100644
--- a/src/dashboard/apigateway/apigateway/apps/metrics/constants.py
+++ b/src/dashboard/apigateway/apigateway/apps/metrics/constants.py
@@ -16,30 +16,21 @@
 # We undertake not to change the open source license (MIT license) applicable
 # to the current version of the project delivered to anyone in the future.
 #
-from enum import Enum
+from blue_krill.data_types.enum import EnumField, StructuredEnum
 
-from apigateway.common.constants import ChoiceEnumMixin
 
-JOB_NAME = "apigateway"
 
+class MetricsEnum(StructuredEnum):
+    REQUESTS = EnumField("requests")
+    FAILED_REQUESTS = EnumField("failed_requests")
+    RESPONSE_TIME_95TH = EnumField("response_time_95th")
+    RESPONSE_TIME_90TH = EnumField("response_time_90th")
+    RESPONSE_TIME_80TH = EnumField("response_time_80th")
+    RESPONSE_TIME_50TH = EnumField("response_time_50th")
 
-class MetricsEnum(ChoiceEnumMixin, Enum):
-    REQUESTS = "requests"
-    FAILED_REQUESTS = "failed_requests"
-    RESPONSE_TIME_95TH = "response_time_95th"
-    RESPONSE_TIME_90TH = "response_time_90th"
-    RESPONSE_TIME_80TH = "response_time_80th"
-    RESPONSE_TIME_50TH = "response_time_50th"
-
-
-class DimensionEnum(ChoiceEnumMixin, Enum):
-    ALL = "all"
-    APP = "app"
-    RESOURCE = "resource"
+class DimensionEnum(StructuredEnum):
+    ALL = EnumField("all")
+    APP = EnumField("app")
+    RESOURCE = EnumField("resource")
     # 资源+非200状态码
-    RESOURCE_NON200_STATUS = "resource_non200_status"
-
-
-class StatisticsIntervalEnum(ChoiceEnumMixin, Enum):
-    HOUR = "hour"
-    DAY = "day"
+    RESOURCE_NON200_STATUS = EnumField("resource_non200_status")
diff --git a/src/dashboard/apigateway/apigateway/apps/metrics/dimension_metrics.py b/src/dashboard/apigateway/apigateway/apps/metrics/dimension_metrics.py
new file mode 100644
index 000000000..90f0a3525
--- /dev/null
+++ b/src/dashboard/apigateway/apigateway/apps/metrics/dimension_metrics.py
@@ -0,0 +1,255 @@
+# -*- coding: utf-8 -*-
+#
+# TencentBlueKing is pleased to support the open source community by making
+# 蓝鲸智云 - API 网关(BlueKing - APIGateway) available.
+# Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
+# Licensed under the MIT License (the "License"); you may not use this file except
+# in compliance with the License. You may obtain a copy of the License at
+#
+# http://opensource.org/licenses/MIT
+#
+# Unless required by applicable law or agreed to in writing, software distributed under
+# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+# either express or implied. See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# We undertake not to change the open source license (MIT license) applicable
+# to the current version of the project delivered to anyone in the future.
+#
+from abc import ABC, abstractmethod
+from typing import ClassVar, Dict, List, Optional, Tuple, Type
+
+from django.conf import settings
+
+from apigateway.apps.metrics.constants import DimensionEnum, MetricsEnum
+from apigateway.common.error_codes import error_codes
+from apigateway.components.prometheus import prometheus_component
+
+
+class BasePrometheusMetrics(ABC):
+    default_labels = getattr(settings, "PROMETHEUS_DEFAULT_LABELS", [])
+    metric_name_prefix = getattr(settings, "PROMETHEUS_METRIC_NAME_PREFIX", "")
+
+    def _get_labels_expression(self, labels: List[Tuple[str, str, Optional[str]]]) -> str:
+        return ", ".join(
+            [f'{label}{expression}"{value}"' for label, expression, value in labels if value not in [None, ""]]
+        )
+
+
+class BaseDimensionMetrics(BasePrometheusMetrics):
+    dimension: ClassVar[DimensionEnum]
+    metrics: ClassVar[MetricsEnum]
+
+    @abstractmethod
+    def _get_query_promql(self, gateway_name: str, stage_name: str, resource_name: Optional[str], step: str) -> str:
+        pass
+
+    def query_range(
+        self,
+        gateway_name: str,
+        stage_name: str,
+        resource_name: Optional[str],
+        start: int,
+        end: int,
+        step: str,
+    ):
+        # generate query expression
+        promql = self._get_query_promql(gateway_name, stage_name, resource_name, step)
+
+        # request prometheus http api to get metrics data
+        return prometheus_component.query_range(
+            bk_biz_id=getattr(settings, "BCS_CLUSTER_BK_BIZ_ID", ""),
+            promql=promql,
+            start=start,
+            end=end,
+            step=step,
+        )
+
+
+class RequestsMetrics(BaseDimensionMetrics):
+    dimension = DimensionEnum.ALL
+    metrics = MetricsEnum.REQUESTS
+
+    def _get_query_promql(self, gateway_name: str, stage_name: str, resource_name: Optional[str], step: str) -> str:
+        labels = self._get_labels_expression(
+            [
+                *self.default_labels,
+                ("api_name", "=", gateway_name),
+                ("stage_name", "=", stage_name),
+                ("resource_name", "=", resource_name),
+            ]
+        )
+        return f"sum(increase({self.metric_name_prefix}apigateway_api_requests_total{{" f"{labels}" f"}}[{step}]))"
+
+
+class FailedRequestsMetrics(BaseDimensionMetrics):
+    dimension = DimensionEnum.ALL
+    metrics = MetricsEnum.FAILED_REQUESTS
+
+    def _get_query_promql(self, gateway_name: str, stage_name: str, resource_name: Optional[str], step: str) -> str:
+        labels = self._get_labels_expression(
+            [
+                *self.default_labels,
+                ("api_name", "=", gateway_name),
+                ("stage_name", "=", stage_name),
+                ("resource_name", "=", resource_name),
+                ("proxy_error", "=", "1"),
+            ]
+        )
+        return f"sum(increase({self.metric_name_prefix}apigateway_api_requests_total{{" f"{labels}" f"}}[{step}]))"
+
+
+class BaseResponseTimePercentileMetrics(BaseDimensionMetrics):
+    quantile = 1.0
+
+    def _get_query_promql(self, gateway_name: str, stage_name: str, resource_name: Optional[str], step: str) -> str:
+        labels = self._get_labels_expression(
+            [
+                *self.default_labels,
+                ("api_name", "=", gateway_name),
+                ("stage_name", "=", stage_name),
+                ("resource_name", "=", resource_name),
+            ]
+        )
+        return (
+            f"histogram_quantile({self.quantile}, "
+            f"sum(rate({self.metric_name_prefix}apigateway_api_request_duration_milliseconds_bucket{{"
+            f"{labels}"
+            f"}}[{step}])) by (le, api_name))"
+        )
+
+
+class ResponseTime95thMetrics(BaseResponseTimePercentileMetrics):
+    dimension = DimensionEnum.ALL
+    metrics = MetricsEnum.RESPONSE_TIME_95TH
+    quantile = 0.95
+
+
+class ResponseTime90thMetrics(BaseResponseTimePercentileMetrics):
+    dimension = DimensionEnum.ALL
+    metrics = MetricsEnum.RESPONSE_TIME_90TH
+    quantile = 0.90
+
+
+class ResponseTime80thMetrics(BaseResponseTimePercentileMetrics):
+    dimension = DimensionEnum.ALL
+    metrics = MetricsEnum.RESPONSE_TIME_80TH
+    quantile = 0.80
+
+
+class ResponseTime50thMetrics(BaseResponseTimePercentileMetrics):
+    dimension = DimensionEnum.ALL
+    metrics = MetricsEnum.RESPONSE_TIME_50TH
+    quantile = 0.50
+
+
+class ResourceRequestsMetrics(BaseDimensionMetrics):
+    dimension = DimensionEnum.RESOURCE
+    metrics = MetricsEnum.REQUESTS
+
+    def _get_query_promql(self, gateway_name: str, stage_name: str, resource_name: Optional[str], step: str) -> str:
+        labels = self._get_labels_expression(
+            [
+                *self.default_labels,
+                ("api_name", "=", gateway_name),
+                ("stage_name", "=", stage_name),
+                ("resource_name", "=", resource_name),
+            ]
+        )
+        return (
+            f"topk(10, sum(increase({self.metric_name_prefix}apigateway_api_requests_total{{"
+            f"{labels}"
+            f"}}[{step}])) by (api_name, resource_name, matched_uri))"
+        )
+
+
+class ResourceFailedRequestsMetrics(BaseDimensionMetrics):
+    dimension = DimensionEnum.RESOURCE
+    metrics = MetricsEnum.FAILED_REQUESTS
+
+    def _get_query_promql(self, gateway_name: str, stage_name: str, resource_name: Optional[str], step: str) -> str:
+        labels = self._get_labels_expression(
+            [
+                *self.default_labels,
+                ("api_name", "=", gateway_name),
+                ("stage_name", "=", stage_name),
+                ("resource_name", "=", resource_name),
+                ("proxy_error", "=", "1"),
+            ]
+        )
+        return (
+            f"topk(10, sum(increase({self.metric_name_prefix}apigateway_api_requests_total{{"
+            f"{labels}"
+            f"}}[{step}])) by (api_name, resource_name, matched_uri))"
+        )
+
+
+class AppRequestsMetrics(BaseDimensionMetrics):
+    dimension = DimensionEnum.APP
+    metrics = MetricsEnum.REQUESTS
+
+    def _get_query_promql(self, gateway_name: str, stage_name: str, resource_name: Optional[str], step: str) -> str:
+        labels = self._get_labels_expression(
+            [
+                *self.default_labels,
+                ("api_name", "=", gateway_name),
+                ("stage_name", "=", stage_name),
+                ("resource_name", "=", resource_name),
+            ]
+        )
+        return (
+            f"topk(10, sum(increase({self.metric_name_prefix}apigateway_app_requests_total{{"
+            f"{labels}"
+            f"}}[{step}])) by (api_name, app_code))"
+        )
+
+
+class ResourceNon200StatusRequestsMetrics(BaseDimensionMetrics):
+    dimension = DimensionEnum.RESOURCE_NON200_STATUS
+    metrics = MetricsEnum.REQUESTS
+
+    def _get_query_promql(self, gateway_name: str, stage_name: str, resource_name: Optional[str], step: str) -> str:
+        labels = self._get_labels_expression(
+            [
+                *self.default_labels,
+                ("api_name", "=", gateway_name),
+                ("stage_name", "=", stage_name),
+                ("resource_name", "=", resource_name),
+                ("status", "!=", "200"),
+            ]
+        )
+        return (
+            f"topk(10, sum(increase({self.metric_name_prefix}apigateway_api_requests_total{{"
+            f"{labels}"
+            f"}}[{step}])) by (api_name, resource_name, matched_uri, status))"
+        )
+
+
+class DimensionMetricsFactory:
+    # map: dimension -> metrics -> dimension_metrics_class
+    _registry: Dict[DimensionEnum, Dict[MetricsEnum, Type[BaseDimensionMetrics]]] = {}
+
+    @classmethod
+    def create_dimension_metrics(cls, dimension: DimensionEnum, metrics: MetricsEnum) -> BaseDimensionMetrics:
+        _class = cls._registry.get(dimension, {}).get(metrics)
+        if not _class:
+            raise error_codes.INVALID_ARGS.format(f"unsupported dimension={dimension.value}, metrics={metrics.value}")
+        return _class()
+
+    @classmethod
+    def register(cls, dimension_metrics_class: Type[BaseDimensionMetrics]):
+        _class = dimension_metrics_class
+        cls._registry.setdefault(_class.dimension, {})
+        cls._registry[_class.dimension][_class.metrics] = dimension_metrics_class
+
+
+DimensionMetricsFactory.register(RequestsMetrics)
+DimensionMetricsFactory.register(FailedRequestsMetrics)
+DimensionMetricsFactory.register(ResponseTime95thMetrics)
+DimensionMetricsFactory.register(ResponseTime90thMetrics)
+DimensionMetricsFactory.register(ResponseTime80thMetrics)
+DimensionMetricsFactory.register(ResponseTime50thMetrics)
+DimensionMetricsFactory.register(ResourceRequestsMetrics)
+DimensionMetricsFactory.register(ResourceFailedRequestsMetrics)
+DimensionMetricsFactory.register(AppRequestsMetrics)
+DimensionMetricsFactory.register(ResourceNon200StatusRequestsMetrics)
diff --git a/src/dashboard/apigateway/apigateway/apps/metrics/helpers.py b/src/dashboard/apigateway/apigateway/apps/metrics/helpers.py
deleted file mode 100644
index 04a2a99af..000000000
--- a/src/dashboard/apigateway/apigateway/apps/metrics/helpers.py
+++ /dev/null
@@ -1,257 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# TencentBlueKing is pleased to support the open source community by making
-# 蓝鲸智云 - API 网关(BlueKing - APIGateway) available.
-# Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
-# Licensed under the MIT License (the "License"); you may not use this file except
-# in compliance with the License. You may obtain a copy of the License at
-#
-# http://opensource.org/licenses/MIT
-#
-# Unless required by applicable law or agreed to in writing, software distributed under
-# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
-# either express or implied. See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# We undertake not to change the open source license (MIT license) applicable
-# to the current version of the project delivered to anyone in the future.
-# -from abc import ABC, abstractmethod -from typing import Dict, Optional, Text - -from django.conf import settings - -from apigateway.common.error_codes import error_codes - -from .constants import JOB_NAME, DimensionEnum, MetricsEnum - - -class BasePrometheusMetrics(ABC): - job_name = JOB_NAME - default_labels = getattr(settings, "PROMETHEUS_DEFAULT_LABELS", []) - metric_name_prefix = getattr(settings, "PROMETHEUS_METRIC_NAME_PREFIX", "") - - def _get_labels_expression(self, labels): - return ", ".join( - [f'{label}{expression}"{value}"' for label, expression, value in labels if value not in [None, ""]] - ) - - -class BaseDimensionMetrics(BasePrometheusMetrics): - @abstractmethod - def get_query_expression(self, gateway_id: int, stage_name: str, resource_id: Optional[int], step: str) -> str: - pass - - -class DimensionMetricsManager: - # map: dimension -> metrics -> dimension_metrics_class - _registry: Dict[Text, Dict[Text, BaseDimensionMetrics]] = {} - - @classmethod - def create_dimension_metrics(cls, dimension, metrics): - _class = cls._registry.get(dimension, {}).get(metrics) - if not _class: - raise error_codes.INVALID_ARGS.format(f"unsupported grant_dimension={dimension}, metrics={metrics}") - return _class() - - @classmethod - def register(cls, dimension_metrics_class): - _class = dimension_metrics_class - cls._registry.setdefault(_class.dimension, {}) - cls._registry[_class.dimension][_class.metrics] = dimension_metrics_class - - -class RequestsMetrics(BaseDimensionMetrics): - dimension = DimensionEnum.ALL.value - metrics = MetricsEnum.REQUESTS.value - - def get_query_expression(self, gateway_id: int, stage_name: str, resource_id: Optional[int], step: str) -> str: - labels = self._get_labels_expression( - [ - *self.default_labels, - ("job", "=", self.job_name), - ("api", "=", gateway_id), - ("stage", "=", stage_name), - ("resource", "=", resource_id), - ] - ) - return f"sum(increase({self.metric_name_prefix}apigateway_api_requests_total{{" f"{labels}" f"}}[{step}]))" - - -DimensionMetricsManager.register(RequestsMetrics) - - -class FailedRequestsMetrics(BaseDimensionMetrics): - dimension = DimensionEnum.ALL.value - metrics = MetricsEnum.FAILED_REQUESTS.value - - def get_query_expression(self, gateway_id: int, stage_name: str, resource_id: Optional[int], step: str) -> str: - labels = self._get_labels_expression( - [ - *self.default_labels, - ("job", "=", self.job_name), - ("api", "=", gateway_id), - ("stage", "=", stage_name), - ("resource", "=", resource_id), - ("proxy_error", "=", "1"), - ] - ) - return f"sum(increase({self.metric_name_prefix}apigateway_api_requests_total{{" f"{labels}" f"}}[{step}]))" - - -DimensionMetricsManager.register(FailedRequestsMetrics) - - -class BaseResponseTimePercentileMetrics(BaseDimensionMetrics): - quantile = 1.0 - - def get_query_expression(self, gateway_id: int, stage_name: str, resource_id: Optional[int], step: str) -> str: - labels = self._get_labels_expression( - [ - *self.default_labels, - ("job", "=", self.job_name), - ("api", "=", gateway_id), - ("stage", "=", stage_name), - ("resource", "=", resource_id), - ] - ) - return ( - f"histogram_quantile({self.quantile}, " - f"sum(rate({self.metric_name_prefix}apigateway_api_request_duration_milliseconds_bucket{{" - f"{labels}" - f"}}[{step}])) by (le, api))" - ) - - -class ResponseTime95thMetrics(BaseResponseTimePercentileMetrics): - dimension = DimensionEnum.ALL.value - metrics = MetricsEnum.RESPONSE_TIME_95TH.value - quantile = 0.95 - - 
-DimensionMetricsManager.register(ResponseTime95thMetrics) - - -class ResponseTime90thMetrics(BaseResponseTimePercentileMetrics): - dimension = DimensionEnum.ALL.value - metrics = MetricsEnum.RESPONSE_TIME_90TH.value - quantile = 0.90 - - -DimensionMetricsManager.register(ResponseTime90thMetrics) - - -class ResponseTime80thMetrics(BaseResponseTimePercentileMetrics): - dimension = DimensionEnum.ALL.value - metrics = MetricsEnum.RESPONSE_TIME_80TH.value - quantile = 0.80 - - -DimensionMetricsManager.register(ResponseTime80thMetrics) - - -class ResponseTime50thMetrics(BaseResponseTimePercentileMetrics): - dimension = DimensionEnum.ALL.value - metrics = MetricsEnum.RESPONSE_TIME_50TH.value - quantile = 0.50 - - -DimensionMetricsManager.register(ResponseTime50thMetrics) - - -class ResourceRequestsMetrics(BaseDimensionMetrics): - dimension = DimensionEnum.RESOURCE.value - metrics = MetricsEnum.REQUESTS.value - - def get_query_expression(self, gateway_id: int, stage_name: str, resource_id: Optional[int], step: str) -> str: - labels = self._get_labels_expression( - [ - *self.default_labels, - ("job", "=", self.job_name), - ("api", "=", gateway_id), - ("stage", "=", stage_name), - ("resource", "=", resource_id), - ] - ) - return ( - f"topk(10, sum(increase({self.metric_name_prefix}apigateway_api_requests_total{{" - f"{labels}" - f"}}[{step}])) by (api, resource, path))" - ) - - -DimensionMetricsManager.register(ResourceRequestsMetrics) - - -class ResourceFailedRequestsMetrics(BaseDimensionMetrics): - dimension = DimensionEnum.RESOURCE.value - metrics = MetricsEnum.FAILED_REQUESTS.value - - def get_query_expression(self, gateway_id: int, stage_name: str, resource_id: Optional[int], step: str) -> str: - labels = self._get_labels_expression( - [ - *self.default_labels, - ("job", "=", self.job_name), - ("api", "=", gateway_id), - ("stage", "=", stage_name), - ("resource", "=", resource_id), - ("proxy_error", "=", "1"), - ] - ) - return ( - f"topk(10, sum(increase({self.metric_name_prefix}apigateway_api_requests_total{{" - f"{labels}" - f"}}[{step}])) by (api, resource, path))" - ) - - -DimensionMetricsManager.register(ResourceFailedRequestsMetrics) - - -class AppRequestsMetrics(BaseDimensionMetrics): - dimension = DimensionEnum.APP.value - metrics = MetricsEnum.REQUESTS.value - - def get_query_expression(self, gateway_id: int, stage_name: str, resource_id: Optional[int], step: str) -> str: - labels = self._get_labels_expression( - [ - *self.default_labels, - ("job", "=", self.job_name), - ("api", "=", gateway_id), - ("stage", "=", stage_name), - ("resource", "=", resource_id), - ] - ) - return ( - f"topk(10, sum(increase({self.metric_name_prefix}apigateway_app_requests_total{{" - f"{labels}" - f"}}[{step}])) by (api, app_code))" - ) - - -DimensionMetricsManager.register(AppRequestsMetrics) - - -class ResourceNon200StatusRequestsMetrics(BaseDimensionMetrics): - dimension = DimensionEnum.RESOURCE_NON200_STATUS.value - metrics = MetricsEnum.REQUESTS.value - - def get_query_expression(self, gateway_id: int, stage_name: str, resource_id: Optional[int], step: str) -> str: - labels = self._get_labels_expression( - [ - *self.default_labels, - ("job", "=", self.job_name), - ("api", "=", gateway_id), - ("stage", "=", stage_name), - ("status", "!=", "200"), - ("resource", "=", resource_id), - ] - ) - return ( - f"topk(10, sum(increase({self.metric_name_prefix}apigateway_api_requests_total{{" - f"{labels}" - f"}}[{step}])) by (api, resource, path, status))" - ) - - 
-DimensionMetricsManager.register(ResourceNon200StatusRequestsMetrics) diff --git a/src/dashboard/apigateway/apigateway/apps/metrics/management/commands/query_api_request.py b/src/dashboard/apigateway/apigateway/apps/metrics/management/commands/query_api_request.py index 36dff5997..92122eca2 100644 --- a/src/dashboard/apigateway/apigateway/apps/metrics/management/commands/query_api_request.py +++ b/src/dashboard/apigateway/apigateway/apps/metrics/management/commands/query_api_request.py @@ -28,4 +28,4 @@ def handle(self, *args, **options): step = "1d" handler = StatisticsHandler() - handler.stats(start.float_timestamp, end.float_timestamp, step) + handler.stats(start.int_timestamp, end.int_timestamp, step) diff --git a/src/dashboard/apigateway/apigateway/apps/metrics/management/commands/stats_previous_day.py b/src/dashboard/apigateway/apigateway/apps/metrics/management/commands/stats_previous_day.py index 65a2b70cf..568460a43 100644 --- a/src/dashboard/apigateway/apigateway/apps/metrics/management/commands/stats_previous_day.py +++ b/src/dashboard/apigateway/apigateway/apps/metrics/management/commands/stats_previous_day.py @@ -28,4 +28,4 @@ def handle(self, *args, **options): step = "1d" handler = StatisticsHandler() - handler.stats(start.float_timestamp, end.float_timestamp, step) + handler.stats(start.int_timestamp, end.int_timestamp, step) diff --git a/src/dashboard/apigateway/apigateway/apps/metrics/serializers.py b/src/dashboard/apigateway/apigateway/apps/metrics/serializers.py index 6c3d1f469..2599a28e2 100644 --- a/src/dashboard/apigateway/apigateway/apps/metrics/serializers.py +++ b/src/dashboard/apigateway/apigateway/apps/metrics/serializers.py @@ -19,14 +19,14 @@ from django.utils.translation import gettext as _ from rest_framework import serializers -from .constants import DimensionEnum, MetricsEnum +from apigateway.apps.metrics.constants import DimensionEnum, MetricsEnum class MetricsQuerySLZ(serializers.Serializer): stage_id = serializers.IntegerField(required=True) resource_id = serializers.IntegerField(allow_null=True, required=False) - dimension = serializers.ChoiceField(choices=DimensionEnum.choices()) - metrics = serializers.ChoiceField(choices=MetricsEnum.choices()) + dimension = serializers.ChoiceField(choices=DimensionEnum.get_choices()) + metrics = serializers.ChoiceField(choices=MetricsEnum.get_choices()) time_range = serializers.IntegerField(required=False, min_value=0) time_start = serializers.IntegerField(required=False, min_value=0) time_end = serializers.IntegerField(required=False, min_value=0) @@ -35,13 +35,3 @@ def validate(self, data): if not (data.get("time_start") and data.get("time_end") or data.get("time_range")): raise serializers.ValidationError(_("参数 time_start+time_end, time_range 必须一组有效。")) return data - - -class PrometheusMatrixResultSLZ(serializers.Serializer): - metric = serializers.DictField(child=serializers.CharField()) - values = serializers.ListField(child=serializers.ListField(max_length=2, min_length=2)) - - -class PrometheusMatrixDataSLZ(serializers.Serializer): - resultType = serializers.CharField() - result = serializers.ListField(child=PrometheusMatrixResultSLZ()) diff --git a/src/dashboard/apigateway/apigateway/apps/metrics/statistics.py b/src/dashboard/apigateway/apigateway/apps/metrics/statistics.py index d81e48236..8bf77d6a0 100644 --- a/src/dashboard/apigateway/apigateway/apps/metrics/statistics.py +++ b/src/dashboard/apigateway/apigateway/apps/metrics/statistics.py @@ -16,19 +16,30 @@ # We undertake not to change the open source 
license (MIT license) applicable # to the current version of the project delivered to anyone in the future. # +import logging from collections import defaultdict +from typing import Optional from apigateway.apps.metrics.models import StatisticsAPIRequestByDay, StatisticsAppRequestByDay -from apigateway.apps.metrics.stats_helpers import ( +from apigateway.apps.metrics.stats_metrics import ( StatisticsAPIRequestDurationMetrics, StatisticsAPIRequestMetrics, StatisticsAppRequestMetrics, ) +from apigateway.core.models import Gateway, Resource from apigateway.utils.time import utctime +logger = logging.getLogger(__name__) + class StatisticsHandler: - def stats(self, start, end, step): + def __init__(self): + self._gateway_name_to_id = dict(Gateway.objects.all().values_list("name", "id")) + + # gateway_id -> resource_name -> resource_id, e.g. {1: {"echo": 10}} + self._gateway_id_to_resources = {} + + def stats(self, start: int, end: int, step: str): # 1. 清理统计时间重复的数据 self._clear_data_by_stats_time(start) @@ -44,78 +55,128 @@ def _clear_data_by_stats_time(self, start): def _save_api_request_data(self, start, end, step): api_request_count = StatisticsAPIRequestMetrics().query(end, step) - if not api_request_count.result: + if not api_request_count.get("series"): + logger.error("The resource request data obtained from Prometheus is empty, skip statistics.") return api_request_duration = StatisticsAPIRequestDurationMetrics().query(end, step) - if not api_request_duration.result: - return + if not api_request_duration.get("series"): + logger.warning("The resource request duration data obtained from Prometheus is empty.") + # 获取失败,则数据中不记录耗时,但不影响核心服务 # 统计请求数/失败请求数 - api_request_data = {} - for item in api_request_count.result: - key = f'{item.metric["api"]}:{item.metric["stage"]}:{item.metric["resource"]}' + api_request_data = defaultdict(dict) + for item in api_request_count["series"]: + dimensions = item["dimensions"] - api_request_data.setdefault(key, defaultdict(int)) - count = int(float(item.value[1])) + gateway_name = dimensions["api_name"] + key = f'{dimensions["stage_name"]}:{dimensions["resource_name"]}' + api_request_data[gateway_name].setdefault(key, defaultdict(float)) - api_request_data[key]["total_count"] += count - if item.metric["proxy_error"] != "0": - api_request_data[key]["failed_count"] += count + count = item["datapoints"][0][0] + api_request_data[gateway_name][key]["total_count"] += count + if dimensions["proxy_error"] != "0": + api_request_data[gateway_name][key]["failed_count"] += count # 统计请求总耗时 - for item in api_request_duration.result: - key = f'{item.metric["api"]}:{item.metric["stage"]}:{item.metric["resource"]}' + for item in api_request_duration.get("series", []): + dimensions = item["dimensions"] + + gateway_name = dimensions["api_name"] + key = f'{dimensions["stage_name"]}:{dimensions["resource_name"]}' - if key in api_request_data: - api_request_data[key]["total_msecs"] = int(float(item.value[1])) + if gateway_name in api_request_data and key in api_request_data[gateway_name]: + api_request_data[gateway_name][key]["total_msecs"] = item["datapoints"][0][0] # 保存数据 statistics_record = [] - for key, request_data in api_request_data.items(): - api_id, stage_name, resource_id = key.split(":") - - if request_data["total_count"] == 0 and request_data["failed_count"] == 0: + for gateway_name, gateway_request_data in api_request_data.items(): + gateway_id = self._get_gateway_id(gateway_name) + if not gateway_id: + logger.warning("gateway (name=%s) does not exist, skip save api 
statistics.", gateway_name) continue - statistics_record.append( - StatisticsAPIRequestByDay( - total_count=request_data["total_count"], - failed_count=request_data["failed_count"], - total_msecs=request_data["total_msecs"], - start_time=utctime(start).datetime, - end_time=utctime(end).datetime, - api_id=int(api_id), - stage_name=stage_name, - resource_id=int(resource_id), + for key, request_data in gateway_request_data.items(): + if int(request_data["total_count"]) == 0 and int(request_data["failed_count"]) == 0: + continue + + stage_name, resource_name = key.split(":") + resource_id = self._get_resource_id(gateway_id, resource_name) + if not resource_id: + logger.warning( + "resource (name=%s) of gateway (name=%s) does not exist, skip save api statistics.", + resource_name, + gateway_name, + ) + continue + + statistics_record.append( + StatisticsAPIRequestByDay( + total_count=int(request_data["total_count"]), + failed_count=int(request_data["failed_count"]), + total_msecs=int(request_data["total_msecs"]), + start_time=utctime(start).datetime, + end_time=utctime(end).datetime, + api_id=gateway_id, + stage_name=stage_name, + resource_id=resource_id, + ) ) - ) StatisticsAPIRequestByDay.objects.bulk_create(statistics_record, batch_size=100) def _save_app_request_data(self, start, end, step): app_request_count = StatisticsAppRequestMetrics().query(end, step) - if not app_request_count.result: + if not app_request_count.get("series"): + logger.error("The app request data obtained from Prometheus is empty, skip statistics.") return # 保存数据 statistics_record = [] - for item in app_request_count.result: - count = int(float(item.value[1])) + for item in app_request_count.get("series", []): + count = int(item["datapoints"][0][0]) if count == 0: continue + dimensions = item["dimensions"] + gateway_name = dimensions["api_name"] + resource_name = dimensions["resource_name"] + bk_app_code = dimensions.get("bk_app_code") or dimensions.get("app_code", "") + + gateway_id = self._get_gateway_id(gateway_name) + if not gateway_id: + logger.warning("gateway (name=%s) does not exist, skip save app statistics.", gateway_name) + continue + + resource_id = self._get_resource_id(gateway_id, resource_name) + if not resource_id: + logger.warning( + "resource (name=%s) of gateway (name=%s) does not exist, skip save app statistics.", + resource_name, + gateway_name, + ) + continue + statistics_record.append( StatisticsAppRequestByDay( total_count=count, start_time=utctime(start).datetime, end_time=utctime(end).datetime, - bk_app_code=item.metric.get("app_code", ""), - api_id=int(item.metric["api"]), - stage_name=item.metric["stage"], - resource_id=int(item.metric["resource"]), + bk_app_code=bk_app_code, + api_id=gateway_id, + stage_name=dimensions["stage_name"], + resource_id=resource_id, ) ) StatisticsAppRequestByDay.objects.bulk_create(statistics_record, batch_size=100) + + def _get_gateway_id(self, gateway_name: str) -> Optional[int]: + return self._gateway_name_to_id.get(gateway_name) + + def _get_resource_id(self, gateway_id: int, resource_name: str) -> Optional[int]: + if gateway_id not in self._gateway_id_to_resources: + self._gateway_id_to_resources[gateway_id] = Resource.objects.filter_resource_name_to_id(gateway_id) + + return self._gateway_id_to_resources[gateway_id].get(resource_name) diff --git a/src/dashboard/apigateway/apigateway/apps/metrics/stats_helpers.py b/src/dashboard/apigateway/apigateway/apps/metrics/stats_metrics.py similarity index 75% rename from 
src/dashboard/apigateway/apigateway/apps/metrics/stats_helpers.py rename to src/dashboard/apigateway/apigateway/apps/metrics/stats_metrics.py index 106fd8088..5470bd44a 100644 --- a/src/dashboard/apigateway/apigateway/apps/metrics/stats_helpers.py +++ b/src/dashboard/apigateway/apigateway/apps/metrics/stats_metrics.py @@ -18,49 +18,50 @@ # from abc import abstractmethod -from apigateway.apps.metrics.helpers import BasePrometheusMetrics +from django.conf import settings + +from apigateway.apps.metrics.dimension_metrics import BasePrometheusMetrics from apigateway.components.prometheus import prometheus_component class BaseStatisticsMetrics(BasePrometheusMetrics): @abstractmethod - def _get_query_expression(self, step: str): + def _get_query_promql(self, step: str): pass - def query(self, time: float, step: str): + def query(self, time_: int, step: str): return prometheus_component.query( - query=self._get_query_expression(step), - time=time, + bk_biz_id=getattr(settings, "BCS_CLUSTER_BK_BIZ_ID", ""), + promql=self._get_query_promql(step), + time_=time_, ) class StatisticsAPIRequestMetrics(BaseStatisticsMetrics): - def _get_query_expression(self, step): + def _get_query_promql(self, step): labels = self._get_labels_expression( [ *self.default_labels, - ("job", "=", self.job_name), ] ) return ( f"sum(increase({self.metric_name_prefix}apigateway_api_requests_total{{" f"{labels}" - f"}}[{step}])) by (api, stage, resource, proxy_error)" + f"}}[{step}])) by (api_name, stage_name, resource_name, proxy_error)" ) class StatisticsAPIRequestDurationMetrics(BaseStatisticsMetrics): - def _get_query_expression(self, step): + def _get_query_promql(self, step): labels = self._get_labels_expression( [ *self.default_labels, - ("job", "=", self.job_name), ] ) return ( f"sum(increase({self.metric_name_prefix}apigateway_api_request_duration_milliseconds_sum{{" f"{labels}" - f"}}[{step}])) by (api, stage, resource)" + f"}}[{step}])) by (api_name, stage_name, resource_name)" ) @@ -69,17 +70,16 @@ class StatisticsAppRequestMetrics(BaseStatisticsMetrics): 根据网关、环境、资源,统计应用请求量 """ - def _get_query_expression(self, step): + def _get_query_promql(self, step): labels = self._get_labels_expression( [ *self.default_labels, - ("job", "=", self.job_name), ] ) return ( f"sum(increase({self.metric_name_prefix}apigateway_app_requests_total{{" f"{labels}" - f"}}[{step}])) by (app_code, api, stage, resource)" + f"}}[{step}])) by (app_code, api_name, stage_name, resource_name)" ) @@ -88,15 +88,14 @@ class StatisticsAppRequestByResourceMetrics(BaseStatisticsMetrics): 根据网关、资源,统计应用请求量 """ - def _get_query_expression(self, step): + def _get_query_promql(self, step): labels = self._get_labels_expression( [ *self.default_labels, - ("job", "=", self.job_name), ] ) return ( f"sum(increase({self.metric_name_prefix}apigateway_app_requests_total{{" f"{labels}" - f"}}[{step}])) by (app_code, api, resource)" + f"}}[{step}])) by (app_code, api_name, resource_name)" ) diff --git a/src/dashboard/apigateway/apigateway/apps/metrics/tasks.py b/src/dashboard/apigateway/apigateway/apps/metrics/tasks.py index ca0d9395e..d6f698016 100644 --- a/src/dashboard/apigateway/apigateway/apps/metrics/tasks.py +++ b/src/dashboard/apigateway/apigateway/apps/metrics/tasks.py @@ -31,4 +31,4 @@ def statistics_request_by_day(): step = "1d" handler = StatisticsHandler() - handler.stats(start.float_timestamp, end.float_timestamp, step) + handler.stats(start.int_timestamp, end.int_timestamp, step) diff --git a/src/dashboard/apigateway/apigateway/apps/metrics/utils.py 
b/src/dashboard/apigateway/apigateway/apps/metrics/utils.py new file mode 100644 index 000000000..28e696572 --- /dev/null +++ b/src/dashboard/apigateway/apigateway/apps/metrics/utils.py @@ -0,0 +1,63 @@ +# +# TencentBlueKing is pleased to support the open source community by making +# 蓝鲸智云 - API 网关(BlueKing - APIGateway) available. +# Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved. +# Licensed under the MIT License (the "License"); you may not use this file except +# in compliance with the License. You may obtain a copy of the License at +# +# http://opensource.org/licenses/MIT +# +# Unless required by applicable law or agreed to in writing, software distributed under +# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, +# either express or implied. See the License for the specific language governing permissions and +# limitations under the License. +# +# We undertake not to change the open source license (MIT license) applicable +# to the current version of the project delivered to anyone in the future. +# +import math + +from apigateway.utils.time import SmartTimeRange + + +class MetricsSmartTimeRange(SmartTimeRange): + def get_recommended_step(self) -> str: + """根据 time_start, time_end,获取推荐的步长""" + start, end = self.get_head_and_tail() + + return self._calculate_step(start, end) + + def _calculate_step(self, start: int, end: int) -> str: + """ + :param start: 起始时间戳 + :param end: 结束时间戳 + :returns: 推荐步长 + + step via the gap of query time + 1m <- 1h + 5m <- 6h + 10m <- 12h + 30m <- 24h + 1h <- 72h + 3h <- 7d + 12h <- >7d + """ + step_options = ["1m", "5m", "10m", "30m", "1h", "3h", "12h"] + + gap_minutes = math.ceil((end - start) / 60) + if gap_minutes <= 60: + index = 0 + elif gap_minutes <= 360: + index = 1 + elif gap_minutes <= 720: + index = 2 + elif gap_minutes <= 1440: + index = 3 + elif gap_minutes <= 4320: + index = 4 + elif gap_minutes <= 10080: + index = 5 + else: + index = 6 + + return step_options[index] diff --git a/src/dashboard/apigateway/apigateway/apps/metrics/views.py b/src/dashboard/apigateway/apigateway/apps/metrics/views.py index 383855bb7..a2289386e 100644 --- a/src/dashboard/apigateway/apigateway/apps/metrics/views.py +++ b/src/dashboard/apigateway/apigateway/apps/metrics/views.py @@ -21,25 +21,18 @@ from rest_framework import status from rest_framework.views import APIView -from apigateway.components.prometheus import prometheus_component -from apigateway.core.models import Stage +from apigateway.apps.metrics import serializers +from apigateway.apps.metrics.constants import DimensionEnum, MetricsEnum +from apigateway.apps.metrics.dimension_metrics import DimensionMetricsFactory +from apigateway.apps.metrics.utils import MetricsSmartTimeRange +from apigateway.core.models import Resource, Stage from apigateway.utils.responses import OKJsonResponse -from apigateway.utils.time import SmartTimeRange - -from . 
import serializers -from .helpers import DimensionMetricsManager class QueryRangeAPIView(APIView): - def _get_stage(self, stage_id): - try: - return Stage.objects.get(api=self.request.gateway, id=stage_id) - except Stage.DoesNotExist: - raise Http404 - @swagger_auto_schema( query_serializer=serializers.MetricsQuerySLZ, - responses={status.HTTP_200_OK: serializers.PrometheusMatrixDataSLZ}, + responses={status.HTTP_200_OK: ""}, tags=["Metrics"], ) def get(self, request, *args, **kwargs): @@ -48,29 +41,32 @@ def get(self, request, *args, **kwargs): data = slz.validated_data - smart_time_range = SmartTimeRange( + stage_name = Stage.objects.get_name(request.gateway.id, data["stage_id"]) + if not stage_name: + raise Http404 + + resource_name = None + if data.get("resource_id"): + resource_name = Resource.objects.get_name(request.gateway.id, data["resource_id"]) + + smart_time_range = MetricsSmartTimeRange( data.get("time_start"), data.get("time_end"), data.get("time_range"), ) time_start, time_end = smart_time_range.get_head_and_tail() - step = smart_time_range.get_interval() + step = smart_time_range.get_recommended_step() - # generate query expression - metrics = DimensionMetricsManager.create_dimension_metrics(data["dimension"], data["metrics"]) - query_expression = metrics.get_query_expression( - gateway_id=request.gateway.id, - stage_name=self._get_stage(data["stage_id"]).name, - resource_id=data.get("resource_id"), - step=step, + metrics = DimensionMetricsFactory.create_dimension_metrics( + DimensionEnum(data["dimension"]), MetricsEnum(data["metrics"]) ) - - # request prometheus http api to get metrics data - data = prometheus_component.query_range( - query=query_expression, + data = metrics.query_range( + gateway_name=request.gateway.name, + stage_name=stage_name, + resource_name=resource_name, start=time_start, end=time_end, step=step, ) - return OKJsonResponse("OK", data=data.dict()) + return OKJsonResponse("OK", data=data) diff --git a/src/dashboard/apigateway/apigateway/apps/permission/managers.py b/src/dashboard/apigateway/apigateway/apps/permission/managers.py index 4998f04f4..e46e2af38 100644 --- a/src/dashboard/apigateway/apigateway/apps/permission/managers.py +++ b/src/dashboard/apigateway/apigateway/apps/permission/managers.py @@ -84,8 +84,11 @@ def save_permissions(self, gateway, resource_ids=None, bk_app_code=None, grant_t def renew_permission(self, gateway, ids=None): queryset = self.filter_permission(gateway=gateway, ids=ids) + # 仅续期权限期限小于待续期时间的权限 + expires = to_datetime_from_now(days=DEFAULT_PERMISSION_EXPIRE_DAYS) + queryset = queryset.filter(expires__lt=expires) queryset.update( - expires=to_datetime_from_now(days=DEFAULT_PERMISSION_EXPIRE_DAYS), + expires=expires, updated_time=now_datetime(), ) @@ -159,8 +162,32 @@ def renew_permission( expire_days=DEFAULT_PERMISSION_EXPIRE_DAYS, ): queryset = self.filter_permission(gateway=gateway, bk_app_code=bk_app_code, resource_ids=resource_ids, ids=ids) + # 仅续期权限期限小于待续期时间的权限 + expires = to_datetime_from_now(days=expire_days) + queryset = queryset.filter(expires__lt=expires) queryset.update( - expires=to_datetime_from_now(days=expire_days), + expires=expires, + grant_type=grant_type, + ) + + def renew_not_expired_permissions( + self, + gateway_id: int, + bk_app_code: str, + resource_ids: List[int], + grant_type=GrantTypeEnum.RENEW.value, + expire_days=DEFAULT_PERMISSION_EXPIRE_DAYS, + ): + """仅续期未过期且权限期限小于待续期时间的权限""" + expires = to_datetime_from_now(days=expire_days) + queryset = self.filter( + api_id=gateway_id, + 
bk_app_code=bk_app_code, + resource_id__in=resource_ids, + expires__range=(now_datetime(), expires), + ) + queryset.update( + expires=expires, grant_type=grant_type, ) diff --git a/src/dashboard/apigateway/apigateway/apps/permission/tasks.py b/src/dashboard/apigateway/apigateway/apps/permission/tasks.py index 708720359..7fb2d0462 100644 --- a/src/dashboard/apigateway/apigateway/apps/permission/tasks.py +++ b/src/dashboard/apigateway/apigateway/apps/permission/tasks.py @@ -17,15 +17,16 @@ # to the current version of the project delivered to anyone in the future. # import base64 +import datetime import logging from collections import defaultdict -import arrow from celery import shared_task from django.conf import settings from django.template.loader import render_to_string +from django.utils import timezone -from apigateway.apps.metrics.stats_helpers import StatisticsAppRequestByResourceMetrics +from apigateway.apps.metrics.models import StatisticsAppRequestByDay from apigateway.apps.permission.constants import ( APIGW_LOGO_PATH, ApplyStatusEnum, @@ -36,7 +37,6 @@ from apigateway.apps.permission.helpers import PermissionDimensionManager from apigateway.apps.permission.models import AppPermissionApply, AppPermissionRecord, AppResourcePermission from apigateway.components.cmsi import cmsi_component -from apigateway.core.models import Gateway from apigateway.utils.file import read_file logger = logging.getLogger(__name__) @@ -161,45 +161,26 @@ def send_mail_for_perm_handle(record_id): def renew_app_resource_permission(): """ 蓝鲸应用访问资源权限自动续期 - """ - # 统计前一天的请求数据,为防止临界时间点的数据统计不到,时间跨度设置为 25 小时 - _, end = arrow.utcnow().shift(days=-1).span("day") - step = "25h" - - app_request_count = StatisticsAppRequestByResourceMetrics().query(end.float_timestamp, step) - if not app_request_count.result: - logger.error("statistics app requests by resource to renew app resource permission fail.") - return - - app_api_resources = {} - for item in app_request_count.result: - count = float(item.value[1]) - if count <= 0: - continue - # 部分资源不认证应用 - if not item.metric.get("app_code"): - continue - - app_api_resources.setdefault(item.metric["app_code"], defaultdict(list)) - app_api_resources[item.metric["app_code"]][item.metric["api"]].append(int(item.metric["resource"])) + - 仅续期未过期的应用资源权限 + """ + # 为防止统计数据获取偏差,时间跨度设置为 2 天 + time_range_days = 2 - for bk_app_code, api_resources in app_api_resources.items(): - for gateway_id, resource_ids in api_resources.items(): - gateway = Gateway.objects.filter(id=int(gateway_id)).first() - if not gateway: - logger.warning(f"api[id={gateway_id}] not exist, renew app resource permission fail") - continue + time_ = timezone.now() + datetime.timedelta(days=-time_range_days) + queryset = StatisticsAppRequestByDay.objects.filter(end_time__gt=time_) - # 如果应用-资源权限不存在,则将按网关的权限同步到应用-资源权限 - AppResourcePermission.objects.sync_from_gateway_permission( - gateway, - bk_app_code=bk_app_code, - resource_ids=resource_ids, - ) + app_request_data = defaultdict(dict) + for item in queryset: + if not item.bk_app_code: + continue + app_request_data[item.bk_app_code].setdefault(item.api_id, set()) + app_request_data[item.bk_app_code][item.api_id].add(item.resource_id) - AppResourcePermission.objects.renew_permission( - gateway, + for bk_app_code, gateway_resources in app_request_data.items(): + for gateway_id, resource_ids in gateway_resources.items(): + AppResourcePermission.objects.renew_not_expired_permissions( + gateway_id, bk_app_code=bk_app_code, resource_ids=resource_ids, 
grant_type=GrantTypeEnum.AUTO_RENEW.value, diff --git a/src/dashboard/apigateway/apigateway/components/bkapi_client/bkmonitorv3.py b/src/dashboard/apigateway/apigateway/components/bkapi_client/bkmonitorv3.py new file mode 100644 index 000000000..42e6a4730 --- /dev/null +++ b/src/dashboard/apigateway/apigateway/components/bkapi_client/bkmonitorv3.py @@ -0,0 +1,36 @@ +# +# TencentBlueKing is pleased to support the open source community by making +# 蓝鲸智云 - API 网关(BlueKing - APIGateway) available. +# Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved. +# Licensed under the MIT License (the "License"); you may not use this file except +# in compliance with the License. You may obtain a copy of the License at +# +# http://opensource.org/licenses/MIT +# +# Unless required by applicable law or agreed to in writing, software distributed under +# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, +# either express or implied. See the License for the specific language governing permissions and +# limitations under the License. +# +# We undertake not to change the open source license (MIT license) applicable +# to the current version of the project delivered to anyone in the future. +# +from bkapi_client_core.apigateway import APIGatewayClient, Operation, OperationGroup, bind_property + + +class Group(OperationGroup): + # 统一查询时序数据 + promql_query = bind_property( + Operation, + name="promql_query", + method="POST", + path="/promql_query/", + ) + + +class Client(APIGatewayClient): + """Bkapi bkmonitorv3 client""" + + _api_name = "bkmonitorv3" + + api = bind_property(Group, name="api") diff --git a/src/dashboard/apigateway/apigateway/components/esb_components.py b/src/dashboard/apigateway/apigateway/components/esb_components.py index 3456395d9..a50d34db4 100644 --- a/src/dashboard/apigateway/apigateway/components/esb_components.py +++ b/src/dashboard/apigateway/apigateway/components/esb_components.py @@ -45,11 +45,21 @@ class EsbGroup(OperationGroup): ) +class MonitorV3Group(OperationGroup): + promql_query = bind_property( + Operation, + name="promql_query", + method="POST", + path="/api/c/compapi/v2/monitor_v3/graph_promql_query/", + ) + + class Client(ESBClient): """ESB Components""" bk_log = bind_property(BkLogGroup, name="bk_log") esb = bind_property(EsbGroup, name="esb") + monitor_v3 = bind_property(MonitorV3Group, name="monitor_v3") get_client_by_username = _partial(Client, _get_client_by_username) diff --git a/src/dashboard/apigateway/apigateway/components/prometheus.py b/src/dashboard/apigateway/apigateway/components/prometheus.py index 0220af411..000d97e47 100644 --- a/src/dashboard/apigateway/apigateway/components/prometheus.py +++ b/src/dashboard/apigateway/apigateway/components/prometheus.py @@ -16,101 +16,82 @@ # We undertake not to change the open source license (MIT license) applicable # to the current version of the project delivered to anyone in the future. 
# -from math import nan -from typing import Any, Dict, List, Text, Tuple +from operator import itemgetter +from typing import Any, Dict +from bkapi_client_core.apigateway import OperationGroup +from bkapi_client_core.apigateway.django_helper import get_client_by_username as get_client_by_username_for_apigateway from django.conf import settings -from pydantic import BaseModel -from apigateway.common.error_codes import error_codes -from apigateway.utils.http import http_get - -from .component import BaseComponent - -PROMETHEUS_API_TIMEOUT = 30 - - -class RequestResult(BaseModel): - status: Text = "" - data: Dict[Text, Any] = {} - error: Text = "" - - -class InstantVector(BaseModel): - metric: Dict[Text, Text] = {} - value: Tuple[float, Text] = (nan, "NaN") - - -class RangeVector(BaseModel): - metric: Dict[Text, Text] = {} - values: List[Tuple[float, Text]] = [] - - -class QueryResult(BaseModel): - result: List[InstantVector] = [] - - -class QueryRangeResult(BaseModel): - result: List[RangeVector] = [] - - -class PrometheusComponent(BaseComponent): - - HOST = getattr(settings, "BCS_THANOS_URL", "") - - def parse_response(self, http_ok, resp): - if not (http_ok and resp): - return False, "", None - - result = RequestResult(**resp) - return result.status == "success", result.error, result.data +from apigateway.components.bkapi_client.bkmonitorv3 import Client as BkMonitorV3Client +from apigateway.components.esb_components import get_client_by_username as get_client_by_username_for_esb +from apigateway.components.handler import RequestAPIHandler + + +class PrometheusComponent: + def __init__(self): + self._api_client: OperationGroup = self._get_api_client() + self._request_handler = RequestAPIHandler("bkmonitorv3") + + def query_range(self, bk_biz_id: str, promql: str, start: int, end: int, step: str) -> Dict[str, Any]: + """ + Evaluate an expression query over a range of time + + :param bk_biz_id: business ID + :param promql: prometheus query language + :param start: start timestamp, e.g. 1622009400 + :param end: end timestamp, e.g. 1622009500 + :param step: step, e.g. "1m" + """ + return self._promql_query(bk_biz_id, promql, start, end, step, "range") + + def query(self, bk_biz_id: str, promql: str, time_: int) -> Dict[str, Any]: + """ + Evaluate an instant query at a single point in time + + :param bk_biz_id: business ID + :param promql: prometheus query language + :param time_: evaluation timestamp, e.g. 1622009400 + """ + # Instant query, no need for start, step, + # but the backend does not allow the value to be null, so set a default value. 
+ # step: set to 1m, backend use it to calculate real evaluation timestamp + return self._promql_query(bk_biz_id, promql, 0, time_, "1m", "instant") + + def _promql_query( + self, bk_biz_id: str, promql: str, start: int, end: int, step: str, type_: str + ) -> Dict[str, Any]: + """ + Common query Prometheus data interface + + :param type_: choices: range, instant + - range: corresponds to Prometheus /api/v1/query_range + - instant: corresponds to Prometheus /api/v1/query + """ + data = { + "bk_biz_id": bk_biz_id, + "promql": promql, + "start_time": start, + "end_time": end, + "step": step, + "format": "time_series", + "type": type_, + } - def _call_api(self, http_func, path, data, **kwargs): - # TODO: _call_api 更改了父类的协议, - # components 内部逻辑统一调整 1 使用 dynatic 封装响应,2 出错抛出异常,方法直接返回需要的数据 - ok, message, data = super()._call_api( - http_func, - path, - data, - headers={ - "Content-Type": "application/x-www-form-urlencoded", - }, - timeout=PROMETHEUS_API_TIMEOUT, - auth=(getattr(settings, "BCS_THANOS_USER", ""), getattr(settings, "BCS_THANOS_PASSWD", "")), - ) + headers = {"X-Bk-Scope-Space-Uid": f"bkcc__{bk_biz_id}"} - if not ok: - raise error_codes.COMPONENT_ERROR.format(message) + api_result, response = self._request_handler.call_api(self._api_client.promql_query, data, headers=headers) + return self._request_handler.parse_api_result(api_result, response, {"code": 200}, itemgetter("data")) - return data + def _get_api_client(self) -> OperationGroup: + # use gateway: bkmonitorv3 + if settings.USE_BKAPI_BKMONITORV3: + apigw_client = get_client_by_username_for_apigateway(BkMonitorV3Client, username="admin") + return apigw_client.api - def query_range(self, query, start, end, step): - params = { - "query": query, - "start": start, - "end": end, - "step": step, - "timeout": PROMETHEUS_API_TIMEOUT, - } - data = self._call_api( - http_get, - "/api/v1/query_range", - params, - ) - return QueryRangeResult(**data) - - def query(self, query, time): - params = { - "query": query, - "time": time, - "timeout": PROMETHEUS_API_TIMEOUT, - } - data = self._call_api( - http_get, - "/api/v1/query", - params, - ) - return QueryResult(**data) + # use esb api + esb_client = get_client_by_username_for_esb("admin") + return esb_client.monitor_v3 prometheus_component = PrometheusComponent() diff --git a/src/dashboard/apigateway/apigateway/conf/default.py b/src/dashboard/apigateway/apigateway/conf/default.py index 25825e4fa..fe14c6cde 100644 --- a/src/dashboard/apigateway/apigateway/conf/default.py +++ b/src/dashboard/apigateway/apigateway/conf/default.py @@ -418,7 +418,7 @@ } ) -if env.bool("FEATURE_FLAG_ENABLE_RUN_DATA_METRICS", False): +if env.bool("FEATURE_FLAG_ENABLE_RUN_DATA_METRICS", True): CELERY_BEAT_SCHEDULE.update( { "apigateway.apps.metrics.tasks.statistics_request_by_day": { @@ -567,6 +567,9 @@ # BCS 为网关分配的认证 Token BCS_API_GATEWAY_TOKEN = env.str("BCS_API_GATEWAY_TOKEN", "") +# 网关部署集群所属业务ID,影响从蓝鲸监控拉取 Prometheus 数据等功能;开源环境默认部署在蓝鲸业务(业务 ID=2) +BCS_CLUSTER_BK_BIZ_ID = env.str("BCS_CLUSTER_BK_BIZ_ID", "2") + # edge controller 配置 EDGE_CONTROLLER_API_NAME = "bk-apigateway" # 托管的微网关实例,实例部署所用 chart 由网关生成,此 chart 中,endpoints + base_path 应为微网关实例访问网关数据的网关接口地址前缀 @@ -759,6 +762,10 @@ # TODO: 待启用 IAM 鉴权时,将默认值改为 True USE_BK_IAM_PERMISSION = env.bool("USE_BK_IAM_PERMISSION", False) +# 使用 bkmonitorv3 网关 API,还是 monitor_v3 组件 API +USE_BKAPI_BKMONITORV3 = env.bool("USE_BKAPI_BKMONITORV3", False) + + # ============================================================================== # Feature Flag # 
============================================================================== @@ -767,7 +774,7 @@ DEFAULT_FEATURE_FLAG = { "ENABLE_MONITOR": env.bool("FEATURE_FLAG_ENABLE_MONITOR", False), "ENABLE_RUN_DATA": env.bool("FEATURE_FLAG_ENABLE_RUN_DATA", True), - "ENABLE_RUN_DATA_METRICS": env.bool("FEATURE_FLAG_ENABLE_RUN_DATA_METRICS", False), + "ENABLE_RUN_DATA_METRICS": env.bool("FEATURE_FLAG_ENABLE_RUN_DATA_METRICS", True), "ALLOW_UPLOAD_SDK_TO_REPOSITORY": env.bool("FEATURE_FLAG_ALLOW_UPLOAD_SDK_TO_REPOSITORY", False), "MENU_ITEM_HELP": env.bool("FEATURE_FLAG_MENU_ITEM_HELP", False), "MENU_ITEM_ESB_API": env.bool("FEATURE_FLAG_MENU_ITEM_ESB_API", True), diff --git a/src/dashboard/apigateway/apigateway/core/managers.py b/src/dashboard/apigateway/apigateway/core/managers.py index 6dccb8742..76f68a6dd 100644 --- a/src/dashboard/apigateway/apigateway/core/managers.py +++ b/src/dashboard/apigateway/apigateway/core/managers.py @@ -429,6 +429,9 @@ def get_gateway_name_to_active_stage_names(self, gateways) -> Dict[str, List[str return gateway_name_to_stage_names + def get_name(self, gateway_id: int, id_: int) -> Optional[str]: + return self.filter(api_id=gateway_id, id=id_).values_list("name", flat=True).first() + class ResourceManager(models.Manager): # TODO: 断点, 把这个函数挪到 ResourceHandler里面去 @@ -729,6 +732,9 @@ def get_resource_ids_by_names(self, gateway_id: int, resource_names: Optional[Li return list(self.filter(api_id=gateway_id, name__in=resource_names).values_list("id", flat=True)) + def get_name(self, gateway_id: int, id_: int) -> Optional[str]: + return self.filter(api=gateway_id, id=id_).values_list("name", flat=True).first() + class ProxyManager(models.Manager): # FIXME: move to biz layer diff --git a/src/dashboard/apigateway/apigateway/core/migrations/0029_publishevent.py b/src/dashboard/apigateway/apigateway/core/migrations/0029_publishevent.py index 07bba19a7..492d63054 100644 --- a/src/dashboard/apigateway/apigateway/core/migrations/0029_publishevent.py +++ b/src/dashboard/apigateway/apigateway/core/migrations/0029_publishevent.py @@ -1,3 +1,20 @@ +# +# TencentBlueKing is pleased to support the open source community by making +# 蓝鲸智云 - API 网关(BlueKing - APIGateway) available. +# Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved. +# Licensed under the MIT License (the "License"); you may not use this file except +# in compliance with the License. You may obtain a copy of the License at +# +# http://opensource.org/licenses/MIT +# +# Unless required by applicable law or agreed to in writing, software distributed under +# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, +# either express or implied. See the License for the specific language governing permissions and +# limitations under the License. +# +# We undertake not to change the open source license (MIT license) applicable +# to the current version of the project delivered to anyone in the future. 
+# # Generated by Django 3.2.18 on 2023-07-07 05:20 import django.db.models.deletion diff --git a/src/dashboard/apigateway/apigateway/tests/apps/metrics/test_dimension_metrics.py b/src/dashboard/apigateway/apigateway/tests/apps/metrics/test_dimension_metrics.py new file mode 100644 index 000000000..b273ba9f8 --- /dev/null +++ b/src/dashboard/apigateway/apigateway/tests/apps/metrics/test_dimension_metrics.py @@ -0,0 +1,357 @@ +# -*- coding: utf-8 -*- +# +# TencentBlueKing is pleased to support the open source community by making +# 蓝鲸智云 - API 网关(BlueKing - APIGateway) available. +# Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved. +# Licensed under the MIT License (the "License"); you may not use this file except +# in compliance with the License. You may obtain a copy of the License at +# +# http://opensource.org/licenses/MIT +# +# Unless required by applicable law or agreed to in writing, software distributed under +# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, +# either express or implied. See the License for the specific language governing permissions and +# limitations under the License. +# +# We undertake not to change the open source license (MIT license) applicable +# to the current version of the project delivered to anyone in the future. +# +from apigateway.apps.metrics import dimension_metrics +from apigateway.apps.metrics.constants import DimensionEnum, MetricsEnum + + +class TestRequestsMetrics: + def test_get_query_promql(self, mocker): + mocker.patch("apigateway.apps.metrics.dimension_metrics.BaseDimensionMetrics.default_labels", return_value=[]) + + data = [ + { + "params": { + "gateway_name": "foo", + "stage_name": "prod", + "resource_name": "get_foo", + "step": "1m", + }, + "expected": ( + 'sum(increase(bk_apigateway_apigateway_api_requests_total{api_name="foo", ' + 'stage_name="prod", resource_name="get_foo"}[1m]))' + ), + }, + { + "params": { + "gateway_name": "foo", + "stage_name": "prod", + "resource_name": None, + "step": "1m", + }, + "expected": ( + 'sum(increase(bk_apigateway_apigateway_api_requests_total{api_name="foo", ' + 'stage_name="prod"}[1m]))' + ), + }, + ] + for test in data: + metrics = dimension_metrics.RequestsMetrics() + result = metrics._get_query_promql(**test["params"]) + assert result == test["expected"] + + +class TestFailedRequestsMetrics: + def test_get_query_promql(self, mocker): + mocker.patch("apigateway.apps.metrics.dimension_metrics.BaseDimensionMetrics.default_labels", return_value=[]) + + data = [ + { + "params": { + "gateway_name": "foo", + "stage_name": "prod", + "resource_name": "get_foo", + "step": "1m", + }, + "expected": ( + 'sum(increase(bk_apigateway_apigateway_api_requests_total{api_name="foo", ' + 'stage_name="prod", resource_name="get_foo", proxy_error="1"}[1m]))' + ), + }, + { + "params": { + "gateway_name": "foo", + "stage_name": "prod", + "resource_name": None, + "step": "1m", + }, + "expected": ( + 'sum(increase(bk_apigateway_apigateway_api_requests_total{api_name="foo", ' + 'stage_name="prod", proxy_error="1"}[1m]))' + ), + }, + ] + for test in data: + metrics = dimension_metrics.FailedRequestsMetrics() + result = metrics._get_query_promql(**test["params"]) + assert result == test["expected"], result + + +class TestResponseTime95thMetrics: + def test_get_query_promql(self, mocker): + mocker.patch("apigateway.apps.metrics.dimension_metrics.BaseDimensionMetrics.default_labels", return_value=[]) + + data = [ + { + "params": { + "gateway_name": "foo", + 
"stage_name": "prod", + "resource_name": "get_foo", + "step": "1m", + }, + "expected": ( + "histogram_quantile(0.95, sum(rate(bk_apigateway_apigateway_api_request_duration_milliseconds_bucket{" + 'api_name="foo", stage_name="prod", resource_name="get_foo"}[1m])) by (le, api_name))' + ), + }, + { + "params": { + "gateway_name": "foo", + "stage_name": "prod", + "resource_name": None, + "step": "1m", + }, + "expected": ( + "histogram_quantile(0.95, sum(rate(bk_apigateway_apigateway_api_request_duration_milliseconds_bucket{" + 'api_name="foo", stage_name="prod"}[1m])) ' + "by (le, api_name))" + ), + }, + ] + for test in data: + metrics = dimension_metrics.ResponseTime95thMetrics() + result = metrics._get_query_promql(**test["params"]) + assert result == test["expected"], result + + +class TestResponseTime50thMetrics: + def test_get_query_promql(self, mocker): + mocker.patch("apigateway.apps.metrics.dimension_metrics.BaseDimensionMetrics.default_labels", return_value=[]) + + data = [ + { + "params": { + "gateway_name": "foo", + "stage_name": "prod", + "resource_name": "get_foo", + "step": "1m", + }, + "expected": ( + "histogram_quantile(0.5, sum(rate(bk_apigateway_apigateway_api_request_duration_milliseconds_bucket{" + 'api_name="foo", stage_name="prod", resource_name="get_foo"}[1m])) by (le, api_name))' + ), + }, + { + "params": { + "gateway_name": "foo", + "stage_name": "prod", + "resource_name": None, + "step": "1m", + }, + "expected": ( + "histogram_quantile(0.5, sum(rate(bk_apigateway_apigateway_api_request_duration_milliseconds_bucket{" + 'api_name="foo", stage_name="prod"}[1m])) ' + "by (le, api_name))" + ), + }, + ] + for test in data: + metrics = dimension_metrics.ResponseTime50thMetrics() + result = metrics._get_query_promql(**test["params"]) + assert result == test["expected"], result + + +class TestResourceRequestsMetrics: + def test_get_query_promql(self, mocker): + mocker.patch("apigateway.apps.metrics.dimension_metrics.BaseDimensionMetrics.default_labels", return_value=[]) + + data = [ + { + "params": { + "gateway_name": "foo", + "stage_name": "prod", + "resource_name": "get_foo", + "step": "1m", + }, + "expected": ( + 'topk(10, sum(increase(bk_apigateway_apigateway_api_requests_total{api_name="foo", ' + 'stage_name="prod", resource_name="get_foo"}[1m])) by (api_name, resource_name, matched_uri))' + ), + }, + { + "params": { + "gateway_name": "foo", + "stage_name": "prod", + "resource_name": None, + "step": "1m", + }, + "expected": ( + 'topk(10, sum(increase(bk_apigateway_apigateway_api_requests_total{api_name="foo", ' + 'stage_name="prod"}[1m])) by (api_name, resource_name, matched_uri))' + ), + }, + ] + for test in data: + metrics = dimension_metrics.ResourceRequestsMetrics() + result = metrics._get_query_promql(**test["params"]) + assert result == test["expected"], result + + +class TestResourceFailedRequestsMetrics: + def test_get_query_promql(self, mocker): + mocker.patch("apigateway.apps.metrics.dimension_metrics.BaseDimensionMetrics.default_labels", return_value=[]) + + data = [ + { + "params": { + "gateway_name": "foo", + "stage_name": "prod", + "resource_name": "get_foo", + "step": "1m", + }, + "expected": ( + 'topk(10, sum(increase(bk_apigateway_apigateway_api_requests_total{api_name="foo", ' + 'stage_name="prod", resource_name="get_foo", proxy_error="1"}[1m])) by (api_name, resource_name, matched_uri))' + ), + }, + { + "params": { + "gateway_name": "foo", + "stage_name": "prod", + "resource_name": None, + "step": "1m", + }, + "expected": ( + 'topk(10, 
sum(increase(bk_apigateway_apigateway_api_requests_total{api_name="foo", ' + 'stage_name="prod", proxy_error="1"}[1m])) by (api_name, resource_name, matched_uri))' + ), + }, + ] + for test in data: + metrics = dimension_metrics.ResourceFailedRequestsMetrics() + result = metrics._get_query_promql(**test["params"]) + assert result == test["expected"], result + + +class TestAppRequestsMetrics: + def test_get_query_promql(self, mocker): + mocker.patch("apigateway.apps.metrics.dimension_metrics.BaseDimensionMetrics.default_labels", return_value=[]) + + data = [ + { + "params": { + "gateway_name": "foo", + "stage_name": "prod", + "resource_name": "get_foo", + "step": "1m", + }, + "expected": ( + 'topk(10, sum(increase(bk_apigateway_apigateway_app_requests_total{api_name="foo", ' + 'stage_name="prod", resource_name="get_foo"}[1m])) by (api_name, app_code))' + ), + }, + { + "params": { + "gateway_name": "foo", + "stage_name": "prod", + "resource_name": None, + "step": "1m", + }, + "expected": ( + 'topk(10, sum(increase(bk_apigateway_apigateway_app_requests_total{api_name="foo", ' + 'stage_name="prod"}[1m])) by (api_name, app_code))' + ), + }, + ] + for test in data: + metrics = dimension_metrics.AppRequestsMetrics() + result = metrics._get_query_promql(**test["params"]) + assert result == test["expected"], result + + +class TestResourceNon200StatusRequestsMetrics: + def test_get_query_promql(self, mocker): + mocker.patch("apigateway.apps.metrics.dimension_metrics.BaseDimensionMetrics.default_labels", return_value=[]) + + data = [ + { + "params": { + "gateway_name": "foo", + "stage_name": "prod", + "resource_name": "get_foo", + "step": "1m", + }, + "expected": ( + 'topk(10, sum(increase(bk_apigateway_apigateway_api_requests_total{api_name="foo", ' + 'stage_name="prod", resource_name="get_foo", status!="200"}[1m])) by (api_name, resource_name, matched_uri, status))' + ), + }, + { + "params": { + "gateway_name": "foo", + "stage_name": "prod", + "resource_name": None, + "step": "1m", + }, + "expected": ( + 'topk(10, sum(increase(bk_apigateway_apigateway_api_requests_total{api_name="foo", ' + 'stage_name="prod", status!="200"}[1m])) by (api_name, resource_name, matched_uri, status))' + ), + }, + ] + for test in data: + metrics = dimension_metrics.ResourceNon200StatusRequestsMetrics() + result = metrics._get_query_promql(**test["params"]) + assert result == test["expected"], result + + +class TestDimensionMetricsFactory: + def test_create_dimension_metrics(self): + data = [ + { + "dimension": "all", + "metrics": "requests", + "expected": dimension_metrics.RequestsMetrics, + }, + { + "dimension": "all", + "metrics": "failed_requests", + "expected": dimension_metrics.FailedRequestsMetrics, + }, + { + "dimension": "all", + "metrics": "response_time_95th", + "expected": dimension_metrics.ResponseTime95thMetrics, + }, + { + "dimension": "all", + "metrics": "response_time_50th", + "expected": dimension_metrics.ResponseTime50thMetrics, + }, + { + "dimension": "resource", + "metrics": "requests", + "expected": dimension_metrics.ResourceRequestsMetrics, + }, + { + "dimension": "app", + "metrics": "requests", + "expected": dimension_metrics.AppRequestsMetrics, + }, + { + "dimension": "resource_non200_status", + "metrics": "requests", + "expected": dimension_metrics.ResourceNon200StatusRequestsMetrics, + }, + ] + for test in data: + result = dimension_metrics.DimensionMetricsFactory.create_dimension_metrics( + DimensionEnum(test["dimension"]), + MetricsEnum(test["metrics"]), + ) + assert isinstance(result, 
test["expected"]) diff --git a/src/dashboard/apigateway/apigateway/tests/apps/metrics/test_helpers.py b/src/dashboard/apigateway/apigateway/tests/apps/metrics/test_helpers.py deleted file mode 100644 index 8fc550801..000000000 --- a/src/dashboard/apigateway/apigateway/tests/apps/metrics/test_helpers.py +++ /dev/null @@ -1,388 +0,0 @@ -# -*- coding: utf-8 -*- -# -# TencentBlueKing is pleased to support the open source community by making -# 蓝鲸智云 - API 网关(BlueKing - APIGateway) available. -# Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved. -# Licensed under the MIT License (the "License"); you may not use this file except -# in compliance with the License. You may obtain a copy of the License at -# -# http://opensource.org/licenses/MIT -# -# Unless required by applicable law or agreed to in writing, software distributed under -# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, -# either express or implied. See the License for the specific language governing permissions and -# limitations under the License. -# -# We undertake not to change the open source license (MIT license) applicable -# to the current version of the project delivered to anyone in the future. -# -from apigateway.apps.metrics import helpers - - -class TestDimensionMetricsManager: - def test_create_dimension_metrics(self): - data = [ - { - "dimension": "all", - "metrics": "requests", - "expected": helpers.RequestsMetrics, - }, - { - "dimension": "all", - "metrics": "failed_requests", - "expected": helpers.FailedRequestsMetrics, - }, - { - "dimension": "all", - "metrics": "response_time_95th", - "expected": helpers.ResponseTime95thMetrics, - }, - { - "dimension": "all", - "metrics": "response_time_50th", - "expected": helpers.ResponseTime50thMetrics, - }, - { - "dimension": "resource", - "metrics": "requests", - "expected": helpers.ResourceRequestsMetrics, - }, - { - "dimension": "app", - "metrics": "requests", - "expected": helpers.AppRequestsMetrics, - }, - { - "dimension": "resource_non200_status", - "metrics": "requests", - "expected": helpers.ResourceNon200StatusRequestsMetrics, - }, - ] - for test in data: - result = helpers.DimensionMetricsManager.create_dimension_metrics( - test["dimension"], - test["metrics"], - ) - assert isinstance(result, test["expected"]) - - -class TestRequestsMetrics: - def test_get_query_expression(self, mocker): - mocker.patch("apigateway.apps.metrics.helpers.BaseDimensionMetrics.default_labels", return_value=[]) - mocker.patch( - "apigateway.apps.metrics.helpers.BasePrometheusMetrics.metric_name_prefix", - new_callable=mocker.PropertyMock(return_value="apigateway_"), - ) - - data = [ - { - "params": { - "gateway_id": 2, - "stage_name": "prod", - "resource_id": 1, - "step": "1m", - }, - "expected": ( - 'sum(increase(apigateway_apigateway_api_requests_total{job="apigateway", api="2", ' - 'stage="prod", resource="1"}[1m]))' - ), - }, - { - "params": { - "gateway_id": 2, - "stage_name": "prod", - "resource_id": None, - "step": "1m", - }, - "expected": ( - 'sum(increase(apigateway_apigateway_api_requests_total{job="apigateway", api="2", ' - 'stage="prod"}[1m]))' - ), - }, - ] - for test in data: - metrics = helpers.RequestsMetrics() - result = metrics.get_query_expression(**test["params"]) - assert result == test["expected"] - - -class TestFailedRequestsMetrics: - def test_get_query_expression(self, mocker): - mocker.patch("apigateway.apps.metrics.helpers.BaseDimensionMetrics.default_labels", return_value=[]) - mocker.patch( 
- "apigateway.apps.metrics.helpers.BasePrometheusMetrics.metric_name_prefix", - new_callable=mocker.PropertyMock(return_value="apigateway_"), - ) - - data = [ - { - "params": { - "gateway_id": 2, - "stage_name": "prod", - "resource_id": 1, - "step": "1m", - }, - "expected": ( - 'sum(increase(apigateway_apigateway_api_requests_total{job="apigateway", api="2", ' - 'stage="prod", resource="1", proxy_error="1"}[1m]))' - ), - }, - { - "params": { - "gateway_id": 2, - "stage_name": "prod", - "resource_id": None, - "step": "1m", - }, - "expected": ( - 'sum(increase(apigateway_apigateway_api_requests_total{job="apigateway", api="2", ' - 'stage="prod", proxy_error="1"}[1m]))' - ), - }, - ] - for test in data: - metrics = helpers.FailedRequestsMetrics() - result = metrics.get_query_expression(**test["params"]) - assert result == test["expected"], result - - -class TestResponseTime95thMetrics: - def test_get_query_expression(self, mocker): - mocker.patch("apigateway.apps.metrics.helpers.BaseDimensionMetrics.default_labels", return_value=[]) - mocker.patch( - "apigateway.apps.metrics.helpers.BasePrometheusMetrics.metric_name_prefix", - new_callable=mocker.PropertyMock(return_value="apigateway_"), - ) - - data = [ - { - "params": { - "gateway_id": 2, - "stage_name": "prod", - "resource_id": 1, - "step": "1m", - }, - "expected": ( - "histogram_quantile(0.95, sum(rate(apigateway_apigateway_api_request_duration_milliseconds_bucket{" - 'job="apigateway", api="2", stage="prod", resource="1"}[1m])) by (le, api))' - ), - }, - { - "params": { - "gateway_id": 2, - "stage_name": "prod", - "resource_id": None, - "step": "1m", - }, - "expected": ( - "histogram_quantile(0.95, sum(rate(apigateway_apigateway_api_request_duration_milliseconds_bucket{" - 'job="apigateway", api="2", stage="prod"}[1m])) ' - "by (le, api))" - ), - }, - ] - for test in data: - metrics = helpers.ResponseTime95thMetrics() - result = metrics.get_query_expression(**test["params"]) - assert result == test["expected"], result - - -class TestResponseTime50thMetrics: - def test_get_query_expression(self, mocker): - mocker.patch("apigateway.apps.metrics.helpers.BaseDimensionMetrics.default_labels", return_value=[]) - mocker.patch( - "apigateway.apps.metrics.helpers.BasePrometheusMetrics.metric_name_prefix", - new_callable=mocker.PropertyMock(return_value="apigateway_"), - ) - - data = [ - { - "params": { - "gateway_id": 2, - "stage_name": "prod", - "resource_id": 1, - "step": "1m", - }, - "expected": ( - "histogram_quantile(0.5, sum(rate(apigateway_apigateway_api_request_duration_milliseconds_bucket{" - 'job="apigateway", api="2", stage="prod", resource="1"}[1m])) by (le, api))' - ), - }, - { - "params": { - "gateway_id": 2, - "stage_name": "prod", - "resource_id": None, - "step": "1m", - }, - "expected": ( - "histogram_quantile(0.5, sum(rate(apigateway_apigateway_api_request_duration_milliseconds_bucket{" - 'job="apigateway", api="2", stage="prod"}[1m])) ' - "by (le, api))" - ), - }, - ] - for test in data: - metrics = helpers.ResponseTime50thMetrics() - result = metrics.get_query_expression(**test["params"]) - assert result == test["expected"], result - - -class TestResourceRequestsMetrics: - def test_get_query_expression(self, mocker): - mocker.patch("apigateway.apps.metrics.helpers.BaseDimensionMetrics.default_labels", return_value=[]) - mocker.patch( - "apigateway.apps.metrics.helpers.BasePrometheusMetrics.metric_name_prefix", - new_callable=mocker.PropertyMock(return_value="apigateway_"), - ) - - data = [ - { - "params": { - "gateway_id": 2, 
- "stage_name": "prod", - "resource_id": 1, - "step": "1m", - }, - "expected": ( - 'topk(10, sum(increase(apigateway_apigateway_api_requests_total{job="apigateway", api="2", ' - 'stage="prod", resource="1"}[1m])) by (api, resource, path))' - ), - }, - { - "params": { - "gateway_id": 2, - "stage_name": "prod", - "resource_id": None, - "step": "1m", - }, - "expected": ( - 'topk(10, sum(increase(apigateway_apigateway_api_requests_total{job="apigateway", api="2", ' - 'stage="prod"}[1m])) by (api, resource, path))' - ), - }, - ] - for test in data: - metrics = helpers.ResourceRequestsMetrics() - result = metrics.get_query_expression(**test["params"]) - assert result == test["expected"], result - - -class TestResourceFailedRequestsMetrics: - def test_get_query_expression(self, mocker): - mocker.patch("apigateway.apps.metrics.helpers.BaseDimensionMetrics.default_labels", return_value=[]) - mocker.patch( - "apigateway.apps.metrics.helpers.BasePrometheusMetrics.metric_name_prefix", - new_callable=mocker.PropertyMock(return_value="apigateway_"), - ) - - data = [ - { - "params": { - "gateway_id": 2, - "stage_name": "prod", - "resource_id": 1, - "step": "1m", - }, - "expected": ( - 'topk(10, sum(increase(apigateway_apigateway_api_requests_total{job="apigateway", api="2", ' - 'stage="prod", resource="1", proxy_error="1"}[1m])) by (api, resource, path))' - ), - }, - { - "params": { - "gateway_id": 2, - "stage_name": "prod", - "resource_id": None, - "step": "1m", - }, - "expected": ( - 'topk(10, sum(increase(apigateway_apigateway_api_requests_total{job="apigateway", api="2", ' - 'stage="prod", proxy_error="1"}[1m])) by (api, resource, path))' - ), - }, - ] - for test in data: - metrics = helpers.ResourceFailedRequestsMetrics() - result = metrics.get_query_expression(**test["params"]) - assert result == test["expected"], result - - -class TestAppRequestsMetrics: - def test_get_query_expression(self, mocker): - mocker.patch("apigateway.apps.metrics.helpers.BaseDimensionMetrics.default_labels", return_value=[]) - mocker.patch( - "apigateway.apps.metrics.helpers.BasePrometheusMetrics.metric_name_prefix", - new_callable=mocker.PropertyMock(return_value="apigateway_"), - ) - - data = [ - { - "params": { - "gateway_id": 2, - "stage_name": "prod", - "resource_id": 1, - "step": "1m", - }, - "expected": ( - 'topk(10, sum(increase(apigateway_apigateway_app_requests_total{job="apigateway", api="2", ' - 'stage="prod", resource="1"}[1m])) by (api, app_code))' - ), - }, - { - "params": { - "gateway_id": 2, - "stage_name": "prod", - "resource_id": None, - "step": "1m", - }, - "expected": ( - 'topk(10, sum(increase(apigateway_apigateway_app_requests_total{job="apigateway", api="2", ' - 'stage="prod"}[1m])) by (api, app_code))' - ), - }, - ] - for test in data: - metrics = helpers.AppRequestsMetrics() - result = metrics.get_query_expression(**test["params"]) - assert result == test["expected"], result - - -class TestResourceNon200StatusRequestsMetrics: - def test_get_query_expression(self, mocker): - mocker.patch("apigateway.apps.metrics.helpers.BaseDimensionMetrics.default_labels", return_value=[]) - mocker.patch( - "apigateway.apps.metrics.helpers.BasePrometheusMetrics.metric_name_prefix", - new_callable=mocker.PropertyMock(return_value="apigateway_"), - ) - - data = [ - { - "params": { - "gateway_id": 2, - "stage_name": "prod", - "resource_id": 1, - "step": "1m", - }, - "expected": ( - 'topk(10, sum(increase(apigateway_apigateway_api_requests_total{job="apigateway", api="2", ' - 'stage="prod", status!="200", 
resource="1"}[1m])) by (api, resource, path, status))' - ), - }, - { - "params": { - "gateway_id": 2, - "stage_name": "prod", - "resource_id": None, - "step": "1m", - }, - "expected": ( - 'topk(10, sum(increase(apigateway_apigateway_api_requests_total{job="apigateway", api="2", ' - 'stage="prod", status!="200"}[1m])) by (api, resource, path, status))' - ), - }, - ] - for test in data: - metrics = helpers.ResourceNon200StatusRequestsMetrics() - result = metrics.get_query_expression(**test["params"]) - assert result == test["expected"], result diff --git a/src/dashboard/apigateway/apigateway/tests/apps/metrics/test_statistics.py b/src/dashboard/apigateway/apigateway/tests/apps/metrics/test_statistics.py new file mode 100644 index 000000000..1f9d98d45 --- /dev/null +++ b/src/dashboard/apigateway/apigateway/tests/apps/metrics/test_statistics.py @@ -0,0 +1,176 @@ +# +# TencentBlueKing is pleased to support the open source community by making +# 蓝鲸智云 - API 网关(BlueKing - APIGateway) available. +# Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved. +# Licensed under the MIT License (the "License"); you may not use this file except +# in compliance with the License. You may obtain a copy of the License at +# +# http://opensource.org/licenses/MIT +# +# Unless required by applicable law or agreed to in writing, software distributed under +# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, +# either express or implied. See the License for the specific language governing permissions and +# limitations under the License. +# +# We undertake not to change the open source license (MIT license) applicable +# to the current version of the project delivered to anyone in the future. +# +import pytest + +from apigateway.apps.metrics.models import StatisticsAPIRequestByDay, StatisticsAppRequestByDay +from apigateway.apps.metrics.statistics import StatisticsHandler +from apigateway.utils.time import now_datetime + + +@pytest.fixture() +def fake_statistics_api_request_metrics(fake_resource): + fake_gateway = fake_resource.api + return { + "series": [ + { + "dimensions": { + "api_name": fake_gateway.name, + "stage_name": "prod", + "resource_name": fake_resource.name, + "proxy_error": "0", + }, + "datapoints": [[5, 1689292799000]], + }, + { + "dimensions": { + "api_name": fake_gateway.name, + "stage_name": "prod", + "resource_name": fake_resource.name, + "proxy_error": "1", + }, + "datapoints": [[2, 1689292799000]], + }, + ] + } + + +@pytest.fixture() +def fake_statistics_api_request_duration_metrics(fake_resource): + fake_gateway = fake_resource.api + return { + "series": [ + { + "dimensions": { + "api_name": fake_gateway.name, + "stage_name": "prod", + "resource_name": fake_resource.name, + }, + "datapoints": [[154, 1689292799000]], + }, + ] + } + + +@pytest.fixture() +def fake_statistics_app_request_metrics(fake_resource): + fake_gateway = fake_resource.api + return { + "series": [ + { + "dimensions": { + "api_name": fake_gateway.name, + "stage_name": "prod", + "resource_name": fake_resource.name, + }, + "datapoints": [[2, 1689292799000]], + }, + { + "dimensions": { + "bk_app_code": "app1", + "api_name": fake_gateway.name, + "stage_name": "prod", + "resource_name": fake_resource.name, + }, + "datapoints": [[0, 1689292799000]], + }, + { + "dimensions": { + "bk_app_code": "app2", + "api_name": fake_gateway.name, + "stage_name": "prod", + "resource_name": fake_resource.name, + }, + "datapoints": [[3, 1689292799000]], + }, + { + "dimensions": { + 
"bk_app_code": "app2", + "api_name": fake_gateway.name, + "stage_name": "test", + "resource_name": fake_resource.name, + }, + "datapoints": [[1736, 1689292799000]], + }, + ] + } + + +class TestStatisticsHandler: + def test_save_api_request_data( + self, + mocker, + fake_resource, + fake_statistics_api_request_metrics, + fake_statistics_api_request_duration_metrics, + ): + mocker.patch( + "apigateway.apps.metrics.statistics.StatisticsAPIRequestMetrics.query", + return_value=fake_statistics_api_request_metrics, + ) + mocker.patch( + "apigateway.apps.metrics.statistics.StatisticsAPIRequestDurationMetrics.query", + return_value=fake_statistics_api_request_duration_metrics, + ) + fake_gateway = fake_resource.api + + now = now_datetime() + + handler = StatisticsHandler() + handler._save_api_request_data(now, now, "1m") + + assert StatisticsAPIRequestByDay.objects.filter(api_id=fake_gateway.id).count() == 1 + record = StatisticsAPIRequestByDay.objects.get(api_id=fake_gateway.id, resource_id=fake_resource.id) + assert record.total_count == 7 + assert record.failed_count == 2 + assert record.total_msecs == 154 + + def test_save_app_request_data(self, mocker, fake_resource, fake_statistics_app_request_metrics): + fake_gateway = fake_resource.api + mocker.patch( + "apigateway.apps.metrics.statistics.StatisticsAppRequestMetrics.query", + return_value=fake_statistics_app_request_metrics, + ) + + now = now_datetime() + + handler = StatisticsHandler() + handler._save_app_request_data(now, now, "1m") + + assert StatisticsAppRequestByDay.objects.filter(api_id=fake_gateway.id).count() == 3 + assert StatisticsAppRequestByDay.objects.filter(api_id=fake_gateway.id, bk_app_code="app2").count() == 2 + + record1 = StatisticsAppRequestByDay.objects.get(api_id=fake_gateway.id, bk_app_code="") + assert record1.total_count == 2 + + record2 = StatisticsAppRequestByDay.objects.get(api_id=fake_gateway.id, bk_app_code="app2", stage_name="test") + assert record2.total_count == 1736 + + def test_get_gateway_id(self, fake_gateway): + handler = StatisticsHandler() + + assert handler._get_gateway_id(fake_gateway.name) == fake_gateway.id + + def test_get_resource_id(self, fake_resource): + fake_gateway = fake_resource.api + + handler = StatisticsHandler() + + assert handler._gateway_id_to_resources == {} + assert handler._get_resource_id(fake_gateway.id, fake_resource.name) == fake_resource.id + assert handler._get_resource_id(fake_gateway.id, "not-exist-resource") is None + assert fake_gateway.id in handler._gateway_id_to_resources diff --git a/src/dashboard/apigateway/apigateway/tests/apps/metrics/test_stats_metrics.py b/src/dashboard/apigateway/apigateway/tests/apps/metrics/test_stats_metrics.py new file mode 100644 index 000000000..c488cb376 --- /dev/null +++ b/src/dashboard/apigateway/apigateway/tests/apps/metrics/test_stats_metrics.py @@ -0,0 +1,105 @@ +# -*- coding: utf-8 -*- +# +# TencentBlueKing is pleased to support the open source community by making +# 蓝鲸智云 - API 网关(BlueKing - APIGateway) available. +# Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved. +# Licensed under the MIT License (the "License"); you may not use this file except +# in compliance with the License. You may obtain a copy of the License at +# +# http://opensource.org/licenses/MIT +# +# Unless required by applicable law or agreed to in writing, software distributed under +# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, +# either express or implied. 
See the License for the specific language governing permissions and +# limitations under the License. +# +# We undertake not to change the open source license (MIT license) applicable +# to the current version of the project delivered to anyone in the future. +# +import pytest + +from apigateway.apps.metrics import stats_metrics + + +class TestStatisticsAPIRequestMetrics: + @pytest.mark.parametrize( + "step, expected", + [ + ( + "5m", + ( + "sum(increase(bk_apigateway_apigateway_api_requests_total{}[5m])) " + "by (api_name, stage_name, resource_name, proxy_error)" + ), + ) + ], + ) + def test_get_query_promql(self, mocker, step, expected): + mocker.patch("apigateway.apps.metrics.dimension_metrics.BaseDimensionMetrics.default_labels", return_value=[]) + + metrics = stats_metrics.StatisticsAPIRequestMetrics() + result = metrics._get_query_promql(step) + assert result == expected + + +class TestStatisticsAPIRequestDurationMetrics: + @pytest.mark.parametrize( + "step, expected", + [ + ( + "5m", + ( + "sum(increase(bk_apigateway_apigateway_api_request_duration_milliseconds_sum{}[5m])) " + "by (api_name, stage_name, resource_name)" + ), + ) + ], + ) + def test_get_query_promql(self, mocker, step, expected): + mocker.patch("apigateway.apps.metrics.dimension_metrics.BaseDimensionMetrics.default_labels", return_value=[]) + + metrics = stats_metrics.StatisticsAPIRequestDurationMetrics() + result = metrics._get_query_promql(step) + assert result == expected + + +class TestStatisticsAppRequestMetrics: + @pytest.mark.parametrize( + "step, expected", + [ + ( + "5m", + ( + "sum(increase(bk_apigateway_apigateway_app_requests_total{}[5m])) " + "by (app_code, api_name, stage_name, resource_name)" + ), + ) + ], + ) + def test_get_query_promql(self, mocker, step, expected): + mocker.patch("apigateway.apps.metrics.dimension_metrics.BaseDimensionMetrics.default_labels", return_value=[]) + + metrics = stats_metrics.StatisticsAppRequestMetrics() + result = metrics._get_query_promql(step) + assert result == expected + + +class TestStatisticsAppRequestByResourceMetrics: + @pytest.mark.parametrize( + "step, expected", + [ + ( + "5m", + ( + "sum(increase(bk_apigateway_apigateway_app_requests_total{}[5m])) " + "by (app_code, api_name, resource_name)" + ), + ) + ], + ) + def test_get_query_promql(self, mocker, step, expected): + mocker.patch("apigateway.apps.metrics.dimension_metrics.BaseDimensionMetrics.default_labels", return_value=[]) + + metrics = stats_metrics.StatisticsAppRequestByResourceMetrics() + result = metrics._get_query_promql(step) + assert result == expected diff --git a/src/dashboard/apigateway/apigateway/tests/apps/metrics/test_utils.py b/src/dashboard/apigateway/apigateway/tests/apps/metrics/test_utils.py new file mode 100644 index 000000000..a828b2932 --- /dev/null +++ b/src/dashboard/apigateway/apigateway/tests/apps/metrics/test_utils.py @@ -0,0 +1,41 @@ +# +# TencentBlueKing is pleased to support the open source community by making +# 蓝鲸智云 - API 网关(BlueKing - APIGateway) available. +# Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved. +# Licensed under the MIT License (the "License"); you may not use this file except +# in compliance with the License. You may obtain a copy of the License at +# +# http://opensource.org/licenses/MIT +# +# Unless required by applicable law or agreed to in writing, software distributed under +# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, +# either express or implied. 
See the License for the specific language governing permissions and +# limitations under the License. +# +# We undertake not to change the open source license (MIT license) applicable +# to the current version of the project delivered to anyone in the future. +# +import pytest + +from apigateway.apps.metrics.utils import MetricsSmartTimeRange + + +class TestMetricsSmartTimeRange: + @pytest.mark.parametrize( + "time_range_minutes, expected", + [ + (10, "1m"), + (59, "1m"), + (60, "1m"), + (300, "5m"), + (360, "5m"), + (720, "10m"), + (1440, "30m"), + (4320, "1h"), + (10080, "3h"), + (20000, "12h"), + ], + ) + def test_get_recommended_step(self, time_range_minutes, expected): + smart_time_range = MetricsSmartTimeRange(time_range=time_range_minutes * 60) + assert smart_time_range.get_recommended_step() == expected diff --git a/src/dashboard/apigateway/apigateway/tests/apps/metrics/test_views.py b/src/dashboard/apigateway/apigateway/tests/apps/metrics/test_views.py index 1585b65bb..b955d9f35 100644 --- a/src/dashboard/apigateway/apigateway/tests/apps/metrics/test_views.py +++ b/src/dashboard/apigateway/apigateway/tests/apps/metrics/test_views.py @@ -16,112 +16,45 @@ # We undertake not to change the open source license (MIT license) applicable # to the current version of the project delivered to anyone in the future. # -import json - -import pytest -from django_dynamic_fixture import G - -from apigateway.apps.metrics.views import QueryRangeAPIView -from apigateway.components.prometheus import QueryRangeResult -from apigateway.core.models import Stage -from apigateway.tests.utils.testing import APIRequestFactory, create_gateway, get_response_json class TestQueryRangeAPIView: - @pytest.fixture(autouse=True) - def setup_test_data(self): - self.factory = APIRequestFactory() - self.gateway = create_gateway() - self.stage = G(Stage, api=self.gateway, name="prod") - - def test_get(self, mocker, settings): - settings.PROMETHEUS_METRIC_NAME_PREFIX = "apigateway_" - - data = [ - { - "params": { - "stage_id": self.stage.id, - "resource_id": "", - "dimension": "all", - "metrics": "requests", - "time_range": 3600, - }, - "mock_metrics": "apigateway.apps.metrics.helpers.RequestsMetrics", - "mock_query_expression": ( - 'sum(increase(apigateway_apigateway_api_requests_total{job="apigateway", api="2", ' - 'stage="prod", resource=~".*"}[1m]))' - ), + def test_get(self, mocker, fake_stage, request_view): + mocker.patch( + "apigateway.apps.metrics.views.DimensionMetricsFactory.create_dimension_metrics", + return_value=mocker.Mock(query_range=mocker.Mock(return_value={"foo": "bar"})), + ) + + response = request_view( + "GET", + "metrics.query_range", + path_params={ + "gateway_id": fake_stage.api.id, }, - { - "params": { - "stage_id": self.stage.id, - "resource_id": "", - "dimension": "all", - "metrics": "failed_requests", - "time_range": 3600, - }, - "mock_metrics": "apigateway.apps.metrics.helpers.FailedRequestsMetrics", - "mock_query_expression": ( - 'sum(increase(apigateway_apigateway_api_requests_total{job="apigateway", api="2", ' - 'stage="prod", resource=~".*", proxy_error="1"}[1m]))' - ), + data={ + "stage_id": fake_stage.id, + "dimension": "all", + "metrics": "requests", + "time_range": 300, }, - { - "params": { - "stage_id": self.stage.id, - "resource_id": "", - "dimension": "all", - "metrics": "response_time_95th", - "time_range": 3600, - }, - "mock_metrics": "apigateway.apps.metrics.helpers.ResponseTime95thMetrics", - "mock_query_expression": ( - "histogram_quantile(0.95, 
sum(rate(apigateway_apigateway_api_request_duration_milliseconds_bucket{" - 'job="apigateway", api="2", stage="prod", resource=~".*"}[1m])) by (le, api))' - ), + ) + result = response.json() + assert response.status_code == 200 + assert result["result"] is True + assert result["data"] == {"foo": "bar"} + + # stage not found + response = request_view( + "GET", + "metrics.query_range", + path_params={ + "gateway_id": fake_stage.api.id, }, - { - "params": { - "stage_id": self.stage.id, - "resource_id": "", - "dimension": "app", - "metrics": "requests", - "time_range": 3600, - }, - "mock_metrics": "apigateway.apps.metrics.helpers.AppRequestsMetrics", - "mock_query_expression": ( - 'topk(10, sum(increase(apigateway_apigateway_app_requests_total{job="apigateway", api="2", ' - 'stage="prod", resource=~".*"}[1m])) by (api, app_code))' - ), + data={ + "stage_id": 0, + "dimension": "all", + "metrics": "requests", + "time_range": 300, }, - ] - - for test in data: - query_range = mocker.patch("apigateway.apps.metrics.views.prometheus_component.query_range") - get_query_expression = mocker.patch(f'{test["mock_metrics"]}.get_query_expression') - query_range.return_value = QueryRangeResult( - **{ - "resultType": "matrix", - "result": [ - { - "metric": { - "api": "2", - "app_code": "test", - }, - "values": [ - [1582880683, "22698.666666666664"], - ], - } - ], - } - ) - get_query_expression.return_value = test["mock_query_expression"] - - request = self.factory.get(f"/apis/{self.gateway.id}/metrics/query_range/", data=test["params"]) - - view = QueryRangeAPIView.as_view() - response = view(request, gateway_id=self.gateway.id) - result = get_response_json(response) - - assert response.status_code == 200, json.dumps(result) - assert result["result"] is True + ) + assert response.status_code == 404 diff --git a/src/dashboard/apigateway/apigateway/tests/apps/permission/test_managers.py b/src/dashboard/apigateway/apigateway/tests/apps/permission/test_managers.py index add7ac494..cb3ff204e 100644 --- a/src/dashboard/apigateway/apigateway/tests/apps/permission/test_managers.py +++ b/src/dashboard/apigateway/apigateway/tests/apps/permission/test_managers.py @@ -25,7 +25,7 @@ from apigateway.apps.permission import models from apigateway.core.models import Gateway, Resource from apigateway.tests.utils.testing import dummy_time -from apigateway.utils.time import now_datetime +from apigateway.utils.time import now_datetime, to_datetime_from_now # from apigateway.apps.permission import constants @@ -116,15 +116,25 @@ def test_renew_permission(self): models.AppAPIPermission, api=self.gateway, bk_app_code="test-2", - expires=dummy_time.time, + expires=to_datetime_from_now(170), + ) + perm_3 = G( + models.AppAPIPermission, + api=self.gateway, + bk_app_code="test-3", + expires=to_datetime_from_now(days=720), ) models.AppAPIPermission.objects.renew_permission( self.gateway, - ids=[perm_1.id, perm_2.id], + ids=[perm_1.id, perm_2.id, perm_3.id], ) perm_1 = models.AppAPIPermission.objects.get(id=perm_1.id) - assert 180 * 24 * 3600 - 10 < (perm_1.expires - now_datetime()).total_seconds() < 180 * 24 * 3600 + perm_2 = models.AppAPIPermission.objects.get(id=perm_2.id) + perm_3 = models.AppAPIPermission.objects.get(id=perm_3.id) + assert to_datetime_from_now(days=179) < perm_1.expires < to_datetime_from_now(days=181) + assert to_datetime_from_now(days=179) < perm_2.expires < to_datetime_from_now(days=181) + assert to_datetime_from_now(days=719) < perm_3.expires < to_datetime_from_now(days=721) def test_delete_permission(self, 
fake_gateway): p1 = G(models.AppAPIPermission, api=fake_gateway, bk_app_code="app1") @@ -239,16 +249,63 @@ def test_renew_permission(self): models.AppResourcePermission, api=self.gateway, bk_app_code="test-2", - expires=dummy_time.time, + expires=to_datetime_from_now(days=70), + resource_id=self.resource.id, + ) + perm_3 = G( + models.AppResourcePermission, + api=self.gateway, + bk_app_code="test-3", + expires=to_datetime_from_now(days=720), resource_id=self.resource.id, ) models.AppResourcePermission.objects.renew_permission( self.gateway, - ids=[perm_1.id, perm_2.id], + ids=[perm_1.id, perm_2.id, perm_3.id], + ) + perm_1 = models.AppResourcePermission.objects.get(id=perm_1.id) + perm_2 = models.AppResourcePermission.objects.get(id=perm_2.id) + perm_3 = models.AppResourcePermission.objects.get(id=perm_3.id) + assert to_datetime_from_now(days=179) < perm_1.expires < to_datetime_from_now(181) + assert to_datetime_from_now(days=179) < perm_2.expires < to_datetime_from_now(181) + assert to_datetime_from_now(days=719) < perm_3.expires < to_datetime_from_now(721) + + def test_renew_not_expired_permission(self): + perm_1 = G( + models.AppResourcePermission, + api=self.gateway, + bk_app_code="test-1", + expires=dummy_time.time, + resource_id=self.resource.id, ) + perm_2 = G( + models.AppResourcePermission, + api=self.gateway, + bk_app_code="test-2", + expires=to_datetime_from_now(days=70), + resource_id=self.resource.id, + ) + perm_3 = G( + models.AppResourcePermission, + api=self.gateway, + bk_app_code="test-3", + expires=to_datetime_from_now(days=720), + resource_id=self.resource.id, + ) + + for bk_app_code in ["test-1", "test-2", "test-3"]: + models.AppResourcePermission.objects.renew_not_expired_permissions( + self.gateway, + bk_app_code=bk_app_code, + resource_ids=[self.resource.id], + ) perm_1 = models.AppResourcePermission.objects.get(id=perm_1.id) - assert 180 * 24 * 3600 - 10 < (perm_1.expires - now_datetime()).total_seconds() < 180 * 24 * 3600 + perm_2 = models.AppResourcePermission.objects.get(id=perm_2.id) + perm_3 = models.AppResourcePermission.objects.get(id=perm_3.id) + assert perm_1.expires == dummy_time.time + assert to_datetime_from_now(days=179) < perm_2.expires < to_datetime_from_now(181) + assert to_datetime_from_now(days=719) < perm_3.expires < to_datetime_from_now(721) def test_save_permissions(self): resource_1 = G(Resource, api=self.gateway) diff --git a/src/dashboard/apigateway/apigateway/tests/apps/permission/test_tasks.py b/src/dashboard/apigateway/apigateway/tests/apps/permission/test_tasks.py new file mode 100644 index 000000000..7f663972c --- /dev/null +++ b/src/dashboard/apigateway/apigateway/tests/apps/permission/test_tasks.py @@ -0,0 +1,96 @@ +# +# TencentBlueKing is pleased to support the open source community by making +# 蓝鲸智云 - API 网关(BlueKing - APIGateway) available. +# Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved. +# Licensed under the MIT License (the "License"); you may not use this file except +# in compliance with the License. You may obtain a copy of the License at +# +# http://opensource.org/licenses/MIT +# +# Unless required by applicable law or agreed to in writing, software distributed under +# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, +# either express or implied. See the License for the specific language governing permissions and +# limitations under the License. 
+# +# We undertake not to change the open source license (MIT license) applicable +# to the current version of the project delivered to anyone in the future. +# +from ddf import G + +from apigateway.apps.metrics.models import StatisticsAppRequestByDay +from apigateway.apps.permission.models import AppResourcePermission +from apigateway.apps.permission.tasks import renew_app_resource_permission +from apigateway.utils.time import now_datetime, to_datetime_from_now + + +class TestRenewAppResourcePermission: + def test(self, fake_gateway, unique_id): + bk_app_code = unique_id + now = now_datetime() + + G( + StatisticsAppRequestByDay, + api_id=fake_gateway.id, + bk_app_code=bk_app_code, + resource_id=1, + end_time=to_datetime_from_now(days=-3), + ) + G(StatisticsAppRequestByDay, api_id=fake_gateway.id, bk_app_code=bk_app_code, resource_id=2, end_time=now) + G(StatisticsAppRequestByDay, api_id=fake_gateway.id, bk_app_code=bk_app_code, resource_id=3, end_time=now) + G(StatisticsAppRequestByDay, api_id=fake_gateway.id, bk_app_code=bk_app_code, resource_id=4, end_time=now) + G(StatisticsAppRequestByDay, api_id=fake_gateway.id, bk_app_code=bk_app_code, resource_id=5, end_time=now) + + G( + AppResourcePermission, + api=fake_gateway, + bk_app_code=bk_app_code, + resource_id=1, + expires=to_datetime_from_now(days=3), + ) + G( + AppResourcePermission, + api=fake_gateway, + bk_app_code=bk_app_code, + resource_id=2, + expires=to_datetime_from_now(days=-3), + ) + G( + AppResourcePermission, + api=fake_gateway, + bk_app_code=bk_app_code, + resource_id=3, + expires=to_datetime_from_now(days=3), + ) + G( + AppResourcePermission, + api=fake_gateway, + bk_app_code=bk_app_code, + resource_id=4, + expires=to_datetime_from_now(days=720), + ) + G( + AppResourcePermission, + api=fake_gateway, + bk_app_code=bk_app_code, + resource_id=5, + expires=to_datetime_from_now(days=170), + ) + + renew_app_resource_permission() + + assert AppResourcePermission.objects.get( + api_id=fake_gateway.id, bk_app_code=bk_app_code, resource_id=1 + ).expires < to_datetime_from_now(days=4) + assert ( + AppResourcePermission.objects.get(api_id=fake_gateway.id, bk_app_code=bk_app_code, resource_id=2).expires + < now_datetime() + ) + assert AppResourcePermission.objects.get( + api_id=fake_gateway.id, bk_app_code=bk_app_code, resource_id=3 + ).expires > to_datetime_from_now(days=179) + assert AppResourcePermission.objects.get( + api_id=fake_gateway.id, bk_app_code=bk_app_code, resource_id=4 + ).expires > to_datetime_from_now(days=719) + assert AppResourcePermission.objects.get( + api_id=fake_gateway.id, bk_app_code=bk_app_code, resource_id=5 + ).expires > to_datetime_from_now(days=179) diff --git a/src/dashboard/apigateway/apigateway/tests/core/test_managers.py b/src/dashboard/apigateway/apigateway/tests/core/test_managers.py index bc419f200..82652f6a4 100644 --- a/src/dashboard/apigateway/apigateway/tests/core/test_managers.py +++ b/src/dashboard/apigateway/apigateway/tests/core/test_managers.py @@ -200,6 +200,15 @@ def test_get_gateway_name_to_active_stage_names(self): result = Stage.objects.get_gateway_name_to_active_stage_names([gateway]) assert result == {gateway.name: ["s1", "s3"]} + def test_get_name(self, fake_gateway): + s = G(Stage, api=fake_gateway) + + name = Stage.objects.get_name(fake_gateway.id, s.id) + assert name == s.name + + name = Stage.objects.get_name(fake_gateway.id, 0) + assert name is None + class TestResourceManager: @pytest.fixture(autouse=True) @@ -368,6 +377,15 @@ def test_get_resource_ids_by_names(self): 
resource_2.id, ] + def test_get_name(self, fake_gateway): + resource = G(Resource, api=fake_gateway) + + name = Resource.objects.get_name(fake_gateway.id, resource.id) + assert name == resource.name + + name = Resource.objects.get_name(fake_gateway.id, 0) + assert name is None + class TestContextManager(TestCase): pass
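
The tests above pin down the public behaviour of the new dimension-metrics module: each concrete metrics class builds a PromQL expression from (gateway_name, stage_name, resource_name, step), and DimensionMetricsFactory.create_dimension_metrics resolves a (DimensionEnum, MetricsEnum) pair to one of those classes. The factory implementation itself is not part of this excerpt, so the snippet below is only a sketch, run in the project's Django/pytest environment, under the assumption that the factory keeps a registry keyed by (dimension, metrics); the "Sketch" class name and registry layout are hypothetical.

# Sketch only: the real factory lives in apigateway/apps/metrics/dimension_metrics.py
# and is not shown in this diff; names ending in "Sketch" are hypothetical.
from typing import ClassVar, Dict, Tuple, Type

from apigateway.apps.metrics.constants import DimensionEnum, MetricsEnum
from apigateway.apps.metrics.dimension_metrics import BaseDimensionMetrics, RequestsMetrics


class DimensionMetricsFactorySketch:
    # Assumed layout: a class-level registry keyed by (dimension, metrics).
    # The tests only assert the lookup result, not how the mapping is stored.
    _registry: ClassVar[Dict[Tuple[DimensionEnum, MetricsEnum], Type[BaseDimensionMetrics]]] = {
        (DimensionEnum.ALL, MetricsEnum.REQUESTS): RequestsMetrics,
        # ... remaining (dimension, metrics) pairs from TestDimensionMetricsFactory
    }

    @classmethod
    def create_dimension_metrics(cls, dimension: DimensionEnum, metrics: MetricsEnum) -> BaseDimensionMetrics:
        return cls._registry[(dimension, metrics)]()


# Resolving ("all", "requests") returns a RequestsMetrics instance, whose
# _get_query_promql() produces the sum(increase(...)) expression asserted above.
metrics = DimensionMetricsFactorySketch.create_dimension_metrics(
    DimensionEnum("all"), MetricsEnum("requests")
)
assert isinstance(metrics, RequestsMetrics)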
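
The permission tests encode the renewal rule: renew_permission extends a record to roughly 180 days from now and never shortens a permission that already expires later than that, while renew_not_expired_permissions additionally skips records that have already expired. A minimal sketch of the extend-without-shortening part follows, assuming a 180-day renewal window; the helper name and constant are illustrative, not the project's actual API.

# Sketch of the rule implied by test_renew_permission / test_renew_not_expired_permission;
# the real logic lives in apigateway/apps/permission and may differ in detail.
from datetime import datetime, timedelta, timezone

RENEW_WINDOW_DAYS = 180  # assumption: the tests only bound the new expiry near 180 days


def renewed_expires(current_expires: datetime, now: datetime, days: int = RENEW_WINDOW_DAYS) -> datetime:
    """Extend the expiry to now + days, but never move an expiry earlier."""
    return max(current_expires, now + timedelta(days=days))


now = datetime.now(tz=timezone.utc)
# A permission already valid for 720 days keeps its later expiry ...
assert renewed_expires(now + timedelta(days=720), now) == now + timedelta(days=720)
# ... while a short-lived or expired one is pushed out to ~180 days.
assert renewed_expires(now - timedelta(days=3), now) == now + timedelta(days=180)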